-- cgit v1.2.3

From d8e1da2f6465e3d6baa0c6e921a3cc0b9ce3f2c7 Mon Sep 17 00:00:00 2001
From: fgodi
Date: Fri, 13 Oct 2017 09:33:33 +0000
Subject: module toplex_map added

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2784 636b058d-ea47-450e-bf9e-a15bfbe3eedb
Former-commit-id: 61f8f8a2d3920dbc30b86dd808a0e316383af137
---
 src/Toplex_map/doc/Intro_Toplex_map.h              |  57 ++++
 src/Toplex_map/doc/map.png                         | Bin 0 -> 278692 bytes
 src/Toplex_map/example/CMakeLists.txt              |   4 +
 src/Toplex_map/example/chrono.cpp                  | 137 +++++++++
 src/Toplex_map/include/gudhi/Fake_simplex_tree.h   | 207 ++++++++++++++
 src/Toplex_map/include/gudhi/Filtered_toplex_map.h |  45 +++
 src/Toplex_map/include/gudhi/Lazy_toplex_map.h     | 218 +++++++++++++++
 src/Toplex_map/include/gudhi/Toplex_map.h          | 307 +++++++++++++++++++++
 src/Toplex_map/test/CMakeLists.txt                 |  14 +
 src/Toplex_map/test/test.cpp                       |  71 +++++
 10 files changed, 1060 insertions(+)
 create mode 100644 src/Toplex_map/doc/Intro_Toplex_map.h
 create mode 100644 src/Toplex_map/doc/map.png
 create mode 100644 src/Toplex_map/example/CMakeLists.txt
 create mode 100644 src/Toplex_map/example/chrono.cpp
 create mode 100644 src/Toplex_map/include/gudhi/Fake_simplex_tree.h
 create mode 100644 src/Toplex_map/include/gudhi/Filtered_toplex_map.h
 create mode 100644 src/Toplex_map/include/gudhi/Lazy_toplex_map.h
 create mode 100644 src/Toplex_map/include/gudhi/Toplex_map.h
 create mode 100644 src/Toplex_map/test/CMakeLists.txt
 create mode 100644 src/Toplex_map/test/test.cpp

diff --git a/src/Toplex_map/doc/Intro_Toplex_map.h b/src/Toplex_map/doc/Intro_Toplex_map.h
new file mode 100644
index 00000000..da9562ec
--- /dev/null
+++ b/src/Toplex_map/doc/Intro_Toplex_map.h
@@ -0,0 +1,57 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: François Godi
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_TOPLEX_MAP_H_
+#define DOC_TOPLEX_MAP_H_
+
+// needs namespace for Doxygen to link on classes
+namespace Gudhi {
+
+/** \defgroup toplex_map Toplex Map
+ *
+ * \author François Godi
+ * @{
+ *
+ * \section toplexmapdefinition Definition
+ *
+ * Consider a simplicial complex; denote by $d$ its dimension
+ * and by $k$ its number of maximal simplices.
+ * Furthermore, denote by $\gamma_0$ the maximal number of toplices, i.e. maximal simplices,
+ * that contain a common vertex.
+ *
+ * The goal of the Toplex Map is both to represent the complex in optimal
+ * O(kd) space and to provide fast standard operations such as: insertion, removal
+ * and membership of a simplex, contraction of an edge, and collapses. The time needed
+ * for these operations is linear or quadratic in $\gamma_0$ and $d$.
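+ *
+ * A minimal usage sketch (illustrative only, using the Toplex_map interface declared
+ * in this module; vertices are Gudhi::Vertex, i.e. std::size_t):
+ * \code
+ * Gudhi::Toplex_map tm;
+ * std::vector<Gudhi::Vertex> triangle = {1, 2, 3};
+ * tm.insert_simplex(triangle);                  // {1,2,3} becomes a toplex
+ * std::vector<Gudhi::Vertex> edge = {1, 3};
+ * bool in_complex = tm.membership(edge);        // true: {1,3} is a face of {1,2,3}
+ * bool is_toplex  = tm.maximality(edge);        // false: {1,3} is not maximal
+ * Gudhi::Vertex kept = tm.contraction(1, 2);    // contract the edge {1,2}, returns the remaining vertex
+ * \endcode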
+ * + * Toplex map is composed firstly of a raw storage of toplices and secondly of a + * map which associate any vertex to a set of pointers toward all toplices + * containing this vertex. + * + * \image html map.png + * + */ +/** @} */ // end defgroup toplex_map + +} // namespace Gudhi + +#endif // DOC_TOPLEX_MAP_H_ diff --git a/src/Toplex_map/doc/map.png b/src/Toplex_map/doc/map.png new file mode 100644 index 00000000..d1987043 Binary files /dev/null and b/src/Toplex_map/doc/map.png differ diff --git a/src/Toplex_map/example/CMakeLists.txt b/src/Toplex_map/example/CMakeLists.txt new file mode 100644 index 00000000..2341fe06 --- /dev/null +++ b/src/Toplex_map/example/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 2.6) +project(Toplex_map_examples) + +add_executable(chrono chrono.cpp) diff --git a/src/Toplex_map/example/chrono.cpp b/src/Toplex_map/example/chrono.cpp new file mode 100644 index 00000000..d93d1e1f --- /dev/null +++ b/src/Toplex_map/example/chrono.cpp @@ -0,0 +1,137 @@ +#include +#include +#include + +#include +#include + +using namespace Gudhi; + +typedef Simplex typeVectorVertex; +typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; + +class ST_wrapper { + +public: + void insert_simplex(const Simplex& tau); + bool membership(const Simplex& tau); + Vertex contraction(const Vertex x, const Vertex y); + std::size_t num_simplices(); + +private: + Simplex_tree<> simplexTree; + void erase_max(const Simplex& sigma); +}; + +void ST_wrapper::insert_simplex(const Simplex& tau){ + simplexTree.insert_simplex_and_subfaces(tau); +} + +bool ST_wrapper::membership(const Simplex& tau) { + return simplexTree.find(tau) != simplexTree.null_simplex(); +} + +void ST_wrapper::erase_max(const Simplex& sigma){ + if(membership(sigma)) + simplexTree.remove_maximal_simplex(simplexTree.find(sigma)); +} + +Vertex ST_wrapper::contraction(const Vertex x, const Vertex y){ + Simplex sx; sx.insert(x); + auto hx = simplexTree.find(sx); + if(hx != simplexTree.null_simplex()) + for(auto h : simplexTree.cofaces_simplex_range(hx,0)){ + auto sr = simplexTree.simplex_vertex_range(h); + Simplex sigma(sr.begin(),sr.end()); + erase_max(sigma); + sigma.erase(x); + sigma.insert(y); + insert_simplex(sigma); + } + return y; +} + +std::size_t ST_wrapper::num_simplices(){ + return simplexTree.num_simplices(); +} + + + +int n = 300; + +int nb_insert_simplex1 = 3000; +int nb_membership1 = 4000; +int nb_contraction = 300; +int nb_insert_simplex2 = 3000; +int nb_membership2 = 400000; + +Simplex random_simplex(int n, int d){ + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(1, n); + Simplex s; + while(s.size()!=d) + s.insert(dis(gen)); + return s; +} + +std::vector r_vector_simplices(int n, int max_d, int m){ + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(1, max_d); + std::vector v; + for(int i=0; i +void chrono(int n, int d){ + complex_type K; + std::vector simplices_insert_simplex1 = r_vector_simplices(n,d,nb_insert_simplex1); + std::vector simplices_membership1 = r_vector_simplices(n,d,nb_membership1); + std::vector simplices_insert_simplex2 = r_vector_simplices(n - 2*nb_contraction,d,nb_insert_simplex2); + std::vector simplices_membership2 = r_vector_simplices(n - 2*nb_contraction,d,nb_membership2); + std::chrono::time_point start, end; + + for(const Simplex& s : simplices_insert_simplex1) + K.insert_simplex(s); + + for(const Simplex& s : simplices_membership1) + K.membership(s); + + start = 
std::chrono::system_clock::now(); + for(int i = 0; i<=nb_contraction; i++) + K.contraction(n-2*i,n-2*i-1); + end = std::chrono::system_clock::now(); + auto c3 = std::chrono::duration_cast(end-start).count(); + + start = std::chrono::system_clock::now(); + for(const Simplex& s : simplices_insert_simplex2) + K.insert_simplex(s); + end = std::chrono::system_clock::now(); + auto c1 = std::chrono::duration_cast(end-start).count(); + + start = std::chrono::system_clock::now(); + for(const Simplex& s : simplices_membership2) + K.membership(s); + end = std::chrono::system_clock::now(); + auto c2 = std::chrono::duration_cast(end-start).count(); + + std::cout << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_simplices() << std::endl; +} + +int main(){ + for(int d=5;d<=40;d+=5){ + std::cout << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl; + std::cout << "T Map \t \t"; + chrono(n,d); + std::cout << "Lazy \t \t"; + chrono(n,d); + if(d<=15){ + std::cout << "ST \t \t"; + chrono(n,d); + } + std::cout << std::endl; + } +} diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h new file mode 100644 index 00000000..5c7e7b12 --- /dev/null +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -0,0 +1,207 @@ +#ifndef FAKE_SIMPLEX_TREE_H +#define FAKE_SIMPLEX_TREE_H + +#include +#include + +namespace Gudhi { + +class Fake_simplex_tree : public Filtered_toplex_map { + +public: + + typedef Vertex Vertex_handle; + + typedef Simplex_ptr Simplex_handle; + + /** \brief Inserts a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` in the simplicial + * complex. */ + template + void insert_graph(const OneSkeletonGraph& skel_graph); + + /** \brief Expands the simplicial complex containing only its one skeleton until a given maximal dimension as + * explained in \ref ripsdefinition. */ + void expansion(int max_dim); + + /** \brief Returns the number of vertices in the simplicial complex. */ + std::size_t num_vertices(); + + Simplex_ptr_set candidates() const; + + std::size_t dimension() const; + + std::size_t num_simplices() const; + + std::size_t num_vertices() const; + + Simplex simplex_vertex_range(Simplex_ptr &sptr) const; + + std::vector max_simplices() const; + + std::unordered_set filtration_simplex_range() const; + + std::unordered_set skeleton_simplex_range(int d=std::numeric_limits::max()) const; + + std::size_t dimension(Simplex_ptr& sptr) const; + + void assign_filtration(Simplex_ptr& f_simplex, Filtration_value alpha_complex_filtration); + + void make_filtration_non_decreasing(); + +protected: + + /** \internal Does all the facets of the given simplex belong to the complex ? 
+ * \ingroup toplex_map */ + template + bool all_facets_inside(const Input_vertex_range &vertex_range) const; + +}; + +template +void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ + typename boost::graph_traits::edge_iterator e_it, + e_it_end; + for (std::tie(e_it, e_it_end) = boost::edges(skel_graph); e_it != e_it_end; ++e_it) { + auto u = source(*e_it, skel_graph); + auto v = target(*e_it, skel_graph); + if(u +bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ + Simplex sigma(vertex_range); + for(const Simplex& s : facets(sigma)) + if(!filtrations.count(get_key(s))) return false; + return true; +} + +Simplex_ptr_set Fake_simplex_tree::candidates() const{ + Simplex_ptr_set c; + std::unordered_map, Toplex_map::Sptr_hash, Toplex_map::Sptr_equal> facets_to_max; + for(const auto& kv : filtrations){ + Simplex sigma (*(kv.first)); + for(Vertex v : sigma){ + sigma.erase(v); + auto sptr = get_key(sigma); + if(!facets_to_max.count(sptr)) facets_to_max.emplace(sptr, std::vector()); + facets_to_max.at(sptr).emplace_back(v); + sigma.insert(v); + } + } + for(const auto& kv : facets_to_max){ + std::unordered_set facets(kv.second.begin(), kv.second.end()); + for(Vertex v : kv.second){ + facets.erase(v); + for(Vertex w : facets){ + Simplex sigma(*(kv.first)); + sigma.insert(v); + sigma.insert(w); + if(all_facets_inside(sigma)) + c.emplace(get_key(sigma)); + } + facets.emplace(v); + } + } + return c; +} + +std::size_t Fake_simplex_tree::dimension() const { + std::size_t max = 0; + for(auto kv : filtrations) + max = std::max(max, kv.first->size()); + return max; +} + +std::size_t Fake_simplex_tree::num_simplices() const { + return filtration_simplex_range().size(); +} + +std::size_t Fake_simplex_tree::num_vertices() const { + std::unordered_set vertices; + for(auto kv : filtrations) + for (Vertex v : *(kv.first)) + vertices.emplace(v); + return vertices.size(); +} + +Simplex Fake_simplex_tree::simplex_vertex_range(Simplex_ptr& sptr) const { + return *sptr; +} + +std::unordered_set Fake_simplex_tree::filtration_simplex_range() const{ + std::vector m = max_simplices(); + std::unordered_set seen; + while(m.begin()!=m.end()){ + Simplex_ptr& sptr = m.back(); + m.pop_back(); + if(seen.find(sptr)!=seen.end()){ + seen.emplace(sptr); + for(Simplex& sigma : facets(*sptr)) + m.emplace_back(get_key(sigma)); + } + } + return seen; +} + +std::unordered_set Fake_simplex_tree::skeleton_simplex_range(int d) const{ + std::unordered_set simplices; + for(auto sptr: filtration_simplex_range()) + if(sptr->size()<=d) + simplices.emplace(sptr); + return simplices; +} + +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector s; + for(auto kv : filtrations) + s.emplace_back(kv.first); + return s; +} + +std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ + return sptr->size(); +} + + +void Fake_simplex_tree::assign_filtration(Simplex_ptr& f_simplex, Filtration_value alpha_complex_filtration){ + filtrations.emplace(f_simplex,alpha_complex_filtration); +} + +void Fake_simplex_tree::make_filtration_non_decreasing(){ + for(auto yt = filtrations.begin(); yt != filtrations.end(); ++yt) + for (auto it = toplex_maps.begin(); it != toplex_maps.end(); ++it){ + if(it->first == yt -> second) + break; + if(it->second.membership(*(yt->first))) + for(const Simplex_ptr& sptr : it->second.maximal_cofaces(*(yt->first))){ + it->second.erase_maximal(sptr); + toplex_maps.at(yt->second).insert_simplex(*sptr); + filtrations.emplace(sptr,yt->second); + } + } + +} + + + 
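+// Minimal usage sketch (illustrative only, based on the interface above): a Fake_simplex_tree
+// is filled like a Simplex_tree through insert_simplex_and_subfaces() and can then be queried.
+//
+//   Gudhi::Fake_simplex_tree stree;
+//   std::vector<Gudhi::Vertex> sigma = {1, 2, 3};
+//   stree.insert_simplex_and_subfaces(sigma, 0.5);   // toplex {1,2,3} with filtration value 0.5
+//   std::cout << stree.num_vertices() << " vertices, " << stree.num_simplices()
+//             << " simplices, dimension " << stree.dimension() << std::endl;
+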
+} //namespace Gudhi + +#endif /* FAKE_SIMPLEX_TREE_H */ + diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h new file mode 100644 index 00000000..4b626f11 --- /dev/null +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -0,0 +1,45 @@ +#ifndef FILTERED_TOPLEX_MAP_H +#define FILTERED_TOPLEX_MAP_H + +#include +#include + +#define filtration_upper_bound std::numeric_limits::max() + +namespace Gudhi { + +typedef double Filtration_value; + +class Filtered_toplex_map { + +public: + template + void insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = filtration_upper_bound); + + template + Filtration_value filtration(const Input_vertex_range &vertex_range) const; + +protected: + std::unordered_map toplex_maps; + std::unordered_map filtrations; + +}; + +template +void Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ + if(!toplex_maps.count(f)) toplex_maps.emplace(f,Toplex_map()); + toplex_maps.at(f).insert_simplex(vertex_range); + filtrations.emplace(get_key(vertex_range),f); +} + +template +Filtration_value Filtered_toplex_map::filtration(const Input_vertex_range &vertex_range) const{ + for(auto kv : toplex_maps) + if(kv.second.membership(vertex_range)) + return kv.first; + return filtration_upper_bound; +} + +} //namespace Gudhi + +#endif /* FILTERED_TOPLEX_MAP_H */ diff --git a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h new file mode 100644 index 00000000..3ffe8214 --- /dev/null +++ b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h @@ -0,0 +1,218 @@ +#ifndef LAZY_TOPLEX_MAP_H +#define LAZY_TOPLEX_MAP_H + +#include +#include + +namespace Gudhi { + +class Lazy_Toplex_map { + +public: + template + void insert_max_simplex(const Input_vertex_range &vertex_range); + template + bool insert_simplex(const Input_vertex_range &vertex_range); + template + void remove_simplex(const Input_vertex_range &vertex_range); + + template + bool membership(const Input_vertex_range &vertex_range); + template + bool all_facets_inside(const Input_vertex_range &vertex_range); + + Vertex contraction(const Vertex x, const Vertex y); + + std::size_t num_simplices() const; + +private: + template + void erase_max(const Input_vertex_range &vertex_range); + template + Vertex best_index(const Input_vertex_range &vertex_range); + void clean(const Vertex v); + + std::unordered_map t0; + bool empty_toplex; // Is the empty simplex a toplex ? 
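+    // Lazy strategy: insert_simplex() may leave non-maximal simplices in t0. gamma0_lbounds
+    // keeps, for each vertex, a lower bound on its number of maximal cofaces; a vertex list is
+    // cleaned (non-maximal entries removed) when the total number of stored simplices exceeds
+    // betta * size_lbound, or when a query visits a list more than alpha times its lower bound.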
+ + typedef boost::heap::fibonacci_heap> PriorityQueue; + PriorityQueue cleaning_priority; + std::unordered_map cp_handles; + + std::unordered_map gamma0_lbounds; + std::size_t get_gamma0_lbound(const Vertex v) const; + + std::size_t size_lbound = 0; + std::size_t size = 0; + + const double alpha = 2; //time + const double betta = 3; //memory +}; + +template +void Lazy_Toplex_map::insert_max_simplex(const Input_vertex_range &vertex_range){ + for(const Vertex& v : vertex_range) + if(!gamma0_lbounds.count(v)) gamma0_lbounds.emplace(v,1); + else gamma0_lbounds[v]++; + size_lbound++; + insert_simplex(vertex_range); +} + +template +bool Lazy_Toplex_map::insert_simplex(const Input_vertex_range &vertex_range){ + Simplex sigma(vertex_range.begin(),vertex_range.end()); + empty_toplex = (sigma.size()==0); //vérifier la gestion de empty face + Simplex_ptr sptr = std::make_shared(sigma); + bool inserted = false; + for(const Vertex& v : sigma){ + if(!t0.count(v)){ + t0.emplace(v, Simplex_ptr_set()); + auto v_handle = cleaning_priority.push(std::make_pair(0, v)); + cp_handles.emplace(v, v_handle); + } + inserted = t0.at(v).emplace(sptr).second; + cleaning_priority.update(cp_handles.at(v), std::make_pair(t0.at(v).size() - get_gamma0_lbound(v),v)); + } + if(inserted) + size++; + if(size > size_lbound * betta) + clean(cleaning_priority.top().second); + return inserted; +} + +template +void Lazy_Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ + if(vertex_range.begin()==vertex_range.end()){ + t0.clear(); + gamma0_lbounds.clear(); + cleaning_priority.clear(); + size_lbound = 0; + size = 0; + empty_toplex = false; + } + else { + const Vertex& v = best_index(vertex_range); + //Copy constructor needed because the set is modified + if(t0.count(v)) for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))) + if(included(vertex_range, *sptr)){ + erase_max(*sptr); + for(const Simplex& f : facets(vertex_range)) + insert_max_simplex(f); + } + } +} + +template +bool Lazy_Toplex_map::membership(const Input_vertex_range &vertex_range){ + if(t0.size()==0 && !empty_toplex) return false; //empty complex + if(vertex_range.begin()==vertex_range.end()) return true; //empty query simplex + Vertex v = best_index(vertex_range); + if(!t0.count(v)) return false; + for(const Simplex_ptr& sptr : t0.at(v)) + if(included(vertex_range, *sptr)) return true; + return false; +} + +template +bool Lazy_Toplex_map::all_facets_inside(const Input_vertex_range &vertex_range){ + Simplex sigma(vertex_range.begin(),vertex_range.end()); + Vertex v = best_index(sigma); + if(!t0.count(v)) return false; + Simplex f = sigma; f.erase(v); + if(!membership(f)) return false; + std::unordered_set facets_inside; + for(const Simplex_ptr& sptr : t0.at(v)) + for(const Vertex& w : sigma){ + f = sigma; f.erase(w); + if(included(f, *sptr)) facets_inside.insert(w); + } + return facets_inside.size() == sigma.size() - 1; +} + +/* Returns the remaining vertex */ +Vertex Lazy_Toplex_map::contraction(const Vertex x, const Vertex y){ + if(!t0.count(x)) return y; + if(!t0.count(y)) return x; + Vertex k, d; + if(t0.at(x).size() > t0.at(y).size()) + k=x, d=y; + else + k=y, d=x; + //Copy constructor needed because the set is modified + for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ + Simplex sigma(*sptr); + erase_max(sigma); + sigma.erase(d); + sigma.insert(k); + insert_simplex(sigma); + } + t0.erase(d); + return k; +} + +/* No facets insert_simplexed */ +template +inline void Lazy_Toplex_map::erase_max(const Input_vertex_range &vertex_range){ + 
Simplex sigma(vertex_range.begin(),vertex_range.end()); + empty_toplex = false; + Simplex_ptr sptr = std::make_shared(sigma); + bool erased; + for(const Vertex& v : sigma){ + erased = t0.at(v).erase(sptr) > 0; + if(t0.at(v).size()==0) + t0.erase(v); + } + if (erased) + size--; +} + +template +Vertex Lazy_Toplex_map::best_index(const Input_vertex_range &vertex_range){ + Simplex tau(vertex_range.begin(),vertex_range.end()); + std::size_t min = std::numeric_limits::max(); Vertex arg_min = -1; + for(const Vertex& v : tau) + if(!t0.count(v)) return v; + else if(t0.at(v).size() < min) + min = t0.at(v).size(), arg_min = v; + if(min > alpha * get_gamma0_lbound(arg_min)) + clean(arg_min); + return arg_min; +} + +std::size_t Lazy_Toplex_map::get_gamma0_lbound(const Vertex v) const{ + return gamma0_lbounds.count(v) ? gamma0_lbounds.at(v) : 0; +} + + +void Lazy_Toplex_map::clean(const Vertex v){ + Toplex_map toplices; + std::unordered_map> dsorted_simplices; + int max_dim = 0; + for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))){ + if(sptr->size() > max_dim){ + for(int d = max_dim+1; d<=sptr->size(); d++) + dsorted_simplices.emplace(d, std::vector()); + max_dim = sptr->size(); + } + dsorted_simplices[sptr->size()].emplace_back(*sptr); + erase_max(*sptr); + } + for(int d = max_dim; d>=1; d--) + for(const Simplex &s : dsorted_simplices.at(d)) + if(!toplices.membership(s)) + toplices.insert_independent_simplex(s); + Simplex sv; sv.insert(v); + auto clean_cofaces = toplices.maximal_cofaces(sv); + size_lbound = size_lbound - get_gamma0_lbound(v) + clean_cofaces.size(); + gamma0_lbounds[v] = clean_cofaces.size(); + for(const Simplex_ptr& sptr : clean_cofaces) + insert_simplex(*sptr); +} + +std::size_t Lazy_Toplex_map::num_simplices() const{ + return size; +} + +} //namespace Gudhi + +#endif /* LAZY_TOPLEX_MAP_H */ diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h new file mode 100644 index 00000000..0b6cad37 --- /dev/null +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -0,0 +1,307 @@ +#ifndef TOPLEX_MAP_H +#define TOPLEX_MAP_H + +#include +#include +#include +#include + +#define vertex_upper_bound std::numeric_limits::max() + +namespace Gudhi { + +/** Vertex is the type of vertices. + * \ingroup toplex_map */ +typedef std::size_t Vertex; + +/** Simplex is the type of simplices. + * \ingroup toplex_map */ +typedef std::unordered_set Simplex; + +/** A Toplex_map represents the simplicial complex. + * A "toplex" is a maximal simplex. + * \ingroup toplex_map */ +class Toplex_map { + +public: + /** The type of the pointers to maximal simplices. + * \ingroup toplex_map */ + typedef std::shared_ptr Simplex_ptr; + + struct Sptr_hash{ std::size_t operator()(const Simplex_ptr& s) const; }; + struct Sptr_equal{ std::size_t operator()(const Simplex_ptr& a, const Simplex_ptr& b) const; }; + /** The type of the sets of Simplex_ptr. + * \ingroup toplex_map */ + typedef std::unordered_set Simplex_ptr_set; + + /** \brief Adds the given simplex to the complex. + * Nothing happens if the simplex has a coface in the complex. + * \ingroup toplex_map */ + template + void insert_simplex(const Input_vertex_range &vertex_range); + + /** \brief Removes the given simplex and its cofaces from the complex. + * Its faces are kept inside. + * \ingroup toplex_map */ + template + void remove_simplex(const Input_vertex_range &vertex_range); + + /** Does a simplex belong to the complex ? 
+ * \ingroup toplex_map */ + template + bool membership(const Input_vertex_range &vertex_range) const; + + /** Does a simplex is a toplex ? + * \ingroup toplex_map */ + template + bool maximality(const Input_vertex_range &vertex_range) const; + + /** Gives a set of pointers to the maximal cofaces of a simplex. + * Gives the toplices if given the empty simplex. + * Gives not more than max_number maximal cofaces if max_number is strictly positive. + * \ingroup toplex_map */ + template + Simplex_ptr_set maximal_cofaces(const Input_vertex_range &vertex_range, const std::size_t max_number = 0) const; + + /** Contracts one edge in the complex. + * The edge has to verify the link condition if you want to preserve topology. + * Returns the remaining vertex. + * \ingroup toplex_map */ + Vertex contraction(const Vertex x, const Vertex y); + + /** Adds the given simplex to the complex. + * The simplex must not have neither maximal face nor coface in the complex. + * \ingroup toplex_map */ + template + void insert_independent_simplex(const Input_vertex_range &vertex_range); + + + /** \internal Removes a toplex without adding facets after. + * \ingroup toplex_map */ + void erase_maximal(const Simplex_ptr& sptr); + + /** Removes a vertex from any simplex containing it. + * \ingroup toplex_map */ + void remove_vertex(const Vertex x); + + /** \brief Number of maximal simplices. + * /!\ Not efficient ! + * \ingroup toplex_map */ + std::size_t num_simplices() const; + +protected: + /** \internal Gives an index in order to look for a simplex quickly. + * \ingroup toplex_map */ + template + Vertex best_index(const Input_vertex_range &vertex_range) const; + + /** \internal The map from vertices to toplices + * \ingroup toplex_map */ + std::unordered_map t0; + +}; + +typedef Toplex_map::Simplex_ptr Simplex_ptr; +typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + +// Pointers are also used as key in the hash sets. +template +Simplex_ptr get_key(const Input_vertex_range &vertex_range); + +// Is the first simplex a face of the second ? +template +bool included(const Input_vertex_range1 &vertex_range1, const Input_vertex_range2 &vertex_range2); + +// All the facets of the given simplex. 
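+// Each facet is obtained by removing exactly one vertex, so a simplex on n vertices has n facets.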
+template +std::vector facets(const Input_vertex_range &vertex_range); + +template +void Toplex_map::insert_simplex(const Input_vertex_range &vertex_range){ + if(membership(vertex_range)) return; + bool replace_facets = true; + for(const Simplex& facet : facets(vertex_range)) + if(!maximality(facet)) + { + replace_facets=false; + break; + } + if(replace_facets) + for(const Simplex& facet : facets(vertex_range)) + erase_maximal(get_key(facet)); + else + for(const Vertex& v : vertex_range) + if(t0.count(v)) for(const Simplex_ptr& fptr : Simplex_ptr_set(t0.at(v))) + //Copy constructor needed because the set is modified + if(included(*fptr,vertex_range)) erase_maximal(fptr); + // We erase all the maximal faces of the simplex + insert_independent_simplex(vertex_range); +} + +template +void Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ + if(vertex_range.begin()==vertex_range.end()) + t0.clear(); + // Removal of the empty simplex means cleaning everything + else { + const Vertex& v = best_index(vertex_range); + if(t0.count(v)) for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))) + //Copy constructor needed because the set is modified + if(included(vertex_range, *sptr)){ + erase_maximal(sptr); + for(const Simplex& f : facets(vertex_range)) + if(!membership(f)) insert_independent_simplex(f); + // We add the facets which are new maximal simplices + } + } +} + +template +bool Toplex_map::membership(const Input_vertex_range &vertex_range) const{ + if(t0.size()==0) return false; + const Vertex& v = best_index(vertex_range); + if(!t0.count(v)) return false; + if(maximality(vertex_range)) return true; + for(const Simplex_ptr& sptr : t0.at(v)) + if(included(vertex_range, *sptr)) + return true; + return false; +} + +template +bool Toplex_map::maximality(const Input_vertex_range &vertex_range) const{ + const Vertex& v = best_index(vertex_range); + if(!t0.count(v)) return false; + return t0.at(v).count(get_key(vertex_range)); +} + +template +Simplex_ptr_set Toplex_map::maximal_cofaces(const Input_vertex_range &vertex_range, const std::size_t max_number) const{ + Simplex_ptr_set cofaces; + if(maximality(vertex_range)) + cofaces.emplace(get_key(vertex_range)); + else if(vertex_range.begin()==vertex_range.end()) + for(const auto& kv : t0) + for(const Simplex_ptr& sptr : kv.second){ + //kv.second is a Simplex_ptr_set + cofaces.emplace(sptr); + if(cofaces.size()==max_number) + return cofaces; + } + else { + const Vertex& v = best_index(vertex_range); + if(t0.count(v)) for(const Simplex_ptr& sptr : t0.at(v)) + if(included(vertex_range, *sptr)){ + cofaces.emplace(sptr); + if(cofaces.size()==max_number) + return cofaces; + } + } + return cofaces; +} + +Vertex Toplex_map::contraction(const Vertex x, const Vertex y){ + if(!t0.count(x)) return y; + if(!t0.count(y)) return x; + int k, d; + if(t0.at(x).size() > t0.at(y).size()) + k=x, d=y; + else + k=y, d=x; + for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ + //Copy constructor needed because the set is modified + Simplex sigma(*sptr); + erase_maximal(sptr); + sigma.erase(d); + sigma.insert(k); + insert_simplex(sigma); + } + return k; +} + +template +void Toplex_map::insert_independent_simplex(const Input_vertex_range &vertex_range){ + for(const Vertex& v : vertex_range){ + if(!t0.count(v)) t0.emplace(v, Simplex_ptr_set()); + t0.at(v).emplace(get_key(vertex_range)); + } +} + +void Toplex_map::remove_vertex(const Vertex x){ + for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(x))){ + Simplex sigma(*sptr); + erase_maximal(sptr); + 
sigma.erase(x); + insert_simplex(sigma); + } +} + +std::size_t Toplex_map::num_simplices() const{ + return maximal_cofaces(Simplex()).size(); +} + +inline void Toplex_map::erase_maximal(const Simplex_ptr& sptr){ + Simplex sigma(*sptr); + if (sptr->size()==0) + sigma.insert(vertex_upper_bound); + for(const Vertex& v : sigma){ + t0.at(v).erase(sptr); + if(t0.at(v).size()==0) t0.erase(v); + } +} + +template +Vertex Toplex_map::best_index(const Input_vertex_range &vertex_range) const{ + std::size_t min = std::numeric_limits::max(); + Vertex arg_min = vertex_upper_bound; + for(const Vertex& v : vertex_range) + if(!t0.count(v)) return v; + else if(t0.at(v).size() < min) + min = t0.at(v).size(), arg_min = v; + return arg_min; +} + +std::size_t Toplex_map::Sptr_equal::operator()(const Simplex_ptr& s1, const Simplex_ptr& s2) const { + if (s1->size() != s2->size()) return false; + return included(*s1,*s2); + // inclusion tests equality for same size simplices +} + +std::size_t Toplex_map::Sptr_hash::operator()(const Simplex_ptr& s) const { + std::hash h_f; + //double hash works better than int hash + size_t h = 0; + for(const Vertex& v : *s) + h += h_f(static_cast(v)); + return h; +} + +template +Simplex_ptr get_key(const Input_vertex_range &vertex_range){ + Simplex s(vertex_range.begin(), vertex_range.end()); + return std::make_shared(s); +} + +template +bool included(const Input_vertex_range1 &vertex_range1, const Input_vertex_range2 &vertex_range2){ + Simplex s2(vertex_range2.begin(), vertex_range2.end()); + for(const Vertex& v : vertex_range1) + if(!s2.count(v)) return false; + return true; +} + +template +std::vector facets(const Input_vertex_range &vertex_range){ + std::vector facets; + Simplex f(vertex_range.begin(), vertex_range.end()); + for(const Vertex& v : vertex_range){ + f.erase(v); + facets.emplace_back(f); + f.insert(v); + } + return facets; +} + +} //namespace Gudhi + +#endif /* TOPLEX_MAP_H */ diff --git a/src/Toplex_map/test/CMakeLists.txt b/src/Toplex_map/test/CMakeLists.txt new file mode 100644 index 00000000..223ebccb --- /dev/null +++ b/src/Toplex_map/test/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 2.6) +project(Toplex_map_tests) + +add_executable ( ToplexMapUT test.cpp ) +target_link_libraries(ToplexMapUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + + +# Unitary tests +add_test(NAME SalUT + COMMAND ${CMAKE_CURRENT_BINARY_DIR}/ToplexMapUT + ${CMAKE_SOURCE_DIR}/src/Toplex_map/test/test.txt + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/ToplexMapUT.xml --log_level=test_suite --report_level=no) + diff --git a/src/Toplex_map/test/test.cpp b/src/Toplex_map/test/test.cpp new file mode 100644 index 00000000..3f4d96c2 --- /dev/null +++ b/src/Toplex_map/test/test.cpp @@ -0,0 +1,71 @@ +#include +#include +#include +#include + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "toplex map" +#include + +using namespace Gudhi; + +std::vector sigma1 = {1, 2, 3, 4}; +std::vector sigma2 = {5, 2, 3, 6}; +std::vector sigma3 = {5}; +std::vector sigma4 = {5, 2, 3}; +std::vector sigma5 = {5, 2, 7}; +std::vector sigma6 = {4, 5, 3}; +std::vector sigma7 = {4, 5, 9}; +std::vector sigma8 = {1, 2, 3, 6}; + + +BOOST_AUTO_TEST_CASE(toplexmap) { + Toplex_map K; + K.insert_simplex(sigma1); + K.insert_simplex(sigma2); + K.insert_simplex(sigma3); + K.insert_simplex(sigma6); + K.insert_simplex(sigma7); + BOOST_CHECK(K.membership(sigma4)); + BOOST_CHECK(!K.maximality(sigma5)); + BOOST_CHECK(!K.membership(sigma5)); + 
K.contraction(4,5); + BOOST_CHECK(!K.membership(sigma6)); +} + +BOOST_AUTO_TEST_CASE(ltoplexmap) { + Lazy_Toplex_map K; + K.insert_simplex(sigma1); + K.insert_simplex(sigma2); + K.insert_simplex(sigma3); + K.insert_simplex(sigma6); + K.insert_simplex(sigma7); + BOOST_CHECK(K.membership(sigma4)); + BOOST_CHECK(!K.membership(sigma5)); + K.contraction(4,5); + BOOST_CHECK(!K.membership(sigma6)); +} + +BOOST_AUTO_TEST_CASE(ftoplexmap) { + Filtered_toplex_map K; + K.insert_simplex_and_subfaces(sigma1, 2.); + K.insert_simplex_and_subfaces(sigma2, 2.); + K.insert_simplex_and_subfaces(sigma6, 1.); + K.insert_simplex_and_subfaces(sigma7, 1.); + BOOST_CHECK(K.filtration(sigma4)==2.); + BOOST_CHECK(K.filtration(sigma3)==1.); +} + +/* +BOOST_AUTO_TEST_CASE(toplexmap_candidates) { + Toplex_map K; + K.insert_simplex(sigma1); + K.insert_simplex(sigma2); + K.remove_simplex(sigma1); + K.remove_simplex(sigma2); + auto c = K.candidates(); + BOOST_CHECK(c.count(get_key(sigma1))); + BOOST_CHECK(c.count(get_key(sigma2))); + BOOST_CHECK(c.size()==2); +} +*/ -- cgit v1.2.3 From 64b6499858101c47374a9a6b24dd8ec001147982 Mon Sep 17 00:00:00 2001 From: fgodi Date: Fri, 13 Oct 2017 09:43:14 +0000 Subject: strong witness compatibility git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2785 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: eee09c30f4a5503f800761a7dbe4d08401b5bc1f --- src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index 5c7e7b12..60f8981a 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -14,6 +14,8 @@ public: typedef Simplex_ptr Simplex_handle; + typedef void Insertion_result_type; + /** \brief Inserts a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` in the simplicial * complex. 
*/ template -- cgit v1.2.3 From 8fd07bda067d82fd0d345c3bde0dce7de18a6722 Mon Sep 17 00:00:00 2001 From: fgodi Date: Fri, 13 Oct 2017 10:08:55 +0000 Subject: cmakelists git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2786 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 83c6616972403884967c825f20155cecf124c39e --- CMakeLists.txt | 1 + src/CMakeLists.txt | 1 + src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 13 +++++++++++-- .../example/example_strong_witness_complex_off.cpp | 4 +++- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index fbb359e1..f872b5df 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -47,6 +47,7 @@ add_gudhi_module(Skeleton_blocker) add_gudhi_module(Spatial_searching) add_gudhi_module(Subsampling) add_gudhi_module(Tangential_complex) +add_gudhi_module(Toplex_map) add_gudhi_module(Witness_complex) message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"") diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 795005b1..e1ae774a 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -23,6 +23,7 @@ add_gudhi_module(Skeleton_blocker) add_gudhi_module(Spatial_searching) add_gudhi_module(Subsampling) add_gudhi_module(Tangential_complex) +add_gudhi_module(Toplex_map) add_gudhi_module(Witness_complex) message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"") diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index 60f8981a..10ef39d7 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -26,7 +26,7 @@ public: void expansion(int max_dim); /** \brief Returns the number of vertices in the simplicial complex. */ - std::size_t num_vertices(); + std::size_t num_vertices() const; Simplex_ptr_set candidates() const; @@ -34,7 +34,7 @@ public: std::size_t num_simplices() const; - std::size_t num_vertices() const; + void set_dimension(int d); Simplex simplex_vertex_range(Simplex_ptr &sptr) const; @@ -59,6 +59,10 @@ protected: }; +void Fake_simplex_tree::set_dimension(int d){ + +} + template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ typename boost::graph_traits::edge_iterator e_it, @@ -149,6 +153,11 @@ Simplex Fake_simplex_tree::simplex_vertex_range(Simplex_ptr& sptr) const { std::unordered_set Fake_simplex_tree::filtration_simplex_range() const{ std::vector m = max_simplices(); + std::cout << m.size()<< std::endl; + std::cout << m.size()<< std::endl; + + std::cout << m.size()<< std::endl; + std::unordered_set seen; while(m.begin()!=m.end()){ Simplex_ptr& sptr = m.back(); diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp index 0ee9ee90..4a232481 100644 --- a/src/Witness_complex/example/example_strong_witness_complex_off.cpp +++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp @@ -21,6 +21,7 @@ */ #include +#include #include #include #include @@ -49,7 +50,8 @@ int main(int argc, char * const argv[]) { int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]); double alpha2 = atof(argv[3]); clock_t start, end; - Gudhi::Simplex_tree<> simplex_tree; + //Gudhi::Simplex_tree<> simplex_tree; + Gudhi::Fake_simplex_tree simplex_tree; // Read the point file Point_vector point_vector, landmarks; -- cgit v1.2.3 From 23e265d8c48d921a51eb0265afa6d8af27b27559 Mon Sep 17 00:00:00 2001 From: fgodi Date: Wed, 25 Oct 2017 11:19:00 +0000 Subject: include 
limits in toplex_map git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2804 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a42cc7c557c193c57e3696eb0b877f6196841aaf --- src/Rips_complex/example/CMakeLists.txt | 2 + .../example/example_rips_complex_from_fvecs.cpp | 67 ++++++++++ src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 142 +++++++++------------ src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 8 +- src/Toplex_map/include/gudhi/Toplex_map.h | 30 ++--- src/Witness_complex/example/CMakeLists.txt | 2 + .../example/example_strong_witness_complex_off.cpp | 4 +- 7 files changed, 153 insertions(+), 102 deletions(-) create mode 100644 src/Rips_complex/example/example_rips_complex_from_fvecs.cpp diff --git a/src/Rips_complex/example/CMakeLists.txt b/src/Rips_complex/example/CMakeLists.txt index 2940f164..b854b1c9 100644 --- a/src/Rips_complex/example/CMakeLists.txt +++ b/src/Rips_complex/example/CMakeLists.txt @@ -4,6 +4,8 @@ project(Rips_complex_examples) # Point cloud add_executable ( Rips_complex_example_from_off example_rips_complex_from_off_file.cpp ) +add_executable ( Rips_complex_example_from_fvecs example_rips_complex_from_fvecs.cpp ) + add_executable ( Rips_complex_example_one_skeleton_from_points example_one_skeleton_rips_from_points.cpp ) # Distance matrix diff --git a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp new file mode 100644 index 00000000..c05d038a --- /dev/null +++ b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp @@ -0,0 +1,67 @@ +#include +// to construct Rips_complex from a fvecs file of points +#include +#include +#include +#include + +#include + +#include +#include +#include + +void usage(int nbArgs, char * const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " filename.fvecs threshold dim_max [ouput_file.txt]\n"; + std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.fvecs 60.0\n"; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if ((argc != 4) && (argc != 5)) usage(argc, (argv[0] - 1)); + + std::string file_name(argv[1]); + double threshold = atof(argv[2]); + int dim_max = atoi(argv[3]); + + // Type definitions + using K = CGAL::Epick_d; + using Point = typename K::Point_d; + //using Simplex_tree = Gudhi::Simplex_tree<>; + using Simplex_tree = Gudhi::Fake_simplex_tree; + using Filtration_value = Simplex_tree::Filtration_value; + using Rips_complex = Gudhi::rips_complex::Rips_complex; + using Point_vector = std::vector; + + // ---------------------------------------------------------------------------- + // Init of a Rips complex from an fvecs file + // ---------------------------------------------------------------------------- + Point_vector point_vector; + Gudhi::load_points_from_fvecs_file(file_name, std::back_insert_iterator< Point_vector >(point_vector)); + + Rips_complex rips_complex_from_file(point_vector, threshold, Gudhi::Euclidean_distance()); + + std::streambuf* streambufffer; + std::ofstream ouput_file_stream; + + if (argc == 5) { + ouput_file_stream.open(std::string(argv[4])); + streambufffer = ouput_file_stream.rdbuf(); + } else { + streambufffer = std::cout.rdbuf(); + } + + Simplex_tree stree; + rips_complex_from_file.create_complex(stree, dim_max); + std::ostream output_stream(streambufffer); + + // ---------------------------------------------------------------------------- + // Display 
information about the Rips complex + // ---------------------------------------------------------------------------- + output_stream << "Rips complex is of dimension " << stree.dimension() << + " - " << stree.num_simplices() << " simplices." << std::endl; + + ouput_file_stream.close(); + return 0; +} diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index 10ef39d7..b318acb4 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -1,9 +1,12 @@ #ifndef FAKE_SIMPLEX_TREE_H #define FAKE_SIMPLEX_TREE_H +#include #include + #include + namespace Gudhi { class Fake_simplex_tree : public Filtered_toplex_map { @@ -12,7 +15,7 @@ public: typedef Vertex Vertex_handle; - typedef Simplex_ptr Simplex_handle; + typedef Simplex Simplex_handle; typedef void Insertion_result_type; @@ -36,20 +39,16 @@ public: void set_dimension(int d); - Simplex simplex_vertex_range(Simplex_ptr &sptr) const; + Simplex simplex_vertex_range(const Simplex& s) const; - std::vector max_simplices() const; + std::vector max_simplices() const; - std::unordered_set filtration_simplex_range() const; + std::vector filtration_simplex_range() const; - std::unordered_set skeleton_simplex_range(int d=std::numeric_limits::max()) const; + std::vector skeleton_simplex_range(int d=std::numeric_limits::max()) const; std::size_t dimension(Simplex_ptr& sptr) const; - void assign_filtration(Simplex_ptr& f_simplex, Filtration_value alpha_complex_filtration); - - void make_filtration_non_decreasing(); - protected: /** \internal Does all the facets of the given simplex belong to the complex ? @@ -65,28 +64,34 @@ void Fake_simplex_tree::set_dimension(int d){ template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - typename boost::graph_traits::edge_iterator e_it, - e_it_end; + if (boost::num_vertices(skel_graph) == 0) return; + typename boost::graph_traits::vertex_iterator v_it, v_it_end; + for (std::tie(v_it, v_it_end) = boost::vertices(skel_graph); v_it != v_it_end; ++v_it){ + Simplex s; + s.insert(*v_it); + insert_simplex_and_subfaces(s, boost::get(vertex_filtration_t(), skel_graph, *v_it)); + } + + typename boost::graph_traits::edge_iterator e_it, e_it_end; for (std::tie(e_it, e_it_end) = boost::edges(skel_graph); e_it != e_it_end; ++e_it) { - auto u = source(*e_it, skel_graph); - auto v = target(*e_it, skel_graph); - if(u, Toplex_map::Sptr_hash, Toplex_map::Sptr_equal> facets_to_max; + std::unordered_map, Sptr_hash, Sptr_equal> facets_to_max; for(const auto& kv : filtrations){ Simplex sigma (*(kv.first)); - for(Vertex v : sigma){ - sigma.erase(v); - auto sptr = get_key(sigma); - if(!facets_to_max.count(sptr)) facets_to_max.emplace(sptr, std::vector()); - facets_to_max.at(sptr).emplace_back(v); - sigma.insert(v); - } + if(sigma.size()>1) + for(Vertex v : *(kv.first)){ + sigma.erase(v); + auto sptr = get_key(sigma); + if(!facets_to_max.count(sptr)) + facets_to_max.emplace(sptr, std::vector()); + facets_to_max.at(sptr).emplace_back(v); + sigma.insert(v); + } } for(const auto& kv : facets_to_max){ std::unordered_set facets(kv.second.begin(), kv.second.end()); @@ -132,11 +139,12 @@ std::size_t Fake_simplex_tree::dimension() const { std::size_t max = 0; for(auto kv : filtrations) max = std::max(max, kv.first->size()); - return max; + return max-1; } std::size_t Fake_simplex_tree::num_simplices() const { - return filtration_simplex_range().size(); + //return filtration_simplex_range().size(); + return 
max_simplices().size(); } std::size_t Fake_simplex_tree::num_vertices() const { @@ -147,42 +155,40 @@ std::size_t Fake_simplex_tree::num_vertices() const { return vertices.size(); } -Simplex Fake_simplex_tree::simplex_vertex_range(Simplex_ptr& sptr) const { - return *sptr; +Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { + return s; } -std::unordered_set Fake_simplex_tree::filtration_simplex_range() const{ - std::vector m = max_simplices(); - std::cout << m.size()<< std::endl; - std::cout << m.size()<< std::endl; - - std::cout << m.size()<< std::endl; - - std::unordered_set seen; +std::vector Fake_simplex_tree::filtration_simplex_range() const{ + std::vector m = max_simplices(); + std::vector seen1; + Simplex_ptr_set seen2; while(m.begin()!=m.end()){ - Simplex_ptr& sptr = m.back(); + Simplex s(m.back()); m.pop_back(); - if(seen.find(sptr)!=seen.end()){ - seen.emplace(sptr); - for(Simplex& sigma : facets(*sptr)) - m.emplace_back(get_key(sigma)); + if(seen2.find(get_key(s))==seen2.end()){ + seen1.emplace_back(s); + seen2.emplace(get_key(s)); + if(s.size()>0) + for(Simplex& sigma : facets(s)) + m.emplace_back(sigma); } } - return seen; + return seen1; } -std::unordered_set Fake_simplex_tree::skeleton_simplex_range(int d) const{ - std::unordered_set simplices; - for(auto sptr: filtration_simplex_range()) - if(sptr->size()<=d) - simplices.emplace(sptr); +std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ + std::vector simplices; + for(auto s: filtration_simplex_range()) + if(s.size()<=d) + simplices.emplace_back(s); return simplices; } -std::vector Fake_simplex_tree::max_simplices() const{ - std::vector s; +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector s; for(auto kv : filtrations) - s.emplace_back(kv.first); + s.emplace_back(*(kv.first)); return s; } @@ -190,28 +196,6 @@ std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ return sptr->size(); } - -void Fake_simplex_tree::assign_filtration(Simplex_ptr& f_simplex, Filtration_value alpha_complex_filtration){ - filtrations.emplace(f_simplex,alpha_complex_filtration); -} - -void Fake_simplex_tree::make_filtration_non_decreasing(){ - for(auto yt = filtrations.begin(); yt != filtrations.end(); ++yt) - for (auto it = toplex_maps.begin(); it != toplex_maps.end(); ++it){ - if(it->first == yt -> second) - break; - if(it->second.membership(*(yt->first))) - for(const Simplex_ptr& sptr : it->second.maximal_cofaces(*(yt->first))){ - it->second.erase_maximal(sptr); - toplex_maps.at(yt->second).insert_simplex(*sptr); - filtrations.emplace(sptr,yt->second); - } - } - -} - - - } //namespace Gudhi #endif /* FAKE_SIMPLEX_TREE_H */ diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index 4b626f11..6d89c062 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -8,11 +8,11 @@ namespace Gudhi { -typedef double Filtration_value; - class Filtered_toplex_map { public: + typedef double Filtration_value; + template void insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = filtration_upper_bound); @@ -21,7 +21,7 @@ public: protected: std::unordered_map toplex_maps; - std::unordered_map filtrations; + std::unordered_map filtrations; }; @@ -33,7 +33,7 @@ void Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range & } template -Filtration_value Filtered_toplex_map::filtration(const Input_vertex_range 
&vertex_range) const{ +Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Input_vertex_range &vertex_range) const{ for(auto kv : toplex_maps) if(kv.second.membership(vertex_range)) return kv.first; diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index 0b6cad37..9de3a6be 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -5,6 +5,7 @@ #include #include #include +#include #define vertex_upper_bound std::numeric_limits::max() @@ -18,22 +19,22 @@ typedef std::size_t Vertex; * \ingroup toplex_map */ typedef std::unordered_set Simplex; +/** The type of the pointers to maximal simplices. + * \ingroup toplex_map */ +typedef std::shared_ptr Simplex_ptr; + +struct Sptr_hash{ std::size_t operator()(const Simplex_ptr& s) const; }; +struct Sptr_equal{ std::size_t operator()(const Simplex_ptr& a, const Simplex_ptr& b) const; }; +/** The type of the sets of Simplex_ptr. + * \ingroup toplex_map */ +typedef std::unordered_set Simplex_ptr_set; + /** A Toplex_map represents the simplicial complex. * A "toplex" is a maximal simplex. * \ingroup toplex_map */ class Toplex_map { public: - /** The type of the pointers to maximal simplices. - * \ingroup toplex_map */ - typedef std::shared_ptr Simplex_ptr; - - struct Sptr_hash{ std::size_t operator()(const Simplex_ptr& s) const; }; - struct Sptr_equal{ std::size_t operator()(const Simplex_ptr& a, const Simplex_ptr& b) const; }; - /** The type of the sets of Simplex_ptr. - * \ingroup toplex_map */ - typedef std::unordered_set Simplex_ptr_set; - /** \brief Adds the given simplex to the complex. * Nothing happens if the simplex has a coface in the complex. * \ingroup toplex_map */ @@ -75,7 +76,6 @@ public: template void insert_independent_simplex(const Input_vertex_range &vertex_range); - /** \internal Removes a toplex without adding facets after. * \ingroup toplex_map */ void erase_maximal(const Simplex_ptr& sptr); @@ -98,12 +98,8 @@ protected: /** \internal The map from vertices to toplices * \ingroup toplex_map */ std::unordered_map t0; - }; -typedef Toplex_map::Simplex_ptr Simplex_ptr; -typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; - // Pointers are also used as key in the hash sets. 
template Simplex_ptr get_key(const Input_vertex_range &vertex_range); @@ -261,13 +257,13 @@ Vertex Toplex_map::best_index(const Input_vertex_range &vertex_range) const{ return arg_min; } -std::size_t Toplex_map::Sptr_equal::operator()(const Simplex_ptr& s1, const Simplex_ptr& s2) const { +std::size_t Sptr_equal::operator()(const Simplex_ptr& s1, const Simplex_ptr& s2) const { if (s1->size() != s2->size()) return false; return included(*s1,*s2); // inclusion tests equality for same size simplices } -std::size_t Toplex_map::Sptr_hash::operator()(const Simplex_ptr& s) const { +std::size_t Sptr_hash::operator()(const Simplex_ptr& s) const { std::hash h_f; //double hash works better than int hash size_t h = 0; diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt index cbc53902..0f709409 100644 --- a/src/Witness_complex/example/CMakeLists.txt +++ b/src/Witness_complex/example/CMakeLists.txt @@ -14,6 +14,7 @@ install(TARGETS Witness_complex_example_nearest_landmark_table DESTINATION bin) if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) add_executable( Witness_complex_example_off example_witness_complex_off.cpp ) add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp ) +add_executable( Witness_complex_example_strong_fvecs example_strong_witness_complex_fvecs.cpp ) add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp ) add_executable ( Witness_complex_example_witness_persistence example_witness_complex_persistence.cpp ) @@ -44,6 +45,7 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) install(TARGETS Witness_complex_example_off DESTINATION bin) install(TARGETS Witness_complex_example_strong_off DESTINATION bin) + install(TARGETS Witness_complex_example_strong_fvecs DESTINATION bin) install(TARGETS Witness_complex_example_sphere DESTINATION bin) install(TARGETS Witness_complex_example_witness_persistence DESTINATION bin) install(TARGETS Witness_complex_example_strong_witness_persistence DESTINATION bin) diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp index 4a232481..6292e248 100644 --- a/src/Witness_complex/example/example_strong_witness_complex_off.cpp +++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp @@ -50,8 +50,8 @@ int main(int argc, char * const argv[]) { int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]); double alpha2 = atof(argv[3]); clock_t start, end; - //Gudhi::Simplex_tree<> simplex_tree; - Gudhi::Fake_simplex_tree simplex_tree; + Gudhi::Simplex_tree<> simplex_tree; + //Gudhi::Fake_simplex_tree simplex_tree; // Read the point file Point_vector point_vector, landmarks; -- cgit v1.2.3 From 03566e8a3a7f52f180bfa643b801f302c033f3fa Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 26 Oct 2017 15:20:03 +0000 Subject: boost clique algorithm for ribs git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2808 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 99ab7e5ce4cf1de7c710333ca1328c552499a446 --- .../example/example_rips_complex_from_fvecs.cpp | 71 ++++++-------- src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 108 ++++++--------------- src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 15 ++- 3 files changed, 74 insertions(+), 120 deletions(-) diff --git a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp index c05d038a..5e7667bd 100644 --- 
a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp +++ b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp @@ -10,58 +10,49 @@ #include #include #include +#include void usage(int nbArgs, char * const progName) { - std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.fvecs threshold dim_max [ouput_file.txt]\n"; - std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.fvecs 60.0\n"; - exit(-1); // ----- >> + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " filename.fvecs threshold dim_max [ouput_file.txt]\n"; + std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.fvecs 60.0\n"; + exit(-1); // ----- >> } int main(int argc, char **argv) { - if ((argc != 4) && (argc != 5)) usage(argc, (argv[0] - 1)); + if (argc != 4) usage(argc, (argv[0] - 1)); - std::string file_name(argv[1]); - double threshold = atof(argv[2]); - int dim_max = atoi(argv[3]); + std::string file_name(argv[1]); + double threshold = atof(argv[2]); + int dim_max = atoi(argv[3]); - // Type definitions - using K = CGAL::Epick_d; - using Point = typename K::Point_d; - //using Simplex_tree = Gudhi::Simplex_tree<>; - using Simplex_tree = Gudhi::Fake_simplex_tree; - using Filtration_value = Simplex_tree::Filtration_value; - using Rips_complex = Gudhi::rips_complex::Rips_complex; - using Point_vector = std::vector; + // Type definitions + using K = CGAL::Epick_d; + using Point = typename K::Point_d; + //using Simplex_tree = Gudhi::Simplex_tree<>; + using Simplex_tree = Gudhi::Fake_simplex_tree; + using Filtration_value = Simplex_tree::Filtration_value; + using Rips_complex = Gudhi::rips_complex::Rips_complex; + using Point_vector = std::vector; - // ---------------------------------------------------------------------------- - // Init of a Rips complex from an fvecs file - // ---------------------------------------------------------------------------- - Point_vector point_vector; - Gudhi::load_points_from_fvecs_file(file_name, std::back_insert_iterator< Point_vector >(point_vector)); + // ---------------------------------------------------------------------------- + // Init of a Rips complex from an fvecs file + // ---------------------------------------------------------------------------- + Point_vector point_vector; + Gudhi::load_points_from_fvecs_file(file_name, std::back_insert_iterator< Point_vector >(point_vector)); - Rips_complex rips_complex_from_file(point_vector, threshold, Gudhi::Euclidean_distance()); + Rips_complex rips_complex_from_file(point_vector, threshold, Gudhi::Euclidean_distance()); - std::streambuf* streambufffer; - std::ofstream ouput_file_stream; - if (argc == 5) { - ouput_file_stream.open(std::string(argv[4])); - streambufffer = ouput_file_stream.rdbuf(); - } else { - streambufffer = std::cout.rdbuf(); - } + Simplex_tree stree; - Simplex_tree stree; - rips_complex_from_file.create_complex(stree, dim_max); - std::ostream output_stream(streambufffer); + clock_t start, end; + start = clock(); + rips_complex_from_file.create_complex(stree, dim_max); + end = clock(); - // ---------------------------------------------------------------------------- - // Display information about the Rips complex - // ---------------------------------------------------------------------------- - output_stream << "Rips complex is of dimension " << stree.dimension() << - " - " << stree.num_simplices() << " simplices." 
<< std::endl; + std::cout << "Strong witness complex took "<< static_cast(end - start) / CLOCKS_PER_SEC << " s." << std::endl; + std::cout << "Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices." << std::endl; - ouput_file_stream.close(); - return 0; + return 0; } diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index b318acb4..6a0782ea 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -5,10 +5,26 @@ #include #include +#include +#define filtration_upper_bound std::numeric_limits::max() namespace Gudhi { +struct Visitor { + Toplex_map* tm; + + Visitor(Toplex_map* tm) + :tm(tm) + {} + + template + void clique(const Clique& c, const Graph& g) + { + tm->insert_simplex(c); + } +}; + class Fake_simplex_tree : public Filtered_toplex_map { public: @@ -31,14 +47,12 @@ public: /** \brief Returns the number of vertices in the simplicial complex. */ std::size_t num_vertices() const; - Simplex_ptr_set candidates() const; + Simplex_ptr_set candidates(int min_dim=-1) const; std::size_t dimension() const; std::size_t num_simplices() const; - void set_dimension(int d); - Simplex simplex_vertex_range(const Simplex& s) const; std::vector max_simplices() const; @@ -58,87 +72,26 @@ protected: }; -void Fake_simplex_tree::set_dimension(int d){ - -} - template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - if (boost::num_vertices(skel_graph) == 0) return; - typename boost::graph_traits::vertex_iterator v_it, v_it_end; - for (std::tie(v_it, v_it_end) = boost::vertices(skel_graph); v_it != v_it_end; ++v_it){ - Simplex s; - s.insert(*v_it); - insert_simplex_and_subfaces(s, boost::get(vertex_filtration_t(), skel_graph, *v_it)); - } - - typename boost::graph_traits::edge_iterator e_it, e_it_end; - for (std::tie(e_it, e_it_end) = boost::edges(skel_graph); e_it != e_it_end; ++e_it) { - Vertex u = source(*e_it, skel_graph); - Vertex v = target(*e_it, skel_graph); - if (u < v) { - Simplex s; - s.insert(u); - s.insert(v); - insert_simplex_and_subfaces(s, boost::get(edge_filtration_t(), skel_graph, *e_it)); - } - } + toplex_maps.emplace(filtration_upper_bound,Toplex_map()); + bron_kerbosch_all_cliques(skel_graph, Visitor(&(this->toplex_maps.at(filtration_upper_bound)))); } -void Fake_simplex_tree::expansion(int max_dim){ - for(int d=2; d <= max_dim; d++){ - Simplex_ptr_set cs = candidates(); //dimension ? - if(cs.empty()) std::cout << d << std::endl; - if(cs.empty()) return; - for(const Simplex_ptr& sptr: cs) - insert_simplex_and_subfaces(*sptr); //filtration ? 
- } -} +void Fake_simplex_tree::expansion(int max_dim){} template bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ Simplex sigma(vertex_range); for(const Simplex& s : facets(sigma)) - if(!filtrations.count(get_key(s))) return false; + if(!membership(s)) return false; return true; } -Simplex_ptr_set Fake_simplex_tree::candidates() const{ - Simplex_ptr_set c; - std::unordered_map, Sptr_hash, Sptr_equal> facets_to_max; - for(const auto& kv : filtrations){ - Simplex sigma (*(kv.first)); - if(sigma.size()>1) - for(Vertex v : *(kv.first)){ - sigma.erase(v); - auto sptr = get_key(sigma); - if(!facets_to_max.count(sptr)) - facets_to_max.emplace(sptr, std::vector()); - facets_to_max.at(sptr).emplace_back(v); - sigma.insert(v); - } - } - for(const auto& kv : facets_to_max){ - std::unordered_set facets(kv.second.begin(), kv.second.end()); - for(Vertex v : kv.second){ - facets.erase(v); - for(Vertex w : facets){ - Simplex sigma(*(kv.first)); - sigma.insert(v); - sigma.insert(w); - if(all_facets_inside(sigma)) - c.emplace(get_key(sigma)); - } - facets.emplace(v); - } - } - return c; -} - std::size_t Fake_simplex_tree::dimension() const { std::size_t max = 0; - for(auto kv : filtrations) - max = std::max(max, kv.first->size()); + for(const Simplex& s : max_simplices()) + max = std::max(max, s.size()); return max-1; } @@ -149,8 +102,8 @@ std::size_t Fake_simplex_tree::num_simplices() const { std::size_t Fake_simplex_tree::num_vertices() const { std::unordered_set vertices; - for(auto kv : filtrations) - for (Vertex v : *(kv.first)) + for(const Simplex& s : max_simplices()) + for (Vertex v : s) vertices.emplace(v); return vertices.size(); } @@ -179,17 +132,18 @@ std::vector Fake_simplex_tree::filtration_simplex_range() const{ std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ std::vector simplices; - for(auto s: filtration_simplex_range()) + for(const Simplex& s : max_simplices()) if(s.size()<=d) simplices.emplace_back(s); return simplices; } std::vector Fake_simplex_tree::max_simplices() const{ - std::vector s; - for(auto kv : filtrations) - s.emplace_back(*(kv.first)); - return s; + std::vector max_s; + for(auto kv : toplex_maps) + for(const Simplex_ptr& sptr : kv.second.maximal_cofaces(Simplex())) + max_s.emplace_back(*sptr); + return max_s; } std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index 6d89c062..5bf50fc5 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -19,19 +19,20 @@ public: template Filtration_value filtration(const Input_vertex_range &vertex_range) const; + template + bool membership(const Input_vertex_range &vertex_range) const; + protected: std::unordered_map toplex_maps; - std::unordered_map filtrations; - }; template void Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ if(!toplex_maps.count(f)) toplex_maps.emplace(f,Toplex_map()); toplex_maps.at(f).insert_simplex(vertex_range); - filtrations.emplace(get_key(vertex_range),f); } + template Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Input_vertex_range &vertex_range) const{ for(auto kv : toplex_maps) @@ -40,6 +41,14 @@ Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Inpu return filtration_upper_bound; } +template +bool 
Filtered_toplex_map::membership(const Input_vertex_range &vertex_range) const{ + for(auto kv : toplex_maps) + if(kv.second.membership(vertex_range)) + return true; + return false; +} + } //namespace Gudhi #endif /* FILTERED_TOPLEX_MAP_H */ -- cgit v1.2.3 From e5a2be33e10b0653258252451330c021fa0bc204 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 26 Oct 2017 15:57:09 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2809 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 67f6ba752b6284763cdc706d5dec3e100386c602 --- src/Tangential_complex/example/example_basic.cpp | 7 +++++-- src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 7 +++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/Tangential_complex/example/example_basic.cpp b/src/Tangential_complex/example/example_basic.cpp index 4f2b859e..39165397 100644 --- a/src/Tangential_complex/example/example_basic.cpp +++ b/src/Tangential_complex/example/example_basic.cpp @@ -1,5 +1,7 @@ #include #include +#include + #include #include @@ -20,7 +22,7 @@ CGAL::Parallel_tag> TC; int main(void) { const int INTRINSIC_DIM = 2; const int AMBIENT_DIM = 3; - const int NUM_POINTS = 1000; + const int NUM_POINTS = 100; Kernel k; @@ -36,7 +38,8 @@ int main(void) { tc.compute_tangential_complex(); // Export the TC into a Simplex_tree - Gudhi::Simplex_tree<> stree; + //Gudhi::Simplex_tree<> stree; + Gudhi::Fake_simplex_tree stree; tc.create_complex(stree); // Display stats about inconsistencies diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index 5bf50fc5..3a0064dc 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -14,7 +14,7 @@ public: typedef double Filtration_value; template - void insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = filtration_upper_bound); + std::pair insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = filtration_upper_bound); template Filtration_value filtration(const Input_vertex_range &vertex_range) const; @@ -27,9 +27,12 @@ protected: }; template -void Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ +std::pair Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ + Simplex s(vertex_range.begin(),vertex_range.end()); + if(membership(s)) return make_pair(s,false); if(!toplex_maps.count(f)) toplex_maps.emplace(f,Toplex_map()); toplex_maps.at(f).insert_simplex(vertex_range); + return make_pair(s,true); } -- cgit v1.2.3 From 0a0c72f24969de374396378bd0fb82af6f0bdbc5 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 16 Nov 2017 14:25:10 +0000 Subject: file added git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2891 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 4a89b348bd830123e99ed3f29b1b62526fbc5bbe --- .../example_strong_witness_complex_fvecs.cpp | 79 ++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp diff --git a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp new file mode 100644 index 00000000..a8e16fb0 --- /dev/null +++ b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp @@ -0,0 +1,79 @@ +/* This file is part 
of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Siargey Kachanovich + * + * Copyright (C) 2016 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include + +using K = CGAL::Epick_d; +using Point_d = typename K::Point_d; +using Witness_complex = Gudhi::witness_complex::Euclidean_strong_witness_complex; +using Point_vector = std::vector; + +int main(int argc, char * const argv[]) { + if (argc != 5) { + std::cerr << "Usage: " << argv[0] + << " path_to_point_file number_of_landmarks max_squared_alpha limit_dimension\n"; + return 0; + } + + std::string file_name = argv[1]; + int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]); + double alpha2 = atof(argv[3]); + clock_t start, end; + //Gudhi::Simplex_tree<> simplex_tree; + Gudhi::Fake_simplex_tree simplex_tree; + + // Read the point file + Point_vector point_vector, landmarks; + Gudhi::load_points_from_fvecs_file(file_name, std::back_insert_iterator< Point_vector >(point_vector)); + + + std::cout << "Successfully read " << point_vector.size() << " points.\n"; + std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n"; + + // Choose landmarks + Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks)); + + // Compute witness complex + start = clock(); + Witness_complex witness_complex(landmarks, + point_vector); + + witness_complex.create_complex(simplex_tree, alpha2, lim_dim); + end = clock(); + std::cout << "Strong witness complex took " + << static_cast(end - start) / CLOCKS_PER_SEC << " s. \n"; + std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << std::endl; + std::cout << "Max dimension is : " << simplex_tree.dimension() << std::endl; + +} -- cgit v1.2.3 From 8d3be7f7008eb06b001797510c965a2b2a4009a9 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 16 Nov 2017 15:12:14 +0000 Subject: set dimension deprecated, no ? 
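In other words, the toplex-map-backed wrapper recomputes the dimension from its maximal simplices, so the set_dimension() calls removed in the hunks below are redundant for it. A minimal sketch (vertex ids and the filtration value are illustrative, assuming the Fake_simplex_tree interface shown in the earlier hunks):

#include <gudhi/Fake_simplex_tree.h>
#include <iostream>
#include <vector>

int main() {
  Gudhi::Fake_simplex_tree st;
  std::vector<std::size_t> triangle = {0, 1, 2};   // illustrative vertex ids
  st.insert_simplex_and_subfaces(triangle, 0.5);   // illustrative filtration value
  // dimension() is recomputed as max |sigma| - 1 over the stored maximal
  // simplices, so no explicit set_dimension() call is needed.
  std::cout << st.dimension() << std::endl;        // prints 2
  return 0;
}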
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2892 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 84029be094a63f148cdadbd412aab630be9cd8db --- src/Witness_complex/include/gudhi/Strong_witness_complex.h | 1 - src/Witness_complex/include/gudhi/Witness_complex.h | 1 - 2 files changed, 2 deletions(-) diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h index 6f4bcf60..c18335d3 100644 --- a/src/Witness_complex/include/gudhi/Strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h @@ -127,7 +127,6 @@ class Strong_witness_complex { if ((Landmark_id)simplex.size() - 1 > complex_dim) complex_dim = simplex.size() - 1; } - complex.set_dimension(complex_dim); return true; } diff --git a/src/Witness_complex/include/gudhi/Witness_complex.h b/src/Witness_complex/include/gudhi/Witness_complex.h index bcfe8484..53c38520 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex.h +++ b/src/Witness_complex/include/gudhi/Witness_complex.h @@ -130,7 +130,6 @@ class Witness_complex { } k++; } - complex.set_dimension(k-1); return true; } -- cgit v1.2.3 From bf84494b3e7f3d2a36661b66defb131e515cdc5b Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 21 Nov 2017 18:38:25 +0000 Subject: ... git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2926 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d872eef9342046002b81f55cc12760e799df0c1d --- src/Toplex_map/benchmarks/CMakeLists.txt | 4 + src/Toplex_map/benchmarks/chrono.cpp | 137 +++++++++++++++++++++ src/Toplex_map/doc/Intro_Toplex_map.h | 8 +- src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 69 +++++------ src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 11 +- 5 files changed, 182 insertions(+), 47 deletions(-) create mode 100644 src/Toplex_map/benchmarks/CMakeLists.txt create mode 100644 src/Toplex_map/benchmarks/chrono.cpp diff --git a/src/Toplex_map/benchmarks/CMakeLists.txt b/src/Toplex_map/benchmarks/CMakeLists.txt new file mode 100644 index 00000000..2341fe06 --- /dev/null +++ b/src/Toplex_map/benchmarks/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 2.6) +project(Toplex_map_examples) + +add_executable(chrono chrono.cpp) diff --git a/src/Toplex_map/benchmarks/chrono.cpp b/src/Toplex_map/benchmarks/chrono.cpp new file mode 100644 index 00000000..d93d1e1f --- /dev/null +++ b/src/Toplex_map/benchmarks/chrono.cpp @@ -0,0 +1,137 @@ +#include +#include +#include + +#include +#include + +using namespace Gudhi; + +typedef Simplex typeVectorVertex; +typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; + +class ST_wrapper { + +public: + void insert_simplex(const Simplex& tau); + bool membership(const Simplex& tau); + Vertex contraction(const Vertex x, const Vertex y); + std::size_t num_simplices(); + +private: + Simplex_tree<> simplexTree; + void erase_max(const Simplex& sigma); +}; + +void ST_wrapper::insert_simplex(const Simplex& tau){ + simplexTree.insert_simplex_and_subfaces(tau); +} + +bool ST_wrapper::membership(const Simplex& tau) { + return simplexTree.find(tau) != simplexTree.null_simplex(); +} + +void ST_wrapper::erase_max(const Simplex& sigma){ + if(membership(sigma)) + simplexTree.remove_maximal_simplex(simplexTree.find(sigma)); +} + +Vertex ST_wrapper::contraction(const Vertex x, const Vertex y){ + Simplex sx; sx.insert(x); + auto hx = simplexTree.find(sx); + if(hx != simplexTree.null_simplex()) + for(auto h : 
simplexTree.cofaces_simplex_range(hx,0)){ + auto sr = simplexTree.simplex_vertex_range(h); + Simplex sigma(sr.begin(),sr.end()); + erase_max(sigma); + sigma.erase(x); + sigma.insert(y); + insert_simplex(sigma); + } + return y; +} + +std::size_t ST_wrapper::num_simplices(){ + return simplexTree.num_simplices(); +} + + + +int n = 300; + +int nb_insert_simplex1 = 3000; +int nb_membership1 = 4000; +int nb_contraction = 300; +int nb_insert_simplex2 = 3000; +int nb_membership2 = 400000; + +Simplex random_simplex(int n, int d){ + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(1, n); + Simplex s; + while(s.size()!=d) + s.insert(dis(gen)); + return s; +} + +std::vector r_vector_simplices(int n, int max_d, int m){ + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(1, max_d); + std::vector v; + for(int i=0; i +void chrono(int n, int d){ + complex_type K; + std::vector simplices_insert_simplex1 = r_vector_simplices(n,d,nb_insert_simplex1); + std::vector simplices_membership1 = r_vector_simplices(n,d,nb_membership1); + std::vector simplices_insert_simplex2 = r_vector_simplices(n - 2*nb_contraction,d,nb_insert_simplex2); + std::vector simplices_membership2 = r_vector_simplices(n - 2*nb_contraction,d,nb_membership2); + std::chrono::time_point start, end; + + for(const Simplex& s : simplices_insert_simplex1) + K.insert_simplex(s); + + for(const Simplex& s : simplices_membership1) + K.membership(s); + + start = std::chrono::system_clock::now(); + for(int i = 0; i<=nb_contraction; i++) + K.contraction(n-2*i,n-2*i-1); + end = std::chrono::system_clock::now(); + auto c3 = std::chrono::duration_cast(end-start).count(); + + start = std::chrono::system_clock::now(); + for(const Simplex& s : simplices_insert_simplex2) + K.insert_simplex(s); + end = std::chrono::system_clock::now(); + auto c1 = std::chrono::duration_cast(end-start).count(); + + start = std::chrono::system_clock::now(); + for(const Simplex& s : simplices_membership2) + K.membership(s); + end = std::chrono::system_clock::now(); + auto c2 = std::chrono::duration_cast(end-start).count(); + + std::cout << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_simplices() << std::endl; +} + +int main(){ + for(int d=5;d<=40;d+=5){ + std::cout << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl; + std::cout << "T Map \t \t"; + chrono(n,d); + std::cout << "Lazy \t \t"; + chrono(n,d); + if(d<=15){ + std::cout << "ST \t \t"; + chrono(n,d); + } + std::cout << std::endl; + } +} diff --git a/src/Toplex_map/doc/Intro_Toplex_map.h b/src/Toplex_map/doc/Intro_Toplex_map.h index da9562ec..6f4c1a1b 100644 --- a/src/Toplex_map/doc/Intro_Toplex_map.h +++ b/src/Toplex_map/doc/Intro_Toplex_map.h @@ -33,15 +33,15 @@ namespace Gudhi { * * \section toplexmapdefinition Definition * - * Let's consider a simplicial complex, denote by $d$ its dimension - * and by $k$ its number of maximal simplices. - * Furthermore, denote by $\gamma_0$ the maximal number of toplices, i.e. maximal simplices, + * Let's consider a simplicial complex, denote by \f$d\f$ its dimension + * and by \f$k\f$ its number of maximal simplices. + * Furthermore, denote by \f$\gamma_0\f$ the maximal number of toplices, i.e. maximal simplices, * that contain a same vertex. * * The goal of the Toplex Map is both to represent the complex in optimal * O(kd) space and to provide fast standard operations such as : insertion, removal * and membership of a simplex, contraction of an edge, collapses. 
The time needed - * for these operation is linear or quadratic in $\gamma_0$ and $d$. + * for these operation is linear or quadratic in \f$\gamma_0\f$ and \f$d\f$. * * Toplex map is composed firstly of a raw storage of toplices and secondly of a * map which associate any vertex to a set of pointers toward all toplices diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index 6a0782ea..3de962af 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -1,14 +1,14 @@ #ifndef FAKE_SIMPLEX_TREE_H #define FAKE_SIMPLEX_TREE_H +#include + #include #include #include #include -#define filtration_upper_bound std::numeric_limits::max() - namespace Gudhi { struct Visitor { @@ -35,33 +35,31 @@ public: typedef void Insertion_result_type; - /** \brief Inserts a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` in the simplicial + /** \brief Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` in the simplicial * complex. */ template void insert_graph(const OneSkeletonGraph& skel_graph); - /** \brief Expands the simplicial complex containing only its one skeleton until a given maximal dimension as - * explained in \ref ripsdefinition. */ + /** \brief Do nothing */ void expansion(int max_dim); - /** \brief Returns the number of vertices in the simplicial complex. */ + /** \brief Returns the number of vertices stored i.e. the number of max simplices */ std::size_t num_vertices() const; - Simplex_ptr_set candidates(int min_dim=-1) const; - std::size_t dimension() const; + std::size_t dimension(Simplex_ptr& sptr) const; + std::size_t num_simplices() const; Simplex simplex_vertex_range(const Simplex& s) const; std::vector max_simplices() const; - std::vector filtration_simplex_range() const; + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; - std::vector skeleton_simplex_range(int d=std::numeric_limits::max()) const; + std::vector skeleton_simplex_range(int d) const; - std::size_t dimension(Simplex_ptr& sptr) const; protected: @@ -74,8 +72,8 @@ protected: template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - toplex_maps.emplace(filtration_upper_bound,Toplex_map()); - bron_kerbosch_all_cliques(skel_graph, Visitor(&(this->toplex_maps.at(filtration_upper_bound)))); + toplex_maps.emplace(nan(""),Toplex_map()); + bron_kerbosch_all_cliques(skel_graph, Visitor(&(this->toplex_maps.at(nan(""))))); } void Fake_simplex_tree::expansion(int max_dim){} @@ -95,6 +93,10 @@ std::size_t Fake_simplex_tree::dimension() const { return max-1; } +std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ + return sptr->size(); +} + std::size_t Fake_simplex_tree::num_simplices() const { //return filtration_simplex_range().size(); return max_simplices().size(); @@ -112,42 +114,35 @@ Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { return s; } -std::vector Fake_simplex_tree::filtration_simplex_range() const{ +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector max_s; + for(auto kv : toplex_maps) + for(const Simplex_ptr& sptr : kv.second.maximal_cofaces(Simplex())) + max_s.emplace_back(*sptr); + return max_s; +} + +std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ std::vector m = max_simplices(); - std::vector seen1; - Simplex_ptr_set seen2; + std::vector range; + Simplex_ptr_set seen; while(m.begin()!=m.end()){ Simplex 
s(m.back()); m.pop_back(); - if(seen2.find(get_key(s))==seen2.end()){ - seen1.emplace_back(s); - seen2.emplace(get_key(s)); + if(seen.find(get_key(s))==seen.end()){ + if(s.size()-1<=d) + range.emplace_back(s); + seen.emplace(get_key(s)); if(s.size()>0) for(Simplex& sigma : facets(s)) m.emplace_back(sigma); } } - return seen1; + return range; } std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ - std::vector simplices; - for(const Simplex& s : max_simplices()) - if(s.size()<=d) - simplices.emplace_back(s); - return simplices; -} - -std::vector Fake_simplex_tree::max_simplices() const{ - std::vector max_s; - for(auto kv : toplex_maps) - for(const Simplex_ptr& sptr : kv.second.maximal_cofaces(Simplex())) - max_s.emplace_back(*sptr); - return max_s; -} - -std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ - return sptr->size(); + return filtration_simplex_range(d); } } //namespace Gudhi diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index 3a0064dc..a0c24304 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -2,10 +2,9 @@ #define FILTERED_TOPLEX_MAP_H #include +#include #include -#define filtration_upper_bound std::numeric_limits::max() - namespace Gudhi { class Filtered_toplex_map { @@ -14,7 +13,7 @@ public: typedef double Filtration_value; template - std::pair insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = filtration_upper_bound); + std::pair insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = nan("")); template Filtration_value filtration(const Input_vertex_range &vertex_range) const; @@ -23,7 +22,7 @@ public: bool membership(const Input_vertex_range &vertex_range) const; protected: - std::unordered_map toplex_maps; + std::map toplex_maps; }; template @@ -40,8 +39,8 @@ template Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Input_vertex_range &vertex_range) const{ for(auto kv : toplex_maps) if(kv.second.membership(vertex_range)) - return kv.first; - return filtration_upper_bound; + return kv.first; //min only because a map is ordered + return nan(""); } template -- cgit v1.2.3 From a4677295cf1dd3a8e02dd135348b321eae044104 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 21 Nov 2017 18:38:45 +0000 Subject: ... 
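To make the lookup rule above concrete: toplex_maps is now an ordered std::map keyed by filtration value, so filtration() returns the smallest value whose Toplex_map contains the query, and nan("") stands for an unknown filtration. A minimal sketch (simplices and values are illustrative, not taken from the patches):

#include <gudhi/Filtered_toplex_map.h>
#include <iostream>
#include <vector>

int main() {
  Gudhi::Filtered_toplex_map K;
  std::vector<std::size_t> ab = {1, 2}, abc = {1, 2, 3};  // illustrative simplices
  K.insert_simplex_and_subfaces(ab, 1.0);                 // illustrative filtration values
  K.insert_simplex_and_subfaces(abc, 2.0);
  // toplex_maps is traversed in increasing filtration order, so the first
  // class containing the query is also the smallest one.
  std::cout << K.filtration(ab) << " " << K.filtration(abc) << std::endl;  // prints: 1 2
  return 0;
}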
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2927 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 1b3f9508c2697619d27776a9824defaa02a1420f --- src/Toplex_map/example/CMakeLists.txt | 4 - src/Toplex_map/example/chrono.cpp | 137 ---------------------------------- 2 files changed, 141 deletions(-) delete mode 100644 src/Toplex_map/example/CMakeLists.txt delete mode 100644 src/Toplex_map/example/chrono.cpp diff --git a/src/Toplex_map/example/CMakeLists.txt b/src/Toplex_map/example/CMakeLists.txt deleted file mode 100644 index 2341fe06..00000000 --- a/src/Toplex_map/example/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(Toplex_map_examples) - -add_executable(chrono chrono.cpp) diff --git a/src/Toplex_map/example/chrono.cpp b/src/Toplex_map/example/chrono.cpp deleted file mode 100644 index d93d1e1f..00000000 --- a/src/Toplex_map/example/chrono.cpp +++ /dev/null @@ -1,137 +0,0 @@ -#include -#include -#include - -#include -#include - -using namespace Gudhi; - -typedef Simplex typeVectorVertex; -typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; - -class ST_wrapper { - -public: - void insert_simplex(const Simplex& tau); - bool membership(const Simplex& tau); - Vertex contraction(const Vertex x, const Vertex y); - std::size_t num_simplices(); - -private: - Simplex_tree<> simplexTree; - void erase_max(const Simplex& sigma); -}; - -void ST_wrapper::insert_simplex(const Simplex& tau){ - simplexTree.insert_simplex_and_subfaces(tau); -} - -bool ST_wrapper::membership(const Simplex& tau) { - return simplexTree.find(tau) != simplexTree.null_simplex(); -} - -void ST_wrapper::erase_max(const Simplex& sigma){ - if(membership(sigma)) - simplexTree.remove_maximal_simplex(simplexTree.find(sigma)); -} - -Vertex ST_wrapper::contraction(const Vertex x, const Vertex y){ - Simplex sx; sx.insert(x); - auto hx = simplexTree.find(sx); - if(hx != simplexTree.null_simplex()) - for(auto h : simplexTree.cofaces_simplex_range(hx,0)){ - auto sr = simplexTree.simplex_vertex_range(h); - Simplex sigma(sr.begin(),sr.end()); - erase_max(sigma); - sigma.erase(x); - sigma.insert(y); - insert_simplex(sigma); - } - return y; -} - -std::size_t ST_wrapper::num_simplices(){ - return simplexTree.num_simplices(); -} - - - -int n = 300; - -int nb_insert_simplex1 = 3000; -int nb_membership1 = 4000; -int nb_contraction = 300; -int nb_insert_simplex2 = 3000; -int nb_membership2 = 400000; - -Simplex random_simplex(int n, int d){ - std::random_device rd; - std::mt19937 gen(rd()); - std::uniform_int_distribution<> dis(1, n); - Simplex s; - while(s.size()!=d) - s.insert(dis(gen)); - return s; -} - -std::vector r_vector_simplices(int n, int max_d, int m){ - std::random_device rd; - std::mt19937 gen(rd()); - std::uniform_int_distribution<> dis(1, max_d); - std::vector v; - for(int i=0; i -void chrono(int n, int d){ - complex_type K; - std::vector simplices_insert_simplex1 = r_vector_simplices(n,d,nb_insert_simplex1); - std::vector simplices_membership1 = r_vector_simplices(n,d,nb_membership1); - std::vector simplices_insert_simplex2 = r_vector_simplices(n - 2*nb_contraction,d,nb_insert_simplex2); - std::vector simplices_membership2 = r_vector_simplices(n - 2*nb_contraction,d,nb_membership2); - std::chrono::time_point start, end; - - for(const Simplex& s : simplices_insert_simplex1) - K.insert_simplex(s); - - for(const Simplex& s : simplices_membership1) - K.membership(s); - - start = std::chrono::system_clock::now(); - for(int i = 0; 
i<=nb_contraction; i++) - K.contraction(n-2*i,n-2*i-1); - end = std::chrono::system_clock::now(); - auto c3 = std::chrono::duration_cast(end-start).count(); - - start = std::chrono::system_clock::now(); - for(const Simplex& s : simplices_insert_simplex2) - K.insert_simplex(s); - end = std::chrono::system_clock::now(); - auto c1 = std::chrono::duration_cast(end-start).count(); - - start = std::chrono::system_clock::now(); - for(const Simplex& s : simplices_membership2) - K.membership(s); - end = std::chrono::system_clock::now(); - auto c2 = std::chrono::duration_cast(end-start).count(); - - std::cout << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_simplices() << std::endl; -} - -int main(){ - for(int d=5;d<=40;d+=5){ - std::cout << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl; - std::cout << "T Map \t \t"; - chrono(n,d); - std::cout << "Lazy \t \t"; - chrono(n,d); - if(d<=15){ - std::cout << "ST \t \t"; - chrono(n,d); - } - std::cout << std::endl; - } -} -- cgit v1.2.3 From 6f4c7c0177b6ccf88b61056ea9d2ae2b066e056a Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 23 Nov 2017 17:18:13 +0000 Subject: 3 files - 3 docs git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2945 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ce6663f33da653f2ac520b4d0f3684b4776aec94 --- src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 53 ++++- src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 28 +++ src/Toplex_map/include/gudhi/Lazy_toplex_map.h | 218 --------------------- src/Toplex_map/include/gudhi/Toplex_map.h | 42 ++-- 4 files changed, 98 insertions(+), 243 deletions(-) delete mode 100644 src/Toplex_map/include/gudhi/Lazy_toplex_map.h diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index 3de962af..8876b56d 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -25,39 +25,78 @@ struct Visitor { } }; +/** Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. + * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. + * \ingroup toplex_map */ class Fake_simplex_tree : public Filtered_toplex_map { public: + /** Vertex is the type of vertices. + * \ingroup toplex_map */ + typedef Toplex_map::Vertex Vertex; + + /** Simplex is the type of simplices. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex Simplex; + + /** The type of the pointers to maximal simplices. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex_ptr Simplex_ptr; + + /** The type of the sets of Simplex_ptr. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + /** Handle type to a vertex contained in the simplicial complex. + * \ingroup toplex_map */ typedef Vertex Vertex_handle; + /** Handle type to a simplex contained in the simplicial complex. + * \ingroup toplex_map */ typedef Simplex Simplex_handle; typedef void Insertion_result_type; - /** \brief Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` in the simplicial - * complex. */ + /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` + * in the simplicial complex. + * \ingroup toplex_map */ template void insert_graph(const OneSkeletonGraph& skel_graph); - /** \brief Do nothing */ + /** Do actually nothing. 
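// Usage note, as a sketch (type aliases as in the Rips fvecs example earlier
// in this series; not prescribed by this header): insert_graph() already
// extracts every maximal clique of the one-skeleton with
// bron_kerbosch_all_cliques, so the flag complex is complete once the graph
// is inserted and expansion(max_dim) can legitimately do nothing; its
// max_dim argument is ignored.
//
//   Gudhi::Fake_simplex_tree stree;
//   Rips_complex rips_complex_from_file(point_vector, threshold, Gudhi::Euclidean_distance());
//   rips_complex_from_file.create_complex(stree, dim_max);  // insert_graph(), then the no-op expansion()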
+ * \ingroup toplex_map */ void expansion(int max_dim); - /** \brief Returns the number of vertices stored i.e. the number of max simplices */ + /** Returns the number of vertices stored i.e. the number of max simplices + * \ingroup toplex_map */ std::size_t num_vertices() const; + /** Returns the dimension of the complex. + * \ingroup toplex_map */ std::size_t dimension() const; + /** Returns the dimension of a given simplex in the complex. + * \ingroup toplex_map */ std::size_t dimension(Simplex_ptr& sptr) const; + /** Returns the number of simplices stored i.e. the number of maximal simplices. + * \ingroup toplex_map */ std::size_t num_simplices() const; + /** Returns a range over the vertices of a simplex. + * \ingroup toplex_map */ Simplex simplex_vertex_range(const Simplex& s) const; + /** Returns a set of all maximal (critical if there is filtration values) simplices. + * \ingroup toplex_map */ std::vector max_simplices() const; + /** Returns all the simplices, of max dimension d if a parameter d is given. + * \ingroup toplex_map */ std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + /** Returns all the simplices of max dimension d + * \ingroup toplex_map */ std::vector skeleton_simplex_range(int d) const; @@ -73,6 +112,12 @@ protected: template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ toplex_maps.emplace(nan(""),Toplex_map()); + using vertex_iterator = typename boost::graph_traits::vertex_iterator; + vertex_iterator vi, vi_end; + for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { + Simplex s; s.insert(*vi); + insert_simplex_and_subfaces(s); + } bron_kerbosch_all_cliques(skel_graph, Visitor(&(this->toplex_maps.at(nan(""))))); } diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index a0c24304..28814d15 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -7,17 +7,45 @@ namespace Gudhi { +/** A Filtered_toplex_map represents the simplicial complex with a filtration. + * A "toplex" is a critical simplex. + * \ingroup toplex_map */ class Filtered_toplex_map { public: + /** Vertex is the type of vertices. + * \ingroup toplex_map */ + typedef Toplex_map::Vertex Vertex; + + /** Simplex is the type of simplices. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex Simplex; + + /** The type of the pointers to maximal simplices. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex_ptr Simplex_ptr; + + /** The type of the sets of Simplex_ptr. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + + /** The type of the filtration values. + * \ingroup toplex_map */ typedef double Filtration_value; + /** Add a simplex and its subfaces with the given filtration value + * in the Filtered_toplex_map. + * \ingroup toplex_map */ template std::pair insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = nan("")); + /** Gives the filtration of the input simplex. + * \ingroup toplex_map */ template Filtration_value filtration(const Input_vertex_range &vertex_range) const; + /** Is the input simplex member of the complex ? 
+ * \ingroup toplex_map */ template bool membership(const Input_vertex_range &vertex_range) const; diff --git a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h deleted file mode 100644 index 3ffe8214..00000000 --- a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h +++ /dev/null @@ -1,218 +0,0 @@ -#ifndef LAZY_TOPLEX_MAP_H -#define LAZY_TOPLEX_MAP_H - -#include -#include - -namespace Gudhi { - -class Lazy_Toplex_map { - -public: - template - void insert_max_simplex(const Input_vertex_range &vertex_range); - template - bool insert_simplex(const Input_vertex_range &vertex_range); - template - void remove_simplex(const Input_vertex_range &vertex_range); - - template - bool membership(const Input_vertex_range &vertex_range); - template - bool all_facets_inside(const Input_vertex_range &vertex_range); - - Vertex contraction(const Vertex x, const Vertex y); - - std::size_t num_simplices() const; - -private: - template - void erase_max(const Input_vertex_range &vertex_range); - template - Vertex best_index(const Input_vertex_range &vertex_range); - void clean(const Vertex v); - - std::unordered_map t0; - bool empty_toplex; // Is the empty simplex a toplex ? - - typedef boost::heap::fibonacci_heap> PriorityQueue; - PriorityQueue cleaning_priority; - std::unordered_map cp_handles; - - std::unordered_map gamma0_lbounds; - std::size_t get_gamma0_lbound(const Vertex v) const; - - std::size_t size_lbound = 0; - std::size_t size = 0; - - const double alpha = 2; //time - const double betta = 3; //memory -}; - -template -void Lazy_Toplex_map::insert_max_simplex(const Input_vertex_range &vertex_range){ - for(const Vertex& v : vertex_range) - if(!gamma0_lbounds.count(v)) gamma0_lbounds.emplace(v,1); - else gamma0_lbounds[v]++; - size_lbound++; - insert_simplex(vertex_range); -} - -template -bool Lazy_Toplex_map::insert_simplex(const Input_vertex_range &vertex_range){ - Simplex sigma(vertex_range.begin(),vertex_range.end()); - empty_toplex = (sigma.size()==0); //vérifier la gestion de empty face - Simplex_ptr sptr = std::make_shared(sigma); - bool inserted = false; - for(const Vertex& v : sigma){ - if(!t0.count(v)){ - t0.emplace(v, Simplex_ptr_set()); - auto v_handle = cleaning_priority.push(std::make_pair(0, v)); - cp_handles.emplace(v, v_handle); - } - inserted = t0.at(v).emplace(sptr).second; - cleaning_priority.update(cp_handles.at(v), std::make_pair(t0.at(v).size() - get_gamma0_lbound(v),v)); - } - if(inserted) - size++; - if(size > size_lbound * betta) - clean(cleaning_priority.top().second); - return inserted; -} - -template -void Lazy_Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ - if(vertex_range.begin()==vertex_range.end()){ - t0.clear(); - gamma0_lbounds.clear(); - cleaning_priority.clear(); - size_lbound = 0; - size = 0; - empty_toplex = false; - } - else { - const Vertex& v = best_index(vertex_range); - //Copy constructor needed because the set is modified - if(t0.count(v)) for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))) - if(included(vertex_range, *sptr)){ - erase_max(*sptr); - for(const Simplex& f : facets(vertex_range)) - insert_max_simplex(f); - } - } -} - -template -bool Lazy_Toplex_map::membership(const Input_vertex_range &vertex_range){ - if(t0.size()==0 && !empty_toplex) return false; //empty complex - if(vertex_range.begin()==vertex_range.end()) return true; //empty query simplex - Vertex v = best_index(vertex_range); - if(!t0.count(v)) return false; - for(const Simplex_ptr& sptr : t0.at(v)) - 
if(included(vertex_range, *sptr)) return true; - return false; -} - -template -bool Lazy_Toplex_map::all_facets_inside(const Input_vertex_range &vertex_range){ - Simplex sigma(vertex_range.begin(),vertex_range.end()); - Vertex v = best_index(sigma); - if(!t0.count(v)) return false; - Simplex f = sigma; f.erase(v); - if(!membership(f)) return false; - std::unordered_set facets_inside; - for(const Simplex_ptr& sptr : t0.at(v)) - for(const Vertex& w : sigma){ - f = sigma; f.erase(w); - if(included(f, *sptr)) facets_inside.insert(w); - } - return facets_inside.size() == sigma.size() - 1; -} - -/* Returns the remaining vertex */ -Vertex Lazy_Toplex_map::contraction(const Vertex x, const Vertex y){ - if(!t0.count(x)) return y; - if(!t0.count(y)) return x; - Vertex k, d; - if(t0.at(x).size() > t0.at(y).size()) - k=x, d=y; - else - k=y, d=x; - //Copy constructor needed because the set is modified - for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ - Simplex sigma(*sptr); - erase_max(sigma); - sigma.erase(d); - sigma.insert(k); - insert_simplex(sigma); - } - t0.erase(d); - return k; -} - -/* No facets insert_simplexed */ -template -inline void Lazy_Toplex_map::erase_max(const Input_vertex_range &vertex_range){ - Simplex sigma(vertex_range.begin(),vertex_range.end()); - empty_toplex = false; - Simplex_ptr sptr = std::make_shared(sigma); - bool erased; - for(const Vertex& v : sigma){ - erased = t0.at(v).erase(sptr) > 0; - if(t0.at(v).size()==0) - t0.erase(v); - } - if (erased) - size--; -} - -template -Vertex Lazy_Toplex_map::best_index(const Input_vertex_range &vertex_range){ - Simplex tau(vertex_range.begin(),vertex_range.end()); - std::size_t min = std::numeric_limits::max(); Vertex arg_min = -1; - for(const Vertex& v : tau) - if(!t0.count(v)) return v; - else if(t0.at(v).size() < min) - min = t0.at(v).size(), arg_min = v; - if(min > alpha * get_gamma0_lbound(arg_min)) - clean(arg_min); - return arg_min; -} - -std::size_t Lazy_Toplex_map::get_gamma0_lbound(const Vertex v) const{ - return gamma0_lbounds.count(v) ? gamma0_lbounds.at(v) : 0; -} - - -void Lazy_Toplex_map::clean(const Vertex v){ - Toplex_map toplices; - std::unordered_map> dsorted_simplices; - int max_dim = 0; - for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))){ - if(sptr->size() > max_dim){ - for(int d = max_dim+1; d<=sptr->size(); d++) - dsorted_simplices.emplace(d, std::vector()); - max_dim = sptr->size(); - } - dsorted_simplices[sptr->size()].emplace_back(*sptr); - erase_max(*sptr); - } - for(int d = max_dim; d>=1; d--) - for(const Simplex &s : dsorted_simplices.at(d)) - if(!toplices.membership(s)) - toplices.insert_independent_simplex(s); - Simplex sv; sv.insert(v); - auto clean_cofaces = toplices.maximal_cofaces(sv); - size_lbound = size_lbound - get_gamma0_lbound(v) + clean_cofaces.size(); - gamma0_lbounds[v] = clean_cofaces.size(); - for(const Simplex_ptr& sptr : clean_cofaces) - insert_simplex(*sptr); -} - -std::size_t Lazy_Toplex_map::num_simplices() const{ - return size; -} - -} //namespace Gudhi - -#endif /* LAZY_TOPLEX_MAP_H */ diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index 9de3a6be..b433f3de 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -11,30 +11,31 @@ namespace Gudhi { -/** Vertex is the type of vertices. - * \ingroup toplex_map */ -typedef std::size_t Vertex; - -/** Simplex is the type of simplices. 
- * \ingroup toplex_map */ -typedef std::unordered_set Simplex; - -/** The type of the pointers to maximal simplices. - * \ingroup toplex_map */ -typedef std::shared_ptr Simplex_ptr; - -struct Sptr_hash{ std::size_t operator()(const Simplex_ptr& s) const; }; -struct Sptr_equal{ std::size_t operator()(const Simplex_ptr& a, const Simplex_ptr& b) const; }; -/** The type of the sets of Simplex_ptr. - * \ingroup toplex_map */ -typedef std::unordered_set Simplex_ptr_set; - /** A Toplex_map represents the simplicial complex. * A "toplex" is a maximal simplex. * \ingroup toplex_map */ class Toplex_map { public: + + /** Vertex is the type of vertices. + * \ingroup toplex_map */ + typedef std::size_t Vertex; + + /** Simplex is the type of simplices. + * \ingroup toplex_map */ + typedef std::unordered_set Simplex; + + /** The type of the pointers to maximal simplices. + * \ingroup toplex_map */ + typedef std::shared_ptr Simplex_ptr; + + struct Sptr_hash{ std::size_t operator()(const Simplex_ptr& s) const; }; + struct Sptr_equal{ std::size_t operator()(const Simplex_ptr& a, const Simplex_ptr& b) const; }; + /** The type of the sets of Simplex_ptr. + * \ingroup toplex_map */ + typedef std::unordered_set Simplex_ptr_set; + /** \brief Adds the given simplex to the complex. * Nothing happens if the simplex has a coface in the complex. * \ingroup toplex_map */ @@ -58,7 +59,7 @@ public: bool maximality(const Input_vertex_range &vertex_range) const; /** Gives a set of pointers to the maximal cofaces of a simplex. - * Gives the toplices if given the empty simplex. + * Gives all the toplices if given the empty simplex. * Gives not more than max_number maximal cofaces if max_number is strictly positive. * \ingroup toplex_map */ template @@ -85,8 +86,7 @@ public: void remove_vertex(const Vertex x); /** \brief Number of maximal simplices. - * /!\ Not efficient ! 
- * \ingroup toplex_map */ + * \ingroup toplex_map */ std::size_t num_simplices() const; protected: -- cgit v1.2.3 From a983b3bec81a6909c75f6760281cb09ab296123e Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 23 Nov 2017 17:20:14 +0000 Subject: test name git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2946 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d663b0f8578cb9a3ebced7d170bae5c924a02d78 --- src/Toplex_map/test/CMakeLists.txt | 2 +- src/Toplex_map/test/test.cpp | 71 ---------------------------- src/Toplex_map/test/toplex_map_unit_test.cpp | 71 ++++++++++++++++++++++++++++ 3 files changed, 72 insertions(+), 72 deletions(-) delete mode 100644 src/Toplex_map/test/test.cpp create mode 100644 src/Toplex_map/test/toplex_map_unit_test.cpp diff --git a/src/Toplex_map/test/CMakeLists.txt b/src/Toplex_map/test/CMakeLists.txt index 223ebccb..25fcabac 100644 --- a/src/Toplex_map/test/CMakeLists.txt +++ b/src/Toplex_map/test/CMakeLists.txt @@ -6,7 +6,7 @@ target_link_libraries(ToplexMapUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAM # Unitary tests -add_test(NAME SalUT +add_test(NAME ToplexMapUT COMMAND ${CMAKE_CURRENT_BINARY_DIR}/ToplexMapUT ${CMAKE_SOURCE_DIR}/src/Toplex_map/test/test.txt # XML format for Jenkins xUnit plugin diff --git a/src/Toplex_map/test/test.cpp b/src/Toplex_map/test/test.cpp deleted file mode 100644 index 3f4d96c2..00000000 --- a/src/Toplex_map/test/test.cpp +++ /dev/null @@ -1,71 +0,0 @@ -#include -#include -#include -#include - -#define BOOST_TEST_DYN_LINK -#define BOOST_TEST_MODULE "toplex map" -#include - -using namespace Gudhi; - -std::vector sigma1 = {1, 2, 3, 4}; -std::vector sigma2 = {5, 2, 3, 6}; -std::vector sigma3 = {5}; -std::vector sigma4 = {5, 2, 3}; -std::vector sigma5 = {5, 2, 7}; -std::vector sigma6 = {4, 5, 3}; -std::vector sigma7 = {4, 5, 9}; -std::vector sigma8 = {1, 2, 3, 6}; - - -BOOST_AUTO_TEST_CASE(toplexmap) { - Toplex_map K; - K.insert_simplex(sigma1); - K.insert_simplex(sigma2); - K.insert_simplex(sigma3); - K.insert_simplex(sigma6); - K.insert_simplex(sigma7); - BOOST_CHECK(K.membership(sigma4)); - BOOST_CHECK(!K.maximality(sigma5)); - BOOST_CHECK(!K.membership(sigma5)); - K.contraction(4,5); - BOOST_CHECK(!K.membership(sigma6)); -} - -BOOST_AUTO_TEST_CASE(ltoplexmap) { - Lazy_Toplex_map K; - K.insert_simplex(sigma1); - K.insert_simplex(sigma2); - K.insert_simplex(sigma3); - K.insert_simplex(sigma6); - K.insert_simplex(sigma7); - BOOST_CHECK(K.membership(sigma4)); - BOOST_CHECK(!K.membership(sigma5)); - K.contraction(4,5); - BOOST_CHECK(!K.membership(sigma6)); -} - -BOOST_AUTO_TEST_CASE(ftoplexmap) { - Filtered_toplex_map K; - K.insert_simplex_and_subfaces(sigma1, 2.); - K.insert_simplex_and_subfaces(sigma2, 2.); - K.insert_simplex_and_subfaces(sigma6, 1.); - K.insert_simplex_and_subfaces(sigma7, 1.); - BOOST_CHECK(K.filtration(sigma4)==2.); - BOOST_CHECK(K.filtration(sigma3)==1.); -} - -/* -BOOST_AUTO_TEST_CASE(toplexmap_candidates) { - Toplex_map K; - K.insert_simplex(sigma1); - K.insert_simplex(sigma2); - K.remove_simplex(sigma1); - K.remove_simplex(sigma2); - auto c = K.candidates(); - BOOST_CHECK(c.count(get_key(sigma1))); - BOOST_CHECK(c.count(get_key(sigma2))); - BOOST_CHECK(c.size()==2); -} -*/ diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp new file mode 100644 index 00000000..3f4d96c2 --- /dev/null +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -0,0 +1,71 @@ +#include +#include +#include +#include + +#define BOOST_TEST_DYN_LINK 
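// The cases below exercise, in order: Toplex_map (membership of a face of a
// stored toplex, non-maximality and non-membership of {5,2,7}, contraction of
// the edge {4,5}), the Lazy_Toplex_map variant under the same operations, and
// Filtered_toplex_map, whose filtration() of a face is the smallest
// filtration value among the toplices that contain it.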
+#define BOOST_TEST_MODULE "toplex map" +#include + +using namespace Gudhi; + +std::vector sigma1 = {1, 2, 3, 4}; +std::vector sigma2 = {5, 2, 3, 6}; +std::vector sigma3 = {5}; +std::vector sigma4 = {5, 2, 3}; +std::vector sigma5 = {5, 2, 7}; +std::vector sigma6 = {4, 5, 3}; +std::vector sigma7 = {4, 5, 9}; +std::vector sigma8 = {1, 2, 3, 6}; + + +BOOST_AUTO_TEST_CASE(toplexmap) { + Toplex_map K; + K.insert_simplex(sigma1); + K.insert_simplex(sigma2); + K.insert_simplex(sigma3); + K.insert_simplex(sigma6); + K.insert_simplex(sigma7); + BOOST_CHECK(K.membership(sigma4)); + BOOST_CHECK(!K.maximality(sigma5)); + BOOST_CHECK(!K.membership(sigma5)); + K.contraction(4,5); + BOOST_CHECK(!K.membership(sigma6)); +} + +BOOST_AUTO_TEST_CASE(ltoplexmap) { + Lazy_Toplex_map K; + K.insert_simplex(sigma1); + K.insert_simplex(sigma2); + K.insert_simplex(sigma3); + K.insert_simplex(sigma6); + K.insert_simplex(sigma7); + BOOST_CHECK(K.membership(sigma4)); + BOOST_CHECK(!K.membership(sigma5)); + K.contraction(4,5); + BOOST_CHECK(!K.membership(sigma6)); +} + +BOOST_AUTO_TEST_CASE(ftoplexmap) { + Filtered_toplex_map K; + K.insert_simplex_and_subfaces(sigma1, 2.); + K.insert_simplex_and_subfaces(sigma2, 2.); + K.insert_simplex_and_subfaces(sigma6, 1.); + K.insert_simplex_and_subfaces(sigma7, 1.); + BOOST_CHECK(K.filtration(sigma4)==2.); + BOOST_CHECK(K.filtration(sigma3)==1.); +} + +/* +BOOST_AUTO_TEST_CASE(toplexmap_candidates) { + Toplex_map K; + K.insert_simplex(sigma1); + K.insert_simplex(sigma2); + K.remove_simplex(sigma1); + K.remove_simplex(sigma2); + auto c = K.candidates(); + BOOST_CHECK(c.count(get_key(sigma1))); + BOOST_CHECK(c.count(get_key(sigma2))); + BOOST_CHECK(c.size()==2); +} +*/ -- cgit v1.2.3 From 5430f6a24b6909f1d63cd2028e88ef2d69fb8a0d Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 23 Nov 2017 17:27:56 +0000 Subject: examples added git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@2947 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 1f1710698fae160c3ad9f55f402e11f200877f88 --- src/Toplex_map/example/Simple_toplex_map.cpp | 214 +++++++++++++++++++++ .../example/Toplex_map_from_cliques_of_graph.cpp | 94 +++++++++ src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 2 +- 3 files changed, 309 insertions(+), 1 deletion(-) create mode 100644 src/Toplex_map/example/Simple_toplex_map.cpp create mode 100644 src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp diff --git a/src/Toplex_map/example/Simple_toplex_map.cpp b/src/Toplex_map/example/Simple_toplex_map.cpp new file mode 100644 index 00000000..b165af8a --- /dev/null +++ b/src/Toplex_map/example/Simple_toplex_map.cpp @@ -0,0 +1,214 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2017 + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include +#include + +#include +#include // for pair +#include + +using Toplex_map = Gudhi::Fake_simplex_tree; +using typeVectorVertex = std::vector< Toplex_map::Vertex_handle >; +using typePairSimplexBool = std::pair< Toplex_map::Simplex_handle, bool >; + +int main(int argc, char * const argv[]) { + + // TEST OF INSERTION + std::cout << "********************************************************************" << std::endl; + std::cout << "EXAMPLE OF SIMPLE INSERTION" << std::endl; + // Construct the Toplex_map + Toplex_map t_map; + + /* Simplex to be inserted: */ + /* 1 */ + /* o */ + /* /X\ */ + /* o---o---o */ + /* 2 0 3 */ + + // ++ FIRST + std::cout << " * INSERT 0" << std::endl; + typeVectorVertex firstSimplexVector = { 0 }; + typePairSimplexBool returnValue = t_map.insert_simplex_and_subfaces(firstSimplexVector, 0.1); + + if (returnValue.second == true) { + std::cout << " + 0 INSERTED" << std::endl; + } else { + std::cout << " - 0 NOT INSERTED" << std::endl; + } + + // ++ SECOND + std::cout << " * INSERT 1" << std::endl; + typeVectorVertex secondSimplexVector = { 1 }; + returnValue = t_map.insert_simplex_and_subfaces(secondSimplexVector, 0.1); + + if (returnValue.second == true) { + std::cout << " + 1 INSERTED" << std::endl; + } else { + std::cout << " - 1 NOT INSERTED" << std::endl; + } + + // ++ THIRD + std::cout << " * INSERT (0,1)" << std::endl; + typeVectorVertex thirdSimplexVector = { 0, 1 }; + returnValue = + t_map.insert_simplex_and_subfaces(thirdSimplexVector, 0.2); + + if (returnValue.second == true) { + std::cout << " + (0,1) INSERTED" << std::endl; + } else { + std::cout << " - (0,1) NOT INSERTED" << std::endl; + } + + // ++ FOURTH + std::cout << " * INSERT 2" << std::endl; + typeVectorVertex fourthSimplexVector = { 2 }; + returnValue = + t_map.insert_simplex_and_subfaces(fourthSimplexVector, 0.1); + + if (returnValue.second == true) { + std::cout << " + 2 INSERTED" << std::endl; + } else { + std::cout << " - 2 NOT INSERTED" << std::endl; + } + + // ++ FIFTH + std::cout << " * INSERT (2,0)" << std::endl; + typeVectorVertex fifthSimplexVector = { 2, 0 }; + returnValue = + t_map.insert_simplex_and_subfaces(fifthSimplexVector, 0.2); + + if (returnValue.second == true) { + std::cout << " + (2,0) INSERTED" << std::endl; + } else { + std::cout << " - (2,0) NOT INSERTED" << std::endl; + } + + // ++ SIXTH + std::cout << " * INSERT (2,1)" << std::endl; + typeVectorVertex sixthSimplexVector = { 2, 1 }; + returnValue = + t_map.insert_simplex_and_subfaces(sixthSimplexVector, 0.2); + + if (returnValue.second == true) { + std::cout << " + (2,1) INSERTED" << std::endl; + } else { + std::cout << " - (2,1) NOT INSERTED" << std::endl; + } + + // ++ SEVENTH + std::cout << " * INSERT (2,1,0)" << std::endl; + typeVectorVertex seventhSimplexVector = { 2, 1, 0 }; + returnValue = + t_map.insert_simplex_and_subfaces(seventhSimplexVector, 0.3); + + if (returnValue.second == true) { + std::cout << " + (2,1,0) INSERTED" << std::endl; + } else { + std::cout << " - (2,1,0) NOT INSERTED" << std::endl; + } + + // ++ EIGHTH + std::cout << " * INSERT 3" << std::endl; + typeVectorVertex eighthSimplexVector = { 3 }; + returnValue = + t_map.insert_simplex_and_subfaces(eighthSimplexVector, 0.1); + + if (returnValue.second == true) { + std::cout << " + 3 INSERTED" << std::endl; + } else { + std::cout << " - 3 NOT INSERTED" << std::endl; + } + + // ++ NINETH + std::cout << " * INSERT (3,0)" << std::endl; + typeVectorVertex ninethSimplexVector = { 3, 0 }; + returnValue = + 
t_map.insert_simplex_and_subfaces(ninethSimplexVector, 0.2); + + if (returnValue.second == true) { + std::cout << " + (3,0) INSERTED" << std::endl; + } else { + std::cout << " - (3,0) NOT INSERTED" << std::endl; + } + + // ++ TENTH + std::cout << " * INSERT 0 (already inserted)" << std::endl; + typeVectorVertex tenthSimplexVector = { 0 }; + // With a different filtration value + returnValue = t_map.insert_simplex_and_subfaces(tenthSimplexVector, 0.4); + + if (returnValue.second == true) { + std::cout << " + 0 INSERTED" << std::endl; + } else { + std::cout << " - 0 NOT INSERTED" << std::endl; + } + + // ++ ELEVENTH + std::cout << " * INSERT (2,1,0) (already inserted)" << std::endl; + typeVectorVertex eleventhSimplexVector = { 2, 1, 0 }; + returnValue = + t_map.insert_simplex_and_subfaces(eleventhSimplexVector, 0.4); + + if (returnValue.second == true) { + std::cout << " + (2,1,0) INSERTED" << std::endl; + } else { + std::cout << " - (2,1,0) NOT INSERTED" << std::endl; + } + + // ++ GENERAL VARIABLE SET + + std::cout << "********************************************************************\n"; + // Display the Simplex_tree - Can not be done in the middle of 2 inserts + std::cout << "* The complex contains " << t_map.num_vertices() << " vertices and " << t_map.num_simplices() + << " simplices - dimension is " << t_map.dimension() << "\n"; + std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n"; + for (auto f_simplex : t_map.filtration_simplex_range()) { + std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; + for (auto vertex : t_map.simplex_vertex_range(f_simplex)) + std::cout << "(" << vertex << ")"; + std::cout << std::endl; + } + // [0.1] 0 + // [0.1] 1 + // [0.1] 2 + // [0.1] 3 + // [0.2] 1 0 + // [0.2] 2 0 + // [0.2] 2 1 + // [0.2] 3 0 + // [0.3] 2 1 0 + + std::cout << std::endl << std::endl; + + std::cout << "Iterator on skeleton:" << std::endl; + for (auto f_simplex : t_map.skeleton_simplex_range()) { + std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; + for (auto vertex : t_map.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; + } + + return 0; +} diff --git a/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp b/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp new file mode 100644 index 00000000..aad31554 --- /dev/null +++ b/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp @@ -0,0 +1,94 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+#include 
+#include 
+
+#include 
+#include 
+#include 
+#include   // for std::pair
+
+using Toplex_map = Gudhi::Fake_simplex_tree;
+using Vertex_handle = Toplex_map::Vertex_handle;
+using Filtration_value = Toplex_map::Filtration_value;
+
+typedef boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS,
+    boost::property < vertex_filtration_t, Filtration_value >,
+    boost::property < edge_filtration_t, Filtration_value > > Graph_t;
+
+int main(int argc, char * const argv[]) {
+  if (argc != 3) {
+    std::cerr << "Usage: " << argv[0]
+        << " path_to_file_graph max_dim \n";
+    return 0;
+  }
+  std::string filegraph = argv[1];
+  int max_dim = atoi(argv[2]);
+
+  clock_t start, end;
+  // Construct the Toplex Map
+  Toplex_map t_map;
+
+  start = clock();
+  auto g = Gudhi::read_graph(filegraph);
+  // insert the graph in the toplex map as 1-skeleton
+  t_map.insert_graph(g);
+  end = clock();
+  std::cout << "Insert the 1-skeleton in the toplex map in "
+      << static_cast(end - start) / CLOCKS_PER_SEC << " s. \n";
+
+  start = clock();
+  // expand the 1-skeleton until dimension max_dim
+  t_map.expansion(max_dim);
+  end = clock();
+  std::cout << "max_dim = " << max_dim << "\n";
+  std::cout << "Expand the toplex map in "
+      << static_cast(end - start) / CLOCKS_PER_SEC << " s. \n";
+
+  std::cout << "Information of the toplex map: " << std::endl;
+  std::cout << " Number of vertices = " << t_map.num_vertices() << " ";
+  std::cout << " Number of simplices = " << t_map.num_simplices() << std::endl;
+  std::cout << std::endl << std::endl;
+
+  std::cout << "Iterator on Simplices in the filtration:" << std::endl;
+  for (auto f_simplex : t_map.filtration_simplex_range()) {
+    std::cout << " " << "[" << t_map.filtration(f_simplex) << "] ";
+    for (auto vertex : t_map.simplex_vertex_range(f_simplex)) {
+      std::cout << vertex << " ";
+    }
+    std::cout << std::endl;
+  }
+
+  std::cout << std::endl << std::endl;
+
+  std::cout << "Iterator on skeleton:" << std::endl;
+  for (auto f_simplex : t_map.skeleton_simplex_range()) {
+    std::cout << " " << "[" << t_map.filtration(f_simplex) << "] ";
+    for (auto vertex : t_map.simplex_vertex_range(f_simplex)) {
+      std::cout << vertex << " ";
+    }
+    std::cout << std::endl;
+  }
+  return 0;
+}
diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h
index 8876b56d..104f1742 100644
--- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h
+++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h
@@ -175,7 +175,7 @@ std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{
         Simplex s(m.back());
         m.pop_back();
         if(seen.find(get_key(s))==seen.end()){
-            if(s.size()-1<=d)
+            if((int) s.size()-1 <=d)
                 range.emplace_back(s);
             seen.emplace(get_key(s));
             if(s.size()>0)
--
cgit v1.2.3


--
cgit v1.2.3


From 567ca812736b3c05b80219bea85169c406d9a279 Mon Sep 17 00:00:00 2001
From: mcarrier
Date: Wed, 6 Dec 2017 15:18:59 +0000
Subject:

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3049 636b058d-ea47-450e-bf9e-a15bfbe3eedb


Former-commit-id: 2578f46d6953ceb7f1cc2fd4d959c8662e2936d1
---
 CMakeLists.txt               | 1 +
 biblio/how_to_cite_gudhi.bib | 9 +++++++++
 src/CMakeLists.txt           | 1 +
 src/Doxyfile                 | 3 ++-
 4 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 10373f75..b28dcbf2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -50,6 +50,7 @@ add_gudhi_module(Subsampling)
 add_gudhi_module(Tangential_complex)
 add_gudhi_module(Witness_complex)
 add_gudhi_module(Nerve_GIC)
+add_gudhi_module(Kernels)
 
 message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib
index 59c05a5b..b8c29109 100644
--- a/biblio/how_to_cite_gudhi.bib
+++ b/biblio/how_to_cite_gudhi.bib
@@ -122,3 +122,12 @@
 , url = "http://gudhi.gforge.inria.fr/python/latest/"
 , year = 2016
 }
+
+@incollection{gudhi:Kernels
+, author = "Mathieu Carri\`ere"
+, title = "Kernels for PDs"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/python/latest/"
+, year = 2017
+}
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 94587044..0ae26081 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -26,6 +26,7 @@ add_gudhi_module(Subsampling)
 add_gudhi_module(Tangential_complex)
 add_gudhi_module(Witness_complex)
 add_gudhi_module(Nerve_GIC)
+add_gudhi_module(Kernels)
 
 message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
diff --git a/src/Doxyfile b/src/Doxyfile
index 429bf6a1..bda6f03d 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -854,7 +854,8 @@ IMAGE_PATH = doc/Skeleton_blocker/ \
                          doc/Tangential_complex/ \
                          doc/Bottleneck_distance/ \
                          doc/Nerve_GIC/ \
-                         doc/Persistence_representations/
+                         doc/Persistence_representations/ \
+                         doc/Kernels/
 
 # The INPUT_FILTER tag can be used to specify a program that doxygen should
 # invoke to filter for each input file. Doxygen will invoke the filter program
--
cgit v1.2.3


--
cgit v1.2.3


From 20034acbc1a6aa83a0a9fd2ee660bcda4dec6ebf Mon Sep 17 00:00:00 2001
From: fgodi
Date: Thu, 7 Dec 2017 15:46:43 +0000
Subject: small modifications

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3053 636b058d-ea47-450e-bf9e-a15bfbe3eedb


Former-commit-id: 7eeee9f036954c20f1f64369fc74dd64c98000bc
---
 src/Toplex_map/include/gudhi/Fake_simplex_tree.h   |  38 ++++----
 src/Toplex_map/include/gudhi/Filtered_toplex_map.h |   4 +-
 src/Toplex_map/include/gudhi/Toplex_map.h          | 100 ++++++++++-----------
 3 files changed, 71 insertions(+), 71 deletions(-)

diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h
index 104f1742..ea5ac618 100644
--- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h
+++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h
@@ -49,11 +49,11 @@ public:
     /** Handle type to a vertex contained in the simplicial complex.
      * \ingroup toplex_map */
-    typedef Vertex Vertex_handle;
+    typedef Toplex_map::Vertex Vertex_handle;
 
     /** Handle type to a simplex contained in the simplicial complex.
      * \ingroup toplex_map */
-    typedef Simplex Simplex_handle;
+    typedef Toplex_map::Simplex Simplex_handle;
 
     typedef void Insertion_result_type;
 
@@ -85,19 +85,19 @@ public:
     /** Returns a range over the vertices of a simplex.
      * \ingroup toplex_map */
-    Simplex simplex_vertex_range(const Simplex& s) const;
+    Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const;
 
     /** Returns a set of all maximal (critical if there is filtration values) simplices.
      * \ingroup toplex_map */
-    std::vector max_simplices() const;
+    std::vector max_simplices() const;
 
     /** Returns all the simplices, of max dimension d if a parameter d is given.
* \ingroup toplex_map */ - std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; /** Returns all the simplices of max dimension d * \ingroup toplex_map */ - std::vector skeleton_simplex_range(int d) const; + std::vector skeleton_simplex_range(int d) const; protected: @@ -148,31 +148,31 @@ std::size_t Fake_simplex_tree::num_simplices() const { } std::size_t Fake_simplex_tree::num_vertices() const { - std::unordered_set vertices; - for(const Simplex& s : max_simplices()) - for (Vertex v : s) + std::unordered_set vertices; + for(const Toplex_map::Simplex& s : max_simplices()) + for (Toplex_map::Vertex v : s) vertices.emplace(v); return vertices.size(); } -Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { +Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { return s; } -std::vector Fake_simplex_tree::max_simplices() const{ - std::vector max_s; +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector max_s; for(auto kv : toplex_maps) - for(const Simplex_ptr& sptr : kv.second.maximal_cofaces(Simplex())) + for(const Toplex_map::Simplex_ptr& sptr : kv.second.maximal_cofaces(Simplex())) max_s.emplace_back(*sptr); return max_s; } -std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ - std::vector m = max_simplices(); - std::vector range; - Simplex_ptr_set seen; +std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ + std::vector m = max_simplices(); + std::vector range; + Toplex_map::Simplex_ptr_set seen; while(m.begin()!=m.end()){ - Simplex s(m.back()); + Toplex_map::Simplex s(m.back()); m.pop_back(); if(seen.find(get_key(s))==seen.end()){ if((int) s.size()-1 <=d) @@ -186,7 +186,7 @@ std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ return range; } -std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ +std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ return filtration_simplex_range(d); } diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index 28814d15..379c65dd 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -1,4 +1,4 @@ -#ifndef FILTERED_TOPLEX_MAP_H + #ifndef FILTERED_TOPLEX_MAP_H #define FILTERED_TOPLEX_MAP_H #include @@ -54,7 +54,7 @@ protected: }; template -std::pair Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ +std::pair Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ Simplex s(vertex_range.begin(),vertex_range.end()); if(membership(s)) return make_pair(s,false); if(!toplex_maps.count(f)) toplex_maps.emplace(f,Toplex_map()); diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index b433f3de..00127baf 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -7,7 +7,7 @@ #include #include -#define vertex_upper_bound std::numeric_limits::max() +#define vertex_upper_bound std::numeric_limits::max() namespace Gudhi { @@ -24,17 +24,17 @@ public: /** Simplex is the type of simplices. * \ingroup toplex_map */ - typedef std::unordered_set Simplex; + typedef std::unordered_set Simplex; /** The type of the pointers to maximal simplices. 
* \ingroup toplex_map */ - typedef std::shared_ptr Simplex_ptr; + typedef std::shared_ptr Simplex_ptr; - struct Sptr_hash{ std::size_t operator()(const Simplex_ptr& s) const; }; - struct Sptr_equal{ std::size_t operator()(const Simplex_ptr& a, const Simplex_ptr& b) const; }; - /** The type of the sets of Simplex_ptr. + struct Sptr_hash{ std::size_t operator()(const Toplex_map::Simplex_ptr& s) const; }; + struct Sptr_equal{ std::size_t operator()(const Toplex_map::Simplex_ptr& a, const Toplex_map::Simplex_ptr& b) const; }; + /** The type of the sets of Toplex_map::Simplex_ptr. * \ingroup toplex_map */ - typedef std::unordered_set Simplex_ptr_set; + typedef std::unordered_set Simplex_ptr_set; /** \brief Adds the given simplex to the complex. * Nothing happens if the simplex has a coface in the complex. @@ -63,13 +63,13 @@ public: * Gives not more than max_number maximal cofaces if max_number is strictly positive. * \ingroup toplex_map */ template - Simplex_ptr_set maximal_cofaces(const Input_vertex_range &vertex_range, const std::size_t max_number = 0) const; + Toplex_map::Simplex_ptr_set maximal_cofaces(const Input_vertex_range &vertex_range, const std::size_t max_number = 0) const; /** Contracts one edge in the complex. * The edge has to verify the link condition if you want to preserve topology. * Returns the remaining vertex. * \ingroup toplex_map */ - Vertex contraction(const Vertex x, const Vertex y); + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); /** Adds the given simplex to the complex. * The simplex must not have neither maximal face nor coface in the complex. @@ -79,11 +79,11 @@ public: /** \internal Removes a toplex without adding facets after. * \ingroup toplex_map */ - void erase_maximal(const Simplex_ptr& sptr); + void erase_maximal(const Toplex_map::Simplex_ptr& sptr); /** Removes a vertex from any simplex containing it. * \ingroup toplex_map */ - void remove_vertex(const Vertex x); + void remove_vertex(const Toplex_map::Vertex x); /** \brief Number of maximal simplices. * \ingroup toplex_map */ @@ -93,16 +93,16 @@ protected: /** \internal Gives an index in order to look for a simplex quickly. * \ingroup toplex_map */ template - Vertex best_index(const Input_vertex_range &vertex_range) const; + Toplex_map::Vertex best_index(const Input_vertex_range &vertex_range) const; /** \internal The map from vertices to toplices * \ingroup toplex_map */ - std::unordered_map t0; + std::unordered_map t0; }; // Pointers are also used as key in the hash sets. template -Simplex_ptr get_key(const Input_vertex_range &vertex_range); +Toplex_map::Simplex_ptr get_key(const Input_vertex_range &vertex_range); // Is the first simplex a face of the second ? template @@ -110,24 +110,24 @@ bool included(const Input_vertex_range1 &vertex_range1, const Input_vertex_range // All the facets of the given simplex. 
template -std::vector facets(const Input_vertex_range &vertex_range); +std::vector facets(const Input_vertex_range &vertex_range); template void Toplex_map::insert_simplex(const Input_vertex_range &vertex_range){ if(membership(vertex_range)) return; bool replace_facets = true; - for(const Simplex& facet : facets(vertex_range)) + for(const Toplex_map::Simplex& facet : facets(vertex_range)) if(!maximality(facet)) { replace_facets=false; break; } if(replace_facets) - for(const Simplex& facet : facets(vertex_range)) + for(const Toplex_map::Simplex& facet : facets(vertex_range)) erase_maximal(get_key(facet)); else - for(const Vertex& v : vertex_range) - if(t0.count(v)) for(const Simplex_ptr& fptr : Simplex_ptr_set(t0.at(v))) + for(const Toplex_map::Vertex& v : vertex_range) + if(t0.count(v)) for(const Toplex_map::Simplex_ptr& fptr : Simplex_ptr_set(t0.at(v))) //Copy constructor needed because the set is modified if(included(*fptr,vertex_range)) erase_maximal(fptr); // We erase all the maximal faces of the simplex @@ -140,12 +140,12 @@ void Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ t0.clear(); // Removal of the empty simplex means cleaning everything else { - const Vertex& v = best_index(vertex_range); - if(t0.count(v)) for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))) + const Toplex_map::Vertex& v = best_index(vertex_range); + if(t0.count(v)) for(const Toplex_map::Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))) //Copy constructor needed because the set is modified if(included(vertex_range, *sptr)){ erase_maximal(sptr); - for(const Simplex& f : facets(vertex_range)) + for(const Toplex_map::Simplex& f : facets(vertex_range)) if(!membership(f)) insert_independent_simplex(f); // We add the facets which are new maximal simplices } @@ -155,10 +155,10 @@ void Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ template bool Toplex_map::membership(const Input_vertex_range &vertex_range) const{ if(t0.size()==0) return false; - const Vertex& v = best_index(vertex_range); + const Toplex_map::Vertex& v = best_index(vertex_range); if(!t0.count(v)) return false; if(maximality(vertex_range)) return true; - for(const Simplex_ptr& sptr : t0.at(v)) + for(const Toplex_map::Simplex_ptr& sptr : t0.at(v)) if(included(vertex_range, *sptr)) return true; return false; @@ -166,27 +166,27 @@ bool Toplex_map::membership(const Input_vertex_range &vertex_range) const{ template bool Toplex_map::maximality(const Input_vertex_range &vertex_range) const{ - const Vertex& v = best_index(vertex_range); + const Toplex_map::Vertex& v = best_index(vertex_range); if(!t0.count(v)) return false; return t0.at(v).count(get_key(vertex_range)); } template -Simplex_ptr_set Toplex_map::maximal_cofaces(const Input_vertex_range &vertex_range, const std::size_t max_number) const{ +Toplex_map::Simplex_ptr_set Toplex_map::maximal_cofaces(const Input_vertex_range &vertex_range, const std::size_t max_number) const{ Simplex_ptr_set cofaces; if(maximality(vertex_range)) cofaces.emplace(get_key(vertex_range)); else if(vertex_range.begin()==vertex_range.end()) for(const auto& kv : t0) - for(const Simplex_ptr& sptr : kv.second){ + for(const Toplex_map::Simplex_ptr& sptr : kv.second){ //kv.second is a Simplex_ptr_set cofaces.emplace(sptr); if(cofaces.size()==max_number) return cofaces; } else { - const Vertex& v = best_index(vertex_range); - if(t0.count(v)) for(const Simplex_ptr& sptr : t0.at(v)) + const Toplex_map::Vertex& v = best_index(vertex_range); + if(t0.count(v)) for(const Toplex_map::Simplex_ptr& 
sptr : t0.at(v)) if(included(vertex_range, *sptr)){ cofaces.emplace(sptr); if(cofaces.size()==max_number) @@ -196,7 +196,7 @@ Simplex_ptr_set Toplex_map::maximal_cofaces(const Input_vertex_range &vertex_ran return cofaces; } -Vertex Toplex_map::contraction(const Vertex x, const Vertex y){ +Toplex_map::Vertex Toplex_map::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ if(!t0.count(x)) return y; if(!t0.count(y)) return x; int k, d; @@ -204,7 +204,7 @@ Vertex Toplex_map::contraction(const Vertex x, const Vertex y){ k=x, d=y; else k=y, d=x; - for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ + for(const Toplex_map::Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ //Copy constructor needed because the set is modified Simplex sigma(*sptr); erase_maximal(sptr); @@ -217,14 +217,14 @@ Vertex Toplex_map::contraction(const Vertex x, const Vertex y){ template void Toplex_map::insert_independent_simplex(const Input_vertex_range &vertex_range){ - for(const Vertex& v : vertex_range){ + for(const Toplex_map::Vertex& v : vertex_range){ if(!t0.count(v)) t0.emplace(v, Simplex_ptr_set()); t0.at(v).emplace(get_key(vertex_range)); } } -void Toplex_map::remove_vertex(const Vertex x){ - for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(x))){ +void Toplex_map::remove_vertex(const Toplex_map::Vertex x){ + for(const Toplex_map::Simplex_ptr& sptr : Simplex_ptr_set(t0.at(x))){ Simplex sigma(*sptr); erase_maximal(sptr); sigma.erase(x); @@ -236,61 +236,61 @@ std::size_t Toplex_map::num_simplices() const{ return maximal_cofaces(Simplex()).size(); } -inline void Toplex_map::erase_maximal(const Simplex_ptr& sptr){ +inline void Toplex_map::erase_maximal(const Toplex_map::Simplex_ptr& sptr){ Simplex sigma(*sptr); if (sptr->size()==0) sigma.insert(vertex_upper_bound); - for(const Vertex& v : sigma){ + for(const Toplex_map::Vertex& v : sigma){ t0.at(v).erase(sptr); if(t0.at(v).size()==0) t0.erase(v); } } template -Vertex Toplex_map::best_index(const Input_vertex_range &vertex_range) const{ +Toplex_map::Vertex Toplex_map::best_index(const Input_vertex_range &vertex_range) const{ std::size_t min = std::numeric_limits::max(); Vertex arg_min = vertex_upper_bound; - for(const Vertex& v : vertex_range) + for(const Toplex_map::Vertex& v : vertex_range) if(!t0.count(v)) return v; else if(t0.at(v).size() < min) min = t0.at(v).size(), arg_min = v; return arg_min; } -std::size_t Sptr_equal::operator()(const Simplex_ptr& s1, const Simplex_ptr& s2) const { +std::size_t Toplex_map::Sptr_equal::operator()(const Toplex_map::Simplex_ptr& s1, const Toplex_map::Simplex_ptr& s2) const { if (s1->size() != s2->size()) return false; return included(*s1,*s2); // inclusion tests equality for same size simplices } -std::size_t Sptr_hash::operator()(const Simplex_ptr& s) const { +std::size_t Toplex_map::Sptr_hash::operator()(const Toplex_map::Simplex_ptr& s) const { std::hash h_f; //double hash works better than int hash size_t h = 0; - for(const Vertex& v : *s) + for(const Toplex_map::Vertex& v : *s) h += h_f(static_cast(v)); return h; } template -Simplex_ptr get_key(const Input_vertex_range &vertex_range){ - Simplex s(vertex_range.begin(), vertex_range.end()); - return std::make_shared(s); +Toplex_map::Simplex_ptr get_key(const Input_vertex_range &vertex_range){ + Toplex_map::Simplex s(vertex_range.begin(), vertex_range.end()); + return std::make_shared(s); } template bool included(const Input_vertex_range1 &vertex_range1, const Input_vertex_range2 &vertex_range2){ - Simplex s2(vertex_range2.begin(), vertex_range2.end()); 
- for(const Vertex& v : vertex_range1) + Toplex_map::Simplex s2(vertex_range2.begin(), vertex_range2.end()); + for(const Toplex_map::Vertex& v : vertex_range1) if(!s2.count(v)) return false; return true; } template -std::vector facets(const Input_vertex_range &vertex_range){ - std::vector facets; - Simplex f(vertex_range.begin(), vertex_range.end()); - for(const Vertex& v : vertex_range){ +std::vector facets(const Input_vertex_range &vertex_range){ + std::vector facets; + Toplex_map::Simplex f(vertex_range.begin(), vertex_range.end()); + for(const Toplex_map::Vertex& v : vertex_range){ f.erase(v); facets.emplace_back(f); f.insert(v); -- cgit v1.2.3 From fd79fc0f0a216e5b1dc8b2cb466d383eb32c1fd4 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 8 Dec 2017 09:22:24 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3056 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 6fd757a9dfd0abe7ddfbaeca1af667a4c93af34b --- src/Kernels/include/gudhi/PSS.h | 108 +++++++++++++++ src/Kernels/include/gudhi/PWG.h | 204 ++++++++++++++++++++++++++++ src/Kernels/include/gudhi/SW.h | 288 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 600 insertions(+) create mode 100644 src/Kernels/include/gudhi/PSS.h create mode 100644 src/Kernels/include/gudhi/PWG.h create mode 100644 src/Kernels/include/gudhi/SW.h diff --git a/src/Kernels/include/gudhi/PSS.h b/src/Kernels/include/gudhi/PSS.h new file mode 100644 index 00000000..70743c47 --- /dev/null +++ b/src/Kernels/include/gudhi/PSS.h @@ -0,0 +1,108 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef PSS_H_ +#define PSS_H_ + +#define NUMPI 3.14159265359 + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "../../figtree-0.9.3/include/figtree.h" +#include "../../figtree-0.9.3/external/ann_1.1.1/include/ANN/ANN.h" + +using PD = std::vector >; + +namespace Gudhi { +namespace persistence_scale_space { + +double compute_exact_pss(PD PD1, PD PD2, double sigma = 1){ + double k = 0; + for(int i = 0; i < PD1.size(); i++){ + for(int j = 0; j < PD2.size(); j++){ + k += exp( -( pow(PD1[i].first - PD2[j].first, 2) + pow(PD1[i].second - PD2[j].second, 2) )/(8*sigma)) -\ + exp( -( pow(PD1[i].first - PD2[j].second, 2) + pow(PD1[i].second - PD2[j].first, 2) )/(8*sigma)); + } + } + return k/(8*NUMPI*sigma); +} + +double compute_approximate_pss(PD PD1, PD PD2, double sigma = 1, double error = 1e-2){ + + double k = 0; + + int d = 2; int N = PD1.size(); int M = PD2.size(); double h = std::sqrt(8*sigma); + double* x = new double[2*N]; double* y = new double[2*M]; double* q = new double[N]; + for(int i = 0; i < N; i++){ + q[i] = 1.0/(8*NUMPI*sigma); + x[2*i] = PD1[i].first; x[2*i+1] = PD1[i].second; + } + for(int i = 0; i < M; i++){ y[2*i] = PD2[i].first; y[2*i+1] = PD2[i].second; } + double* g_auto = new double[M]; + memset(g_auto, 0, sizeof(double)*M); + + figtree(d, N, M, 1, x, h, q, y, error, g_auto); + for(int i = 0; i < M; i++) k += g_auto[i]; + + for(int i = 0; i < M; i++){ y[2*i] = PD2[i].second; y[2*i+1] = PD2[i].first; } + + figtree(d, N, M, 1, x, h, q, y, error, g_auto); + for(int i = 0; i < M; i++) k -= g_auto[i]; + + delete[] x; delete[] y; delete[] q; delete[] g_auto; + return k; +} + +} // namespace persistence_scale_space + +} // namespace Gudhi + +#endif // PSS_H_ diff --git a/src/Kernels/include/gudhi/PWG.h b/src/Kernels/include/gudhi/PWG.h new file mode 100644 index 00000000..bc491ae7 --- /dev/null +++ b/src/Kernels/include/gudhi/PWG.h @@ -0,0 +1,204 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef PWG_H_ +#define PWG_H_ + +#define NUMPI 3.14159265359 + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include + +using PD = std::vector >; + +namespace Gudhi { +namespace persistence_weighted_gaussian { + +double compute_exact_linear_pwg(PD PD1, PD PD2, double sigma, double C, int p){ + + int num_pts1 = PD1.size(); + int num_pts2 = PD2.size(); + + double k = 0; + for(int i = 0; i < num_pts1; i++){ + for(int j = 0; j < num_pts2; j++){ + k += atan(C*pow(PD1[i].second-PD1[i].first,p))*atan(C*pow(PD2[j].second-PD2[j].first,p))*\ + exp( -( pow(PD1[i].first-PD2[j].first,2) + pow(PD1[i].second-PD2[j].second,2) )/(2*pow(sigma,2)) ); + } + } + + return k; + +} + +double compute_exact_gaussian_pwg(PD PD1, PD PD2, double sigma, double C, int p, double tau){ + + int num_pts1 = PD1.size(); + int num_pts2 = PD2.size(); + + double k1 = 0; + for(int i = 0; i < num_pts1; i++){ + for(int j = 0; j < num_pts1; j++){ + k1 += atan(C*pow(PD1[i].second-PD1[i].first,p))*atan(C*pow(PD1[j].second-PD1[j].first,p))*\ + exp( -( pow(PD1[i].first-PD1[j].first,2) + pow(PD1[i].second-PD1[j].second,2) )/(2*pow(sigma,2)) ); + } + } + + double k2 = 0; + for(int i = 0; i < num_pts2; i++){ + for(int j = 0; j < num_pts2; j++){ + k2 += atan(C*pow(PD2[i].second-PD2[i].first,p))*atan(C*pow(PD2[j].second-PD2[j].first,p))*\ + exp( -( pow(PD2[i].first-PD2[j].first,2) + pow(PD2[i].second-PD2[j].second,2) )/(2*pow(sigma,2)) ); + } + } + + double k3 = compute_exact_linear_pwg(PD1,PD2,sigma,C,p); + return exp( - (k1+k2-2*k3) / (2*pow(tau,2)) ); + +} + +double compute_exact_gaussian_RKHSdist(PD PD1, PD PD2, double sigma, double C, int p){ + + int num_pts1 = PD1.size(); + int num_pts2 = PD2.size(); + + double k1 = 0; + for(int i = 0; i < num_pts1; i++){ + for(int j = 0; j < num_pts1; j++){ + k1 += atan(C*pow(PD1[i].second-PD1[i].first,p))*atan(C*pow(PD1[j].second-PD1[j].first,p))*\ + exp( -( pow(PD1[i].first-PD1[j].first,2) + pow(PD1[i].second-PD1[j].second,2) )/(2*pow(sigma,2)) ); + } + } + + double k2 = 0; + for(int i = 0; i < num_pts2; i++){ + for(int j = 0; j < num_pts2; j++){ + k2 += atan(C*pow(PD2[i].second-PD2[i].first,p))*atan(C*pow(PD2[j].second-PD2[j].first,p))*\ + exp( -( pow(PD2[i].first-PD2[j].first,2) + pow(PD2[i].second-PD2[j].second,2) )/(2*pow(sigma,2)) ); + } + } + + double k3 = compute_exact_linear_pwg(PD1,PD2,sigma,C,p); + return std::sqrt(k1+k2-2*k3); + +} + +double compute_approximate_linear_pwg_from_Fourier_features(const std::vector >& B1, \ + const std::vector >& B2){ + double d = 0; int M = B1.size(); + for(int i = 0; i < M; i++) d += B1[i].first*B2[i].first + B1[i].second*B2[i].second; + return (1.0/M)*d; +} + +double compute_approximate_gaussian_pwg_from_Fourier_features(const std::vector >& B1, \ + const std::vector >& B2, double tau){ + int M = B1.size(); + double d3 = compute_approximate_linear_pwg_from_Fourier_features(B1, B2); + double d1 = 0; double d2 = 0; + for(int i = 0; i < M; i++){d1 += pow(B1[i].first,2) + pow(B1[i].second,2); d2 += pow(B2[i].first,2) + pow(B2[i].second,2);} + return exp( -((1.0/M)*(d1+d2)-2*d3) / (2*pow(tau,2)) ); +} + +double compute_approximate_gaussian_RKHSdist_from_Fourier_features(const std::vector >& B1, \ + const std::vector >& B2){ + int M = B1.size(); + double d3 = compute_approximate_linear_pwg_from_Fourier_features(B1, B2); + double d1 = 0; double d2 = 0; 
+ for(int i = 0; i < M; i++){d1 += pow(B1[i].first,2) + pow(B1[i].second,2); d2 += pow(B2[i].first,2) + pow(B2[i].second,2);} + return std::sqrt((1.0/M)*(d1+d2)-2*d3); +} + +std::vector > compute_Fourier_features(double C, int p, PD D, std::vector > Z){ + int m = D.size(); std::vector > B; int M = Z.size(); + for(int i = 0; i < M; i++){ + double d1 = 0; double d2 = 0; double zx = Z[i].first; double zy = Z[i].second; + for(int j = 0; j < m; j++){ + double x = D[j].first; double y = D[j].second; + d1 += atan(C*pow(y-x,p))*cos(x*zx + y*zy); + d2 += atan(C*pow(y-x,p))*sin(x*zx + y*zy); + } + B.push_back(std::pair(d1,d2)); + } + return B; +} + +std::vector > random_Fourier(double sigma, int M = 1000){ + std::normal_distribution distrib(0,1); std::vector > Z; + std::random_device rd; + for(int i = 0; i < M; i++){ + //unsigned seedx = 2*i; unsigned seedy = 2*i+1; + //std::default_random_engine generatorx(seedx); std::default_random_engine generatory(seedy); + std::mt19937 e1(rd()); std::mt19937 e2(rd()); + double zx = distrib(e1/*generatorx*/); double zy = distrib(e2/*generatory*/); + Z.push_back(std::pair((1/sigma)*zx,(1/sigma)*zy)); + } + return Z; +} + +double compute_approximate_linear_pwg(PD PD1, PD PD2, double sigma, double C, int p, int M = 1000){ + std::vector > Z = random_Fourier(sigma, M); + std::vector > B1 = compute_Fourier_features(C,p,PD1,Z); + std::vector > B2 = compute_Fourier_features(C,p,PD2,Z); + return compute_approximate_linear_pwg_from_Fourier_features(B1,B2); +} + +double compute_approximate_gaussian_pwg(PD PD1, PD PD2, double sigma, double C, int p, double tau, int M = 1000){ + std::vector > Z = random_Fourier(sigma, M); + std::vector > B1 = compute_Fourier_features(C,p,PD1,Z); + std::vector > B2 = compute_Fourier_features(C,p,PD2,Z); + return compute_approximate_gaussian_pwg_from_Fourier_features(B1,B2,tau); +} + + +} // namespace persistence_weighted_gaussian + +} // namespace Gudhi + +#endif //PWG_H_ diff --git a/src/Kernels/include/gudhi/SW.h b/src/Kernels/include/gudhi/SW.h new file mode 100644 index 00000000..6871d990 --- /dev/null +++ b/src/Kernels/include/gudhi/SW.h @@ -0,0 +1,288 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef SW_H_ +#define SW_H_ + +#define NUMPI 3.14159265359 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using PD = std::vector >; + +std::vector > PDi, PDj; + +bool compOri(const int& p, const int& q){ + if(PDi[p].second != PDi[q].second) + return (PDi[p].second < PDi[q].second); + else + return (PDi[p].first > PDi[q].first); +} + +bool compOrj(const int& p, const int& q){ + if(PDj[p].second != PDj[q].second) + return (PDj[p].second < PDj[q].second); + else + return (PDj[p].first > PDj[q].first); +} + +bool sortAngle(const std::pair >& p1, const std::pair >& p2){ + return p1.first < p2.first; +} + +bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;} + +namespace Gudhi { +namespace sliced_wasserstein { + + +double compute_approximate_SW(PD PD1, PD PD2, int N = 100){ + + double step = NUMPI/N; double sw = 0; + + // Add projections onto diagonal. + // ****************************** + int n1, n2; n1 = PD1.size(); n2 = PD2.size(); + for (int i = 0; i < n2; i++) + PD1.push_back(std::pair( (PD2[i].first+PD2[i].second)/2, (PD2[i].first+PD2[i].second)/2) ); + for (int i = 0; i < n1; i++) + PD2.push_back(std::pair( (PD1[i].first+PD1[i].second)/2, (PD1[i].first+PD1[i].second)/2) ); + int n = PD1.size(); + + // Sort and compare all projections. + // ********************************* + //#pragma omp parallel for + for (int i = 0; i < N; i++){ + std::vector > L1, L2; + for (int j = 0; j < n; j++){ + L1.push_back( std::pair(j, PD1[j].first*cos(-NUMPI/2+i*step) + PD1[j].second*sin(-NUMPI/2+i*step)) ); + L2.push_back( std::pair(j, PD2[j].first*cos(-NUMPI/2+i*step) + PD2[j].second*sin(-NUMPI/2+i*step)) ); + } + std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp); + double f = 0; for (int j = 0; j < n; j++) f += std::abs(L1[j].second - L2[j].second); + sw += f*step; + } + return sw/NUMPI; +} + +double compute_int_cos(const double& alpha, const double& beta){ // Valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] + double res; + assert((alpha >= 0 && alpha <= NUMPI) || (alpha >= -NUMPI && alpha <= 0)); + if (alpha >= 0 && alpha <= NUMPI){ + if (cos(alpha) >= 0){ + if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} + else{res = sin(beta)-sin(alpha);} + } + else{ + if(1.5*NUMPI <= beta){res = 2+sin(alpha)+sin(beta);} + else{res = sin(alpha)-sin(beta);} + } + } + if (alpha >= -NUMPI && alpha <= 0){ + if (cos(alpha) <= 0){ + if(-NUMPI/2 <= beta){res = 2+sin(alpha)+sin(beta);} + else{res = sin(alpha)-sin(beta);} + } + else{ + if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} + else{res = sin(beta)-sin(alpha);} + } + } + return res; +} + +double compute_int(const double& theta1, const double& theta2, const int& p, const int& q){ + double norm = std::sqrt(pow(PDi[p].first-PDj[q].first,2) + pow(PDi[p].second-PDj[q].second,2)); + double angle1; + if (PDi[p].first > PDj[q].first) + angle1 = theta1 - asin( (PDi[p].second-PDj[q].second)/norm ); + else + angle1 = theta1 - asin( (PDj[q].second-PDi[p].second)/norm ); + double angle2 = angle1+theta2-theta1; + double integral = compute_int_cos(angle1,angle2); + return norm*integral; +} + +double compute_sw(const std::vector > >& V1, \ + const std::vector > >& V2){ + int N = V1.size(); double sw = 0; + for (int i = 0; i < N; i++){ + std::vector > U,V; U = V1[i]; V = 
V2[i]; + double theta1, theta2; theta1 = -NUMPI/2; + int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); + while(theta1 != NUMPI/2){ + if(PDi[U[ku].first].first != PDj[V[kv].first].first || PDi[U[ku].first].second != PDj[V[kv].first].second) + if(theta1 != theta2) + sw += compute_int(theta1,theta2,U[ku].first,V[kv].first); + theta1 = theta2; + if ( (theta2 == U[ku].second) && ku < U.size()-1 ){ku++;} + if ( (theta2 == V[kv].second) && kv < V.size()-1 ){kv++;} + theta2 = std::min(U[ku].second, V[kv].second); + } + } + return sw/NUMPI; +} + +double compute_angle(const PD& PersDiag, const int& i, const int& j){ + std::pair vect; double x1,y1, x2,y2; + x1 = PersDiag[i].first; y1 = PersDiag[i].second; + x2 = PersDiag[j].first; y2 = PersDiag[j].second; + if (y1 - y2 > 0){ + vect.first = y1 - y2; + vect.second = x2 - x1;} + else{ + if(y1 - y2 < 0){ + vect.first = y2 - y1; + vect.second = x1 - x2; + } + else{ + vect.first = 0; + vect.second = abs(x1 - x2);} + } + double norm = std::sqrt(pow(vect.first,2) + pow(vect.second,2)); + return asin(vect.second/norm); +} + +double compute_exact_SW(PD PD1, PD PD2){ + + // Add projections onto diagonal. + // ****************************** + int n1, n2; n1 = PD1.size(); n2 = PD2.size(); double max_ordinate = std::numeric_limits::min(); + for (int i = 0; i < n2; i++){ + max_ordinate = std::max(max_ordinate, PD2[i].second); + PD1.push_back(std::pair( ((PD2[i].first+PD2[i].second)/2), ((PD2[i].first+PD2[i].second)/2)) ); + } + for (int i = 0; i < n1; i++){ + max_ordinate = std::max(max_ordinate, PD1[i].second); + PD2.push_back(std::pair( ((PD1[i].first+PD1[i].second)/2), ((PD1[i].first+PD1[i].second)/2)) ); + } + int N = PD1.size(); assert(N==PD2.size()); + + // Slightly perturb the points so that the PDs are in generic positions. + // ********************************************************************* + int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} + double thresh = pow(10,-5+mag); + srand(time(NULL)); + for (int i = 0; i < N; i++){ + PD1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + PD2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + } + + // Compute all angles in both PDs. + // ******************************* + std::vector > > angles1, angles2; + for (int i = 0; i < N; i++){ + for (int j = i+1; j < N; j++){ + double theta1 = compute_angle(PD1,i,j); double theta2 = compute_angle(PD2,i,j); + angles1.push_back(std::pair >(theta1, std::pair(i,j))); + angles2.push_back(std::pair >(theta2, std::pair(i,j))); + } + } + + // Sort angles. + // ************ + std::sort(angles1.begin(), angles1.end(), sortAngle); std::sort(angles2.begin(), angles2.end(), sortAngle); + + // Initialize orders of the points of both PD (given by ordinates when theta = -pi/2). + // *********************************************************************************** + PDi = PD1; PDj = PD2; + std::vector orderp1, orderp2; + for (int i = 0; i < N; i++){orderp1.push_back(i); orderp2.push_back(i);} + std::sort(orderp1.begin(),orderp1.end(),compOri); std::sort(orderp2.begin(),orderp2.end(),compOrj); + + // Find the inverses of the orders. 
+ // ******************************** + std::vector order1(N); std::vector order2(N); + for(int i = 0; i < N; i++){ + for (int j = 0; j < N; j++) + if(orderp1[j] == i) + order1[i] = j; + } + for(int i = 0; i < N; i++){ + for (int j = 0; j < N; j++) + if(orderp2[j] == i) + order2[i] = j; + } + + // Record all inversions of points in the orders as theta varies along the positive half-disk. + // ******************************************************************************************* + std::vector > > anglePerm1(N); + std::vector > > anglePerm2(N); + + int M1 = angles1.size(); + for (int i = 0; i < M1; i++){ + double theta = angles1[i].first; int p = angles1[i].second.first; int q = angles1[i].second.second; + anglePerm1[order1[p]].push_back(std::pair(p,theta)); + anglePerm1[order1[q]].push_back(std::pair(q,theta)); + int a = order1[p]; int b = order1[q]; order1[p] = b; order1[q] = a; + } + + int M2 = angles2.size(); + for (int i = 0; i < M2; i++){ + double theta = angles2[i].first; int p = angles2[i].second.first; int q = angles2[i].second.second; + anglePerm2[order2[p]].push_back(std::pair(p,theta)); + anglePerm2[order2[q]].push_back(std::pair(q,theta)); + int a = order2[p]; int b = order2[q]; order2[p] = b; order2[q] = a; + } + + for (int i = 0; i < N; i++){ + anglePerm1[order1[i]].push_back(std::pair(i,NUMPI/2)); + anglePerm2[order2[i]].push_back(std::pair(i,NUMPI/2)); + } + + // Compute the SW distance with the list of inversions. + // **************************************************** + return compute_sw(anglePerm1,anglePerm2); + +} + +}} + +#endif + + -- cgit v1.2.3 From 21e6cb713dd3db78e68b4140ab2d69508dad01af Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 22 Dec 2017 15:53:56 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3103 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: f7ac7ffce4fbfb04b099797a02999d4a93e3f22b --- src/Kernels/example/kernel_basic_example.cpp | 49 +++++++ src/Kernels/include/gudhi/PSS.h | 4 +- src/Kernels/include/gudhi/SW.h | 198 +++++++++++++++------------ src/Kernels/include/gudhi/figtree-0.9.3.zip | Bin 0 -> 1229617 bytes src/cmake/modules/GUDHI_modules.cmake | 4 +- 5 files changed, 166 insertions(+), 89 deletions(-) create mode 100644 src/Kernels/example/kernel_basic_example.cpp create mode 100644 src/Kernels/include/gudhi/figtree-0.9.3.zip diff --git a/src/Kernels/example/kernel_basic_example.cpp b/src/Kernels/example/kernel_basic_example.cpp new file mode 100644 index 00000000..0a8d83b3 --- /dev/null +++ b/src/Kernels/example/kernel_basic_example.cpp @@ -0,0 +1,49 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Authors: Francois Godi, small modifications by Pawel Dlotko + * + * Copyright (C) 2015 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+#include 
+//#include 
+//#include 
+
+#include 
+#include 
+#include   // for pair
+#include   // for numeric_limits
+
+int main() {
+  std::vector< std::pair > v1, v2;
+
+  v1.emplace_back(2.7, 3.7);
+  v1.emplace_back(9.6, 14.);
+  v1.emplace_back(34.2, 34.974);
+
+  v2.emplace_back(2.8, 4.45);
+  v2.emplace_back(9.5, 14.1);
+
+
+  double b1 = Gudhi::sliced_wasserstein::compute_approximate_SW (v1, v2);
+  double b2 = Gudhi::sliced_wasserstein::compute_exact_SW (v1, v2);
+
+  std::cout << "Approximate Sliced Wasserstein distance = " << b1 << std::endl;
+  std::cout << "Exact Sliced Wasserstein distance = " << b2 << std::endl;
+
+}
diff --git a/src/Kernels/include/gudhi/PSS.h b/src/Kernels/include/gudhi/PSS.h
index 70743c47..5111a09f 100644
--- a/src/Kernels/include/gudhi/PSS.h
+++ b/src/Kernels/include/gudhi/PSS.h
@@ -56,8 +56,8 @@
 #include 
 #include 
 
-#include "../../figtree-0.9.3/include/figtree.h"
-#include "../../figtree-0.9.3/external/ann_1.1.1/include/ANN/ANN.h"
+#include "figtree.h"
+#include "ANN.h"
 
 using PD = std::vector >;
diff --git a/src/Kernels/include/gudhi/SW.h b/src/Kernels/include/gudhi/SW.h
index 6871d990..0b041252 100644
--- a/src/Kernels/include/gudhi/SW.h
+++ b/src/Kernels/include/gudhi/SW.h
@@ -55,47 +55,36 @@
 
 using PD = std::vector >;
 
-std::vector > PDi, PDj;
-
-bool compOri(const int& p, const int& q){
-  if(PDi[p].second != PDi[q].second)
-    return (PDi[p].second < PDi[q].second);
-  else
-    return (PDi[p].first > PDi[q].first);
-}
-
-bool compOrj(const int& p, const int& q){
-  if(PDj[p].second != PDj[q].second)
-    return (PDj[p].second < PDj[q].second);
-  else
-    return (PDj[p].first > PDj[q].first);
-}
-
-bool sortAngle(const std::pair >& p1, const std::pair >& p2){
-  return p1.first < p2.first;
-}
-
+bool sortAngle(const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}
 bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;}
 
 namespace Gudhi {
 namespace sliced_wasserstein {
 
+// ********************************************************************
+// Approximate computation.
+// ********************************************************************
+
+/** \brief Computes an approximation of the Sliced Wasserstein distance between two persistence diagrams
+ *
+ * @param[in] N  number of points sampled on the circle.
+ *
+ */
+
 double compute_approximate_SW(PD PD1, PD PD2, int N = 100){
 
   double step = NUMPI/N; double sw = 0;
 
   // Add projections onto diagonal.
-  // ******************************
   int n1, n2; n1 = PD1.size(); n2 = PD2.size();
   for (int i = 0; i < n2; i++)
-    PD1.push_back(std::pair( (PD2[i].first+PD2[i].second)/2, (PD2[i].first+PD2[i].second)/2) );
+    PD1.push_back(std::pair( (PD2[i].first + PD2[i].second)/2, (PD2[i].first + PD2[i].second)/2) );
   for (int i = 0; i < n1; i++)
-    PD2.push_back(std::pair( (PD1[i].first+PD1[i].second)/2, (PD1[i].first+PD1[i].second)/2) );
+    PD2.push_back(std::pair( (PD1[i].first + PD1[i].second)/2, (PD1[i].first + PD1[i].second)/2) );
   int n = PD1.size();
 
   // Sort and compare all projections.
-  // *********************************
   //#pragma omp parallel for
   for (int i = 0; i < N; i++){
     std::vector > L1, L2;
     for (int j = 0; j < n; j++){
       L1.push_back( std::pair(j, PD1[j].first*cos(-NUMPI/2+i*step) + PD1[j].second*sin(-NUMPI/2+i*step)) );
       L2.push_back( std::pair(j, PD2[j].first*cos(-NUMPI/2+i*step) + PD2[j].second*sin(-NUMPI/2+i*step)) );
     }
     std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp);
     double f = 0; for (int j = 0; j < n; j++)  f += std::abs(L1[j].second - L2[j].second);
     sw += f*step;
   }
   return sw/NUMPI;
 }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+// ********************************************************************
+// Exact computation.
+// ******************************************************************** + + + +// Compute the angle formed by two points of a PD +double compute_angle(const PD & PersDiag, const int & i, const int & j){ + std::pair vect; double x1,y1, x2,y2; + x1 = PersDiag[i].first; y1 = PersDiag[i].second; + x2 = PersDiag[j].first; y2 = PersDiag[j].second; + if (y1 - y2 > 0){ + vect.first = y1 - y2; + vect.second = x2 - x1;} + else{ + if(y1 - y2 < 0){ + vect.first = y2 - y1; + vect.second = x1 - x2; + } + else{ + vect.first = 0; + vect.second = abs(x1 - x2);} + } + double norm = std::sqrt(pow(vect.first,2) + pow(vect.second,2)); + return asin(vect.second/norm); +} + +// Compute the integral of |cos()| between alpha and beta +// Valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] +double compute_int_cos(const double & alpha, const double & beta){ + double res = 0; assert((alpha >= 0 && alpha <= NUMPI) || (alpha >= -NUMPI && alpha <= 0)); if (alpha >= 0 && alpha <= NUMPI){ if (cos(alpha) >= 0){ @@ -136,75 +172,76 @@ double compute_int_cos(const double& alpha, const double& beta){ // Valid only i return res; } -double compute_int(const double& theta1, const double& theta2, const int& p, const int& q){ - double norm = std::sqrt(pow(PDi[p].first-PDj[q].first,2) + pow(PDi[p].second-PDj[q].second,2)); +double compute_int(const double & theta1, const double & theta2, const int & p, const int & q, const PD & PD1, const PD & PD2){ + double norm = std::sqrt(pow(PD1[p].first-PD2[q].first,2) + pow(PD1[p].second-PD2[q].second,2)); double angle1; - if (PDi[p].first > PDj[q].first) - angle1 = theta1 - asin( (PDi[p].second-PDj[q].second)/norm ); + if (PD1[p].first > PD2[q].first) + angle1 = theta1 - asin( (PD1[p].second-PD2[q].second)/norm ); else - angle1 = theta1 - asin( (PDj[q].second-PDi[p].second)/norm ); - double angle2 = angle1+theta2-theta1; + angle1 = theta1 - asin( (PD2[q].second-PD1[p].second)/norm ); + double angle2 = angle1 + theta2 - theta1; double integral = compute_int_cos(angle1,angle2); return norm*integral; } -double compute_sw(const std::vector > >& V1, \ - const std::vector > >& V2){ + + +double compute_sw(const std::vector > > & V1, const std::vector > > & V2, const PD & PD1, const PD & PD2){ int N = V1.size(); double sw = 0; for (int i = 0; i < N; i++){ std::vector > U,V; U = V1[i]; V = V2[i]; double theta1, theta2; theta1 = -NUMPI/2; - int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); + unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); while(theta1 != NUMPI/2){ - if(PDi[U[ku].first].first != PDj[V[kv].first].first || PDi[U[ku].first].second != PDj[V[kv].first].second) + if(PD1[U[ku].first].first != PD2[V[kv].first].first || PD1[U[ku].first].second != PD2[V[kv].first].second) if(theta1 != theta2) - sw += compute_int(theta1,theta2,U[ku].first,V[kv].first); + sw += Gudhi::sliced_wasserstein::compute_int(theta1, theta2, U[ku].first, V[kv].first, PD1, PD2); theta1 = theta2; - if ( (theta2 == U[ku].second) && ku < U.size()-1 ){ku++;} - if ( (theta2 == V[kv].second) && kv < V.size()-1 ){kv++;} + if ( (theta2 == U[ku].second) && ku < U.size()-1 ) ku++; + if ( (theta2 == V[kv].second) && kv < V.size()-1 ) kv++; theta2 = std::min(U[ku].second, V[kv].second); } } return sw/NUMPI; } -double compute_angle(const PD& PersDiag, const int& i, const int& j){ - std::pair vect; double x1,y1, x2,y2; - x1 = PersDiag[i].first; y1 = PersDiag[i].second; - x2 = PersDiag[j].first; y2 = PersDiag[j].second; - if (y1 - y2 > 0){ - vect.first = y1 - y2; - 
vect.second = x2 - x1;} - else{ - if(y1 - y2 < 0){ - vect.first = y2 - y1; - vect.second = x1 - x2; - } - else{ - vect.first = 0; - vect.second = abs(x1 - x2);} - } - double norm = std::sqrt(pow(vect.first,2) + pow(vect.second,2)); - return asin(vect.second/norm); -} + + + + + + + + + + + + + + + + + + +/** \brief Computes the Sliced Wasserstein distance between two persistence diagrams + * + */ double compute_exact_SW(PD PD1, PD PD2){ // Add projections onto diagonal. - // ****************************** - int n1, n2; n1 = PD1.size(); n2 = PD2.size(); double max_ordinate = std::numeric_limits::min(); + int n1, n2; n1 = PD1.size(); n2 = PD2.size(); double max_ordinate = std::numeric_limits::lowest(); for (int i = 0; i < n2; i++){ max_ordinate = std::max(max_ordinate, PD2[i].second); - PD1.push_back(std::pair( ((PD2[i].first+PD2[i].second)/2), ((PD2[i].first+PD2[i].second)/2)) ); + PD1.push_back( std::pair( ((PD2[i].first+PD2[i].second)/2), ((PD2[i].first+PD2[i].second)/2) ) ); } for (int i = 0; i < n1; i++){ max_ordinate = std::max(max_ordinate, PD1[i].second); - PD2.push_back(std::pair( ((PD1[i].first+PD1[i].second)/2), ((PD1[i].first+PD1[i].second)/2)) ); + PD2.push_back( std::pair( ((PD1[i].first+PD1[i].second)/2), ((PD1[i].first+PD1[i].second)/2) ) ); } int N = PD1.size(); assert(N==PD2.size()); // Slightly perturb the points so that the PDs are in generic positions. - // ********************************************************************* int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} double thresh = pow(10,-5+mag); srand(time(NULL)); @@ -214,43 +251,30 @@ double compute_exact_SW(PD PD1, PD PD2){ } // Compute all angles in both PDs. - // ******************************* std::vector > > angles1, angles2; for (int i = 0; i < N; i++){ for (int j = i+1; j < N; j++){ - double theta1 = compute_angle(PD1,i,j); double theta2 = compute_angle(PD2,i,j); + double theta1 = Gudhi::sliced_wasserstein::compute_angle(PD1,i,j); double theta2 = Gudhi::sliced_wasserstein::compute_angle(PD2,i,j); angles1.push_back(std::pair >(theta1, std::pair(i,j))); angles2.push_back(std::pair >(theta2, std::pair(i,j))); } } // Sort angles. - // ************ std::sort(angles1.begin(), angles1.end(), sortAngle); std::sort(angles2.begin(), angles2.end(), sortAngle); - // Initialize orders of the points of both PD (given by ordinates when theta = -pi/2). - // *********************************************************************************** - PDi = PD1; PDj = PD2; + // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). std::vector orderp1, orderp2; - for (int i = 0; i < N; i++){orderp1.push_back(i); orderp2.push_back(i);} - std::sort(orderp1.begin(),orderp1.end(),compOri); std::sort(orderp2.begin(),orderp2.end(),compOrj); + for (int i = 0; i < N; i++){ orderp1.push_back(i); orderp2.push_back(i); } + std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(PD1[i].second != PD1[j].second) return (PD1[i].second < PD1[j].second); else return (PD1[i].first > PD1[j].first); } ); + std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(PD2[i].second != PD2[j].second) return (PD2[i].second < PD2[j].second); else return (PD2[i].first > PD2[j].first); } ); // Find the inverses of the orders. 
- // ******************************** std::vector order1(N); std::vector order2(N); - for(int i = 0; i < N; i++){ - for (int j = 0; j < N; j++) - if(orderp1[j] == i) - order1[i] = j; - } - for(int i = 0; i < N; i++){ - for (int j = 0; j < N; j++) - if(orderp2[j] == i) - order2[i] = j; - } + for(int i = 0; i < N; i++) for (int j = 0; j < N; j++) if(orderp1[j] == i){ order1[i] = j; break; } + for(int i = 0; i < N; i++) for (int j = 0; j < N; j++) if(orderp2[j] == i){ order2[i] = j; break; } // Record all inversions of points in the orders as theta varies along the positive half-disk. - // ******************************************************************************************* std::vector > > anglePerm1(N); std::vector > > anglePerm2(N); @@ -276,11 +300,15 @@ double compute_exact_SW(PD PD1, PD PD2){ } // Compute the SW distance with the list of inversions. - // **************************************************** - return compute_sw(anglePerm1,anglePerm2); + return Gudhi::sliced_wasserstein::compute_sw(anglePerm1, anglePerm2, PD1, PD2); } + + + + + }} #endif diff --git a/src/Kernels/include/gudhi/figtree-0.9.3.zip b/src/Kernels/include/gudhi/figtree-0.9.3.zip new file mode 100644 index 00000000..a9468274 Binary files /dev/null and b/src/Kernels/include/gudhi/figtree-0.9.3.zip differ diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index f95d0c34..205ee8a1 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -16,8 +16,8 @@ function(add_gudhi_module file_path) endfunction(add_gudhi_module) -option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF) +option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" ON) +option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" ON) option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON) option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON) option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON) -- cgit v1.2.3 From a065a3b86e33c24250a981f95db1ff46d9075ef5 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 29 Dec 2017 23:13:11 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3106 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d4456c36f3ef8cc9e7f516d943472b963b5e9b93 --- src/Kernels/example/CMakeLists.txt | 11 + src/Kernels/example/kernel_basic_example.cpp | 31 +- src/Kernels/include/gudhi/PSS.h | 108 ------- src/Kernels/include/gudhi/PWG.h | 204 ------------ src/Kernels/include/gudhi/SW.h | 316 ------------------- src/Kernels/include/gudhi/figtree-0.9.3.zip | Bin 1229617 -> 0 bytes src/Kernels/include/gudhi/kernel.h | 447 +++++++++++++++++++++++++++ 7 files changed, 473 insertions(+), 644 deletions(-) create mode 100644 src/Kernels/example/CMakeLists.txt delete mode 100644 src/Kernels/include/gudhi/PSS.h delete mode 100644 src/Kernels/include/gudhi/PWG.h delete mode 100644 src/Kernels/include/gudhi/SW.h delete mode 100644 src/Kernels/include/gudhi/figtree-0.9.3.zip create mode 100644 src/Kernels/include/gudhi/kernel.h diff --git a/src/Kernels/example/CMakeLists.txt b/src/Kernels/example/CMakeLists.txt new file mode 100644 index 00000000..57e13004 --- /dev/null +++ b/src/Kernels/example/CMakeLists.txt @@ -0,0 +1,11 @@ +cmake_minimum_required(VERSION 2.6) 
+project(Kernels_examples) + +add_executable ( BasicEx kernel_basic_example.cpp ) + +if (TBB_FOUND) + target_link_libraries(BasicEx ${TBB_LIBRARIES}) +endif() + +add_test(NAME Kernels_example_basicex COMMAND $ + "") diff --git a/src/Kernels/example/kernel_basic_example.cpp b/src/Kernels/example/kernel_basic_example.cpp index 0a8d83b3..8e9925c5 100644 --- a/src/Kernels/example/kernel_basic_example.cpp +++ b/src/Kernels/example/kernel_basic_example.cpp @@ -2,9 +2,9 @@ * (Geometric Understanding in Higher Dimensions) is a generic C++ * library for computational topology. * - * Authors: Francois Godi, small modifications by Pawel Dlotko + * Authors: Mathieu Carrière * - * Copyright (C) 2015 INRIA + * Copyright (C) 2017 INRIA * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by @@ -20,18 +20,15 @@ * along with this program. If not, see . */ -#include -//#include -//#include - -#include -#include -#include // for pair -#include // for numeric_limits +#define NUMPI 3.14159265359 +#include int main() { + std::vector< std::pair > v1, v2; + double sigma = 2; double tau = 5; + v1.emplace_back(2.7, 3.7); v1.emplace_back(9.6, 14.); v1.emplace_back(34.2, 34.974); @@ -39,11 +36,13 @@ int main() { v2.emplace_back(2.8, 4.45); v2.emplace_back(9.5, 14.1); - - double b1 = Gudhi::sliced_wasserstein::compute_approximate_SW (v1, v2); - double b2 = Gudhi::sliced_wasserstein::compute_exact_SW (v1, v2); - - std::cout << "Approximate Sliced Wasserstein distance = " << b1 << std::endl; - std::cout << "Exact Sliced Wasserstein distance = " << b2 << std::endl; + std::cout << "SW exact = " << Gudhi::kernel::sw (v1, v2) << std::endl; + std::cout << "SW approx = " << Gudhi::kernel::approx_sw (v1, v2) << std::endl; + std::cout << "PSS exact = " << Gudhi::kernel::pss (v1,v2,sigma) << std::endl; + std::cout << "PSS approx = " << Gudhi::kernel::approx_pss (v1,v2,sigma) << std::endl; + std::cout << "PWG exact = " << Gudhi::kernel::lpwg (v1,v2,sigma) << std::endl; + std::cout << "PWG approx = " << Gudhi::kernel::approx_lpwg (v1,v2,sigma) << std::endl; + std::cout << "GPWG exact = " << Gudhi::kernel::gpwg (v1,v2,sigma,tau) << std::endl; + std::cout << "GPWG approx = " << Gudhi::kernel::approx_gpwg (v1,v2,sigma,tau) << std::endl; } diff --git a/src/Kernels/include/gudhi/PSS.h b/src/Kernels/include/gudhi/PSS.h deleted file mode 100644 index 5111a09f..00000000 --- a/src/Kernels/include/gudhi/PSS.h +++ /dev/null @@ -1,108 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#ifndef PSS_H_ -#define PSS_H_ - -#define NUMPI 3.14159265359 - -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include -#include - -#include -#include -#include -#include -#include - -#include "figtree.h" -#include "ANN.h" - -using PD = std::vector >; - -namespace Gudhi { -namespace persistence_scale_space { - -double compute_exact_pss(PD PD1, PD PD2, double sigma = 1){ - double k = 0; - for(int i = 0; i < PD1.size(); i++){ - for(int j = 0; j < PD2.size(); j++){ - k += exp( -( pow(PD1[i].first - PD2[j].first, 2) + pow(PD1[i].second - PD2[j].second, 2) )/(8*sigma)) -\ - exp( -( pow(PD1[i].first - PD2[j].second, 2) + pow(PD1[i].second - PD2[j].first, 2) )/(8*sigma)); - } - } - return k/(8*NUMPI*sigma); -} - -double compute_approximate_pss(PD PD1, PD PD2, double sigma = 1, double error = 1e-2){ - - double k = 0; - - int d = 2; int N = PD1.size(); int M = PD2.size(); double h = std::sqrt(8*sigma); - double* x = new double[2*N]; double* y = new double[2*M]; double* q = new double[N]; - for(int i = 0; i < N; i++){ - q[i] = 1.0/(8*NUMPI*sigma); - x[2*i] = PD1[i].first; x[2*i+1] = PD1[i].second; - } - for(int i = 0; i < M; i++){ y[2*i] = PD2[i].first; y[2*i+1] = PD2[i].second; } - double* g_auto = new double[M]; - memset(g_auto, 0, sizeof(double)*M); - - figtree(d, N, M, 1, x, h, q, y, error, g_auto); - for(int i = 0; i < M; i++) k += g_auto[i]; - - for(int i = 0; i < M; i++){ y[2*i] = PD2[i].second; y[2*i+1] = PD2[i].first; } - - figtree(d, N, M, 1, x, h, q, y, error, g_auto); - for(int i = 0; i < M; i++) k -= g_auto[i]; - - delete[] x; delete[] y; delete[] q; delete[] g_auto; - return k; -} - -} // namespace persistence_scale_space - -} // namespace Gudhi - -#endif // PSS_H_ diff --git a/src/Kernels/include/gudhi/PWG.h b/src/Kernels/include/gudhi/PWG.h deleted file mode 100644 index bc491ae7..00000000 --- a/src/Kernels/include/gudhi/PWG.h +++ /dev/null @@ -1,204 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#ifndef PWG_H_ -#define PWG_H_ - -#define NUMPI 3.14159265359 - -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include -#include - -#include -#include -#include -#include -#include - -using PD = std::vector >; - -namespace Gudhi { -namespace persistence_weighted_gaussian { - -double compute_exact_linear_pwg(PD PD1, PD PD2, double sigma, double C, int p){ - - int num_pts1 = PD1.size(); - int num_pts2 = PD2.size(); - - double k = 0; - for(int i = 0; i < num_pts1; i++){ - for(int j = 0; j < num_pts2; j++){ - k += atan(C*pow(PD1[i].second-PD1[i].first,p))*atan(C*pow(PD2[j].second-PD2[j].first,p))*\ - exp( -( pow(PD1[i].first-PD2[j].first,2) + pow(PD1[i].second-PD2[j].second,2) )/(2*pow(sigma,2)) ); - } - } - - return k; - -} - -double compute_exact_gaussian_pwg(PD PD1, PD PD2, double sigma, double C, int p, double tau){ - - int num_pts1 = PD1.size(); - int num_pts2 = PD2.size(); - - double k1 = 0; - for(int i = 0; i < num_pts1; i++){ - for(int j = 0; j < num_pts1; j++){ - k1 += atan(C*pow(PD1[i].second-PD1[i].first,p))*atan(C*pow(PD1[j].second-PD1[j].first,p))*\ - exp( -( pow(PD1[i].first-PD1[j].first,2) + pow(PD1[i].second-PD1[j].second,2) )/(2*pow(sigma,2)) ); - } - } - - double k2 = 0; - for(int i = 0; i < num_pts2; i++){ - for(int j = 0; j < num_pts2; j++){ - k2 += atan(C*pow(PD2[i].second-PD2[i].first,p))*atan(C*pow(PD2[j].second-PD2[j].first,p))*\ - exp( -( pow(PD2[i].first-PD2[j].first,2) + pow(PD2[i].second-PD2[j].second,2) )/(2*pow(sigma,2)) ); - } - } - - double k3 = compute_exact_linear_pwg(PD1,PD2,sigma,C,p); - return exp( - (k1+k2-2*k3) / (2*pow(tau,2)) ); - -} - -double compute_exact_gaussian_RKHSdist(PD PD1, PD PD2, double sigma, double C, int p){ - - int num_pts1 = PD1.size(); - int num_pts2 = PD2.size(); - - double k1 = 0; - for(int i = 0; i < num_pts1; i++){ - for(int j = 0; j < num_pts1; j++){ - k1 += atan(C*pow(PD1[i].second-PD1[i].first,p))*atan(C*pow(PD1[j].second-PD1[j].first,p))*\ - exp( -( pow(PD1[i].first-PD1[j].first,2) + pow(PD1[i].second-PD1[j].second,2) )/(2*pow(sigma,2)) ); - } - } - - double k2 = 0; - for(int i = 0; i < num_pts2; i++){ - for(int j = 0; j < num_pts2; j++){ - k2 += atan(C*pow(PD2[i].second-PD2[i].first,p))*atan(C*pow(PD2[j].second-PD2[j].first,p))*\ - exp( -( pow(PD2[i].first-PD2[j].first,2) + pow(PD2[i].second-PD2[j].second,2) )/(2*pow(sigma,2)) ); - } - } - - double k3 = compute_exact_linear_pwg(PD1,PD2,sigma,C,p); - return std::sqrt(k1+k2-2*k3); - -} - -double compute_approximate_linear_pwg_from_Fourier_features(const std::vector >& B1, \ - const std::vector >& B2){ - double d = 0; int M = B1.size(); - for(int i = 0; i < M; i++) d += B1[i].first*B2[i].first + B1[i].second*B2[i].second; - return (1.0/M)*d; -} - -double compute_approximate_gaussian_pwg_from_Fourier_features(const std::vector >& B1, \ - const std::vector >& B2, double tau){ - int M = B1.size(); - double d3 = compute_approximate_linear_pwg_from_Fourier_features(B1, B2); - double d1 = 0; double d2 = 0; - for(int i = 0; i < M; i++){d1 += pow(B1[i].first,2) + pow(B1[i].second,2); d2 += pow(B2[i].first,2) + pow(B2[i].second,2);} - return exp( -((1.0/M)*(d1+d2)-2*d3) / (2*pow(tau,2)) ); -} - -double compute_approximate_gaussian_RKHSdist_from_Fourier_features(const std::vector >& B1, \ - const std::vector >& B2){ - int M = B1.size(); - double d3 = compute_approximate_linear_pwg_from_Fourier_features(B1, B2); - double d1 = 0; double d2 = 0; 
- for(int i = 0; i < M; i++){d1 += pow(B1[i].first,2) + pow(B1[i].second,2); d2 += pow(B2[i].first,2) + pow(B2[i].second,2);} - return std::sqrt((1.0/M)*(d1+d2)-2*d3); -} - -std::vector > compute_Fourier_features(double C, int p, PD D, std::vector > Z){ - int m = D.size(); std::vector > B; int M = Z.size(); - for(int i = 0; i < M; i++){ - double d1 = 0; double d2 = 0; double zx = Z[i].first; double zy = Z[i].second; - for(int j = 0; j < m; j++){ - double x = D[j].first; double y = D[j].second; - d1 += atan(C*pow(y-x,p))*cos(x*zx + y*zy); - d2 += atan(C*pow(y-x,p))*sin(x*zx + y*zy); - } - B.push_back(std::pair(d1,d2)); - } - return B; -} - -std::vector > random_Fourier(double sigma, int M = 1000){ - std::normal_distribution distrib(0,1); std::vector > Z; - std::random_device rd; - for(int i = 0; i < M; i++){ - //unsigned seedx = 2*i; unsigned seedy = 2*i+1; - //std::default_random_engine generatorx(seedx); std::default_random_engine generatory(seedy); - std::mt19937 e1(rd()); std::mt19937 e2(rd()); - double zx = distrib(e1/*generatorx*/); double zy = distrib(e2/*generatory*/); - Z.push_back(std::pair((1/sigma)*zx,(1/sigma)*zy)); - } - return Z; -} - -double compute_approximate_linear_pwg(PD PD1, PD PD2, double sigma, double C, int p, int M = 1000){ - std::vector > Z = random_Fourier(sigma, M); - std::vector > B1 = compute_Fourier_features(C,p,PD1,Z); - std::vector > B2 = compute_Fourier_features(C,p,PD2,Z); - return compute_approximate_linear_pwg_from_Fourier_features(B1,B2); -} - -double compute_approximate_gaussian_pwg(PD PD1, PD PD2, double sigma, double C, int p, double tau, int M = 1000){ - std::vector > Z = random_Fourier(sigma, M); - std::vector > B1 = compute_Fourier_features(C,p,PD1,Z); - std::vector > B2 = compute_Fourier_features(C,p,PD2,Z); - return compute_approximate_gaussian_pwg_from_Fourier_features(B1,B2,tau); -} - - -} // namespace persistence_weighted_gaussian - -} // namespace Gudhi - -#endif //PWG_H_ diff --git a/src/Kernels/include/gudhi/SW.h b/src/Kernels/include/gudhi/SW.h deleted file mode 100644 index 0b041252..00000000 --- a/src/Kernels/include/gudhi/SW.h +++ /dev/null @@ -1,316 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#ifndef SW_H_ -#define SW_H_ - -#define NUMPI 3.14159265359 - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -using PD = std::vector >; - -bool sortAngle(const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);} -bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;} - -namespace Gudhi { -namespace sliced_wasserstein { - - -// ******************************************************************** -// Approximate computation. -// ******************************************************************** - -/** \brief Computes an approximation of the Sliced Wasserstein distance between two persistence diagrams - * - * @param[in] N number of points sampled on the circle. - * - */ - -double compute_approximate_SW(PD PD1, PD PD2, int N = 100){ - - double step = NUMPI/N; double sw = 0; - - // Add projections onto diagonal. - int n1, n2; n1 = PD1.size(); n2 = PD2.size(); - for (int i = 0; i < n2; i++) - PD1.push_back(std::pair( (PD2[i].first + PD2[i].second)/2, (PD2[i].first + PD2[i].second)/2) ); - for (int i = 0; i < n1; i++) - PD2.push_back(std::pair( (PD1[i].first + PD1[i].second)/2, (PD1[i].first + PD1[i].second)/2) ); - int n = PD1.size(); - - // Sort and compare all projections. - //#pragma omp parallel for - for (int i = 0; i < N; i++){ - std::vector > L1, L2; - for (int j = 0; j < n; j++){ - L1.push_back( std::pair(j, PD1[j].first*cos(-NUMPI/2+i*step) + PD1[j].second*sin(-NUMPI/2+i*step)) ); - L2.push_back( std::pair(j, PD2[j].first*cos(-NUMPI/2+i*step) + PD2[j].second*sin(-NUMPI/2+i*step)) ); - } - std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp); - double f = 0; for (int j = 0; j < n; j++) f += std::abs(L1[j].second - L2[j].second); - sw += f*step; - } - return sw/NUMPI; -} - - - - - - - - - - - - - - - - - - - -// ******************************************************************** -// Exact computation. 
-// ******************************************************************** - - - -// Compute the angle formed by two points of a PD -double compute_angle(const PD & PersDiag, const int & i, const int & j){ - std::pair vect; double x1,y1, x2,y2; - x1 = PersDiag[i].first; y1 = PersDiag[i].second; - x2 = PersDiag[j].first; y2 = PersDiag[j].second; - if (y1 - y2 > 0){ - vect.first = y1 - y2; - vect.second = x2 - x1;} - else{ - if(y1 - y2 < 0){ - vect.first = y2 - y1; - vect.second = x1 - x2; - } - else{ - vect.first = 0; - vect.second = abs(x1 - x2);} - } - double norm = std::sqrt(pow(vect.first,2) + pow(vect.second,2)); - return asin(vect.second/norm); -} - -// Compute the integral of |cos()| between alpha and beta -// Valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] -double compute_int_cos(const double & alpha, const double & beta){ - double res = 0; - assert((alpha >= 0 && alpha <= NUMPI) || (alpha >= -NUMPI && alpha <= 0)); - if (alpha >= 0 && alpha <= NUMPI){ - if (cos(alpha) >= 0){ - if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} - else{res = sin(beta)-sin(alpha);} - } - else{ - if(1.5*NUMPI <= beta){res = 2+sin(alpha)+sin(beta);} - else{res = sin(alpha)-sin(beta);} - } - } - if (alpha >= -NUMPI && alpha <= 0){ - if (cos(alpha) <= 0){ - if(-NUMPI/2 <= beta){res = 2+sin(alpha)+sin(beta);} - else{res = sin(alpha)-sin(beta);} - } - else{ - if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} - else{res = sin(beta)-sin(alpha);} - } - } - return res; -} - -double compute_int(const double & theta1, const double & theta2, const int & p, const int & q, const PD & PD1, const PD & PD2){ - double norm = std::sqrt(pow(PD1[p].first-PD2[q].first,2) + pow(PD1[p].second-PD2[q].second,2)); - double angle1; - if (PD1[p].first > PD2[q].first) - angle1 = theta1 - asin( (PD1[p].second-PD2[q].second)/norm ); - else - angle1 = theta1 - asin( (PD2[q].second-PD1[p].second)/norm ); - double angle2 = angle1 + theta2 - theta1; - double integral = compute_int_cos(angle1,angle2); - return norm*integral; -} - - - -double compute_sw(const std::vector > > & V1, const std::vector > > & V2, const PD & PD1, const PD & PD2){ - int N = V1.size(); double sw = 0; - for (int i = 0; i < N; i++){ - std::vector > U,V; U = V1[i]; V = V2[i]; - double theta1, theta2; theta1 = -NUMPI/2; - unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); - while(theta1 != NUMPI/2){ - if(PD1[U[ku].first].first != PD2[V[kv].first].first || PD1[U[ku].first].second != PD2[V[kv].first].second) - if(theta1 != theta2) - sw += Gudhi::sliced_wasserstein::compute_int(theta1, theta2, U[ku].first, V[kv].first, PD1, PD2); - theta1 = theta2; - if ( (theta2 == U[ku].second) && ku < U.size()-1 ) ku++; - if ( (theta2 == V[kv].second) && kv < V.size()-1 ) kv++; - theta2 = std::min(U[ku].second, V[kv].second); - } - } - return sw/NUMPI; -} - - - - - - - - - - - - - - - - - - - -/** \brief Computes the Sliced Wasserstein distance between two persistence diagrams - * - */ - -double compute_exact_SW(PD PD1, PD PD2){ - - // Add projections onto diagonal. 
- int n1, n2; n1 = PD1.size(); n2 = PD2.size(); double max_ordinate = std::numeric_limits::lowest(); - for (int i = 0; i < n2; i++){ - max_ordinate = std::max(max_ordinate, PD2[i].second); - PD1.push_back( std::pair( ((PD2[i].first+PD2[i].second)/2), ((PD2[i].first+PD2[i].second)/2) ) ); - } - for (int i = 0; i < n1; i++){ - max_ordinate = std::max(max_ordinate, PD1[i].second); - PD2.push_back( std::pair( ((PD1[i].first+PD1[i].second)/2), ((PD1[i].first+PD1[i].second)/2) ) ); - } - int N = PD1.size(); assert(N==PD2.size()); - - // Slightly perturb the points so that the PDs are in generic positions. - int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} - double thresh = pow(10,-5+mag); - srand(time(NULL)); - for (int i = 0; i < N; i++){ - PD1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - PD2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - } - - // Compute all angles in both PDs. - std::vector > > angles1, angles2; - for (int i = 0; i < N; i++){ - for (int j = i+1; j < N; j++){ - double theta1 = Gudhi::sliced_wasserstein::compute_angle(PD1,i,j); double theta2 = Gudhi::sliced_wasserstein::compute_angle(PD2,i,j); - angles1.push_back(std::pair >(theta1, std::pair(i,j))); - angles2.push_back(std::pair >(theta2, std::pair(i,j))); - } - } - - // Sort angles. - std::sort(angles1.begin(), angles1.end(), sortAngle); std::sort(angles2.begin(), angles2.end(), sortAngle); - - // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). - std::vector orderp1, orderp2; - for (int i = 0; i < N; i++){ orderp1.push_back(i); orderp2.push_back(i); } - std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(PD1[i].second != PD1[j].second) return (PD1[i].second < PD1[j].second); else return (PD1[i].first > PD1[j].first); } ); - std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(PD2[i].second != PD2[j].second) return (PD2[i].second < PD2[j].second); else return (PD2[i].first > PD2[j].first); } ); - - // Find the inverses of the orders. - std::vector order1(N); std::vector order2(N); - for(int i = 0; i < N; i++) for (int j = 0; j < N; j++) if(orderp1[j] == i){ order1[i] = j; break; } - for(int i = 0; i < N; i++) for (int j = 0; j < N; j++) if(orderp2[j] == i){ order2[i] = j; break; } - - // Record all inversions of points in the orders as theta varies along the positive half-disk. - std::vector > > anglePerm1(N); - std::vector > > anglePerm2(N); - - int M1 = angles1.size(); - for (int i = 0; i < M1; i++){ - double theta = angles1[i].first; int p = angles1[i].second.first; int q = angles1[i].second.second; - anglePerm1[order1[p]].push_back(std::pair(p,theta)); - anglePerm1[order1[q]].push_back(std::pair(q,theta)); - int a = order1[p]; int b = order1[q]; order1[p] = b; order1[q] = a; - } - - int M2 = angles2.size(); - for (int i = 0; i < M2; i++){ - double theta = angles2[i].first; int p = angles2[i].second.first; int q = angles2[i].second.second; - anglePerm2[order2[p]].push_back(std::pair(p,theta)); - anglePerm2[order2[q]].push_back(std::pair(q,theta)); - int a = order2[p]; int b = order2[q]; order2[p] = b; order2[q] = a; - } - - for (int i = 0; i < N; i++){ - anglePerm1[order1[i]].push_back(std::pair(i,NUMPI/2)); - anglePerm2[order2[i]].push_back(std::pair(i,NUMPI/2)); - } - - // Compute the SW distance with the list of inversions. 
- return Gudhi::sliced_wasserstein::compute_sw(anglePerm1, anglePerm2, PD1, PD2); - -} - - - - - - -}} - -#endif - - diff --git a/src/Kernels/include/gudhi/figtree-0.9.3.zip b/src/Kernels/include/gudhi/figtree-0.9.3.zip deleted file mode 100644 index a9468274..00000000 Binary files a/src/Kernels/include/gudhi/figtree-0.9.3.zip and /dev/null differ diff --git a/src/Kernels/include/gudhi/kernel.h b/src/Kernels/include/gudhi/kernel.h new file mode 100644 index 00000000..c4120d7a --- /dev/null +++ b/src/Kernels/include/gudhi/kernel.h @@ -0,0 +1,447 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef KERNEL_H_ +#define KERNEL_H_ + +#define NUMPI 3.14159265359 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using PD = std::vector >; +bool sortAngle(const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);} +bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;} + +namespace Gudhi { +namespace kernel { + + +double pss_weight(std::pair P){ + if(P.second > P.first) return 1; + else return -1; +} + + + + +// ******************************************************************** +// Exact computation. +// ******************************************************************** + +/** \brief Computes the Linear Persistence Weighted Gaussian Kernel between two persistence diagrams. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] weight weight function for the points in the diagrams. + * + */ +double lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ + int num_pts1 = PD1.size(); int num_pts2 = PD2.size(); double k = 0; + for(int i = 0; i < num_pts1; i++) + for(int j = 0; j < num_pts2; j++) + k += (*weight)(PD1[i])*(*weight)(PD2[j])*exp(-(pow(PD1[i].first-PD2[j].first,2) + pow(PD1[i].second-PD2[j].second,2))/(2*pow(sigma,2))); + return k; +} + +/** \brief Computes the Persistence Scale Space Kernel between two persistence diagrams. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. 
+ * + */ +double pss(PD PD1, PD PD2, double sigma){ + PD pd1 = PD1; int numpts = PD1.size(); for(int i = 0; i < numpts; i++) pd1.push_back(std::pair(PD1[i].second,PD1[i].first)); + PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.push_back(std::pair(PD2[i].second,PD2[i].first)); + return lpwg(pd1, pd2, 2*sqrt(sigma), &pss_weight) / (2*8*NUMPI*sigma); +} + +/** \brief Computes the Gaussian Persistence Weighted Gaussian Kernel between two persistence diagrams. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] tau bandwidth parameter of the Gaussian Kernel used between the embeddings. + * @param[in] weight weight function for the points in the diagrams. + * + */ +double gpwg(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ + double k1 = lpwg(PD1,PD1,sigma,weight); + double k2 = lpwg(PD2,PD2,sigma,weight); + double k3 = lpwg(PD1,PD2,sigma,weight); + return exp( - (k1+k2-2*k3) / (2*pow(tau,2)) ); +} + +/** \brief Computes the RKHS distance induced by the Gaussian Kernel Embedding between two persistence diagrams. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] weight weight function for the points in the diagrams. + * + */ +double dpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ + double k1 = lpwg(PD1,PD1,sigma,weight); + double k2 = lpwg(PD2,PD2,sigma,weight); + double k3 = lpwg(PD1,PD2,sigma,weight); + return std::sqrt(k1+k2-2*k3); +} + +// Compute the angle formed by two points of a PD +double compute_angle(const PD & PersDiag, const int & i, const int & j){ + std::pair vect; double x1,y1, x2,y2; + x1 = PersDiag[i].first; y1 = PersDiag[i].second; + x2 = PersDiag[j].first; y2 = PersDiag[j].second; + if (y1 - y2 > 0){ + vect.first = y1 - y2; + vect.second = x2 - x1;} + else{ + if(y1 - y2 < 0){ + vect.first = y2 - y1; + vect.second = x1 - x2; + } + else{ + vect.first = 0; + vect.second = abs(x1 - x2);} + } + double norm = std::sqrt(pow(vect.first,2) + pow(vect.second,2)); + return asin(vect.second/norm); +} + +// Compute the integral of |cos()| between alpha and beta +// Valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] +double compute_int_cos(const double & alpha, const double & beta){ + double res = 0; + assert((alpha >= 0 && alpha <= NUMPI) || (alpha >= -NUMPI && alpha <= 0)); + if (alpha >= 0 && alpha <= NUMPI){ + if (cos(alpha) >= 0){ + if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} + else{res = sin(beta)-sin(alpha);} + } + else{ + if(1.5*NUMPI <= beta){res = 2+sin(alpha)+sin(beta);} + else{res = sin(alpha)-sin(beta);} + } + } + if (alpha >= -NUMPI && alpha <= 0){ + if (cos(alpha) <= 0){ + if(-NUMPI/2 <= beta){res = 2+sin(alpha)+sin(beta);} + else{res = sin(alpha)-sin(beta);} + } + else{ + if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} + else{res = sin(beta)-sin(alpha);} + } + } + return res; +} + +double compute_int(const double & theta1, const double & theta2, const int & p, const int & q, const PD & PD1, const PD & PD2){ + double norm = std::sqrt(pow(PD1[p].first-PD2[q].first,2) + pow(PD1[p].second-PD2[q].second,2)); + double angle1; + if (PD1[p].first 
> PD2[q].first) + angle1 = theta1 - asin( (PD1[p].second-PD2[q].second)/norm ); + else + angle1 = theta1 - asin( (PD2[q].second-PD1[p].second)/norm ); + double angle2 = angle1 + theta2 - theta1; + double integral = compute_int_cos(angle1,angle2); + return norm*integral; +} + + + +double compute_sw(const std::vector > > & V1, const std::vector > > & V2, const PD & PD1, const PD & PD2){ + int N = V1.size(); double sw = 0; + for (int i = 0; i < N; i++){ + std::vector > U,V; U = V1[i]; V = V2[i]; + double theta1, theta2; theta1 = -NUMPI/2; + unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); + while(theta1 != NUMPI/2){ + if(PD1[U[ku].first].first != PD2[V[kv].first].first || PD1[U[ku].first].second != PD2[V[kv].first].second) + if(theta1 != theta2) + sw += compute_int(theta1, theta2, U[ku].first, V[kv].first, PD1, PD2); + theta1 = theta2; + if ( (theta2 == U[ku].second) && ku < U.size()-1 ) ku++; + if ( (theta2 == V[kv].second) && kv < V.size()-1 ) kv++; + theta2 = std::min(U[ku].second, V[kv].second); + } + } + return sw/NUMPI; +} + +/** \brief Computes the Sliced Wasserstein distance between two persistence diagrams. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * + */ + double sw(PD PD1, PD PD2){ + + // Add projections onto diagonal. + int n1, n2; n1 = PD1.size(); n2 = PD2.size(); double max_ordinate = std::numeric_limits::lowest(); + for (int i = 0; i < n2; i++){ + max_ordinate = std::max(max_ordinate, PD2[i].second); + PD1.push_back( std::pair( ((PD2[i].first+PD2[i].second)/2), ((PD2[i].first+PD2[i].second)/2) ) ); + } + for (int i = 0; i < n1; i++){ + max_ordinate = std::max(max_ordinate, PD1[i].second); + PD2.push_back( std::pair( ((PD1[i].first+PD1[i].second)/2), ((PD1[i].first+PD1[i].second)/2) ) ); + } + int N = PD1.size(); assert(N==PD2.size()); + + // Slightly perturb the points so that the PDs are in generic positions. + int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} + double thresh = pow(10,-5+mag); + srand(time(NULL)); + for (int i = 0; i < N; i++){ + PD1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + PD2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + } + + // Compute all angles in both PDs. + std::vector > > angles1, angles2; + for (int i = 0; i < N; i++){ + for (int j = i+1; j < N; j++){ + double theta1 = compute_angle(PD1,i,j); double theta2 = compute_angle(PD2,i,j); + angles1.push_back(std::pair >(theta1, std::pair(i,j))); + angles2.push_back(std::pair >(theta2, std::pair(i,j))); + } + } + + // Sort angles. + std::sort(angles1.begin(), angles1.end(), sortAngle); std::sort(angles2.begin(), angles2.end(), sortAngle); + + // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). + std::vector orderp1, orderp2; + for (int i = 0; i < N; i++){ orderp1.push_back(i); orderp2.push_back(i); } + std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(PD1[i].second != PD1[j].second) return (PD1[i].second < PD1[j].second); else return (PD1[i].first > PD1[j].first); } ); + std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(PD2[i].second != PD2[j].second) return (PD2[i].second < PD2[j].second); else return (PD2[i].first > PD2[j].first); } ); + + // Find the inverses of the orders. 
+ std::vector order1(N); std::vector order2(N); + for(int i = 0; i < N; i++) for (int j = 0; j < N; j++) if(orderp1[j] == i){ order1[i] = j; break; } + for(int i = 0; i < N; i++) for (int j = 0; j < N; j++) if(orderp2[j] == i){ order2[i] = j; break; } + + // Record all inversions of points in the orders as theta varies along the positive half-disk. + std::vector > > anglePerm1(N); + std::vector > > anglePerm2(N); + + int M1 = angles1.size(); + for (int i = 0; i < M1; i++){ + double theta = angles1[i].first; int p = angles1[i].second.first; int q = angles1[i].second.second; + anglePerm1[order1[p]].push_back(std::pair(p,theta)); + anglePerm1[order1[q]].push_back(std::pair(q,theta)); + int a = order1[p]; int b = order1[q]; order1[p] = b; order1[q] = a; + } + + int M2 = angles2.size(); + for (int i = 0; i < M2; i++){ + double theta = angles2[i].first; int p = angles2[i].second.first; int q = angles2[i].second.second; + anglePerm2[order2[p]].push_back(std::pair(p,theta)); + anglePerm2[order2[q]].push_back(std::pair(q,theta)); + int a = order2[p]; int b = order2[q]; order2[p] = b; order2[q] = a; + } + + for (int i = 0; i < N; i++){ + anglePerm1[order1[i]].push_back(std::pair(i,NUMPI/2)); + anglePerm2[order2[i]].push_back(std::pair(i,NUMPI/2)); + } + + // Compute the SW distance with the list of inversions. + return compute_sw(anglePerm1, anglePerm2, PD1, PD2); + +} + + + + + + + + + +// ******************************************************************** +// Approximate computation. +// ******************************************************************** + +double approx_lpwg_Fourier(const std::vector >& B1, const std::vector >& B2){ + double d = 0; int M = B1.size(); + for(int i = 0; i < M; i++) d += B1[i].first*B2[i].first + B1[i].second*B2[i].second; + return (1.0/M)*d; +} + +double approx_gpwg_Fourier(const std::vector >& B1, const std::vector >& B2, double tau){ + int M = B1.size(); + double d3 = approx_lpwg_Fourier(B1, B2); + double d1 = 0; double d2 = 0; + for(int i = 0; i < M; i++){d1 += pow(B1[i].first,2) + pow(B1[i].second,2); d2 += pow(B2[i].first,2) + pow(B2[i].second,2);} + return exp( -((1.0/M)*(d1+d2)-2*d3) / (2*pow(tau,2)) ); +} + +double approx_dpwg_Fourier(const std::vector >& B1, const std::vector >& B2){ + int M = B1.size(); + double d3 = approx_lpwg_Fourier(B1, B2); + double d1 = 0; double d2 = 0; + for(int i = 0; i < M; i++){d1 += pow(B1[i].first,2) + pow(B1[i].second,2); d2 += pow(B2[i].first,2) + pow(B2[i].second,2);} + return std::sqrt((1.0/M)*(d1+d2)-2*d3); +} + +std::vector > Fourier_feat(PD D, std::vector > Z, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ + int m = D.size(); std::vector > B; int M = Z.size(); + for(int i = 0; i < M; i++){ + double d1 = 0; double d2 = 0; double zx = Z[i].first; double zy = Z[i].second; + for(int j = 0; j < m; j++){ + double x = D[j].first; double y = D[j].second; + d1 += (*weight)(D[j])*cos(x*zx + y*zy); + d2 += (*weight)(D[j])*sin(x*zx + y*zy); + } + B.push_back(std::pair(d1,d2)); + } + return B; +} + +std::vector > random_Fourier(double sigma, int M = 1000){ + std::normal_distribution distrib(0,1); std::vector > Z; std::random_device rd; + for(int i = 0; i < M; i++){ + std::mt19937 e1(rd()); std::mt19937 e2(rd()); + double zx = distrib(e1); double zy = distrib(e2); + Z.push_back(std::pair((1.0/sigma)*zx,(1.0/sigma)*zy)); + } + return Z; +} + + +/** \brief Computes an approximation of the Linear Persistence Weighted Gaussian Kernel between two persistence diagrams with random Fourier features. 
+ * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] weight weight function for the points in the diagrams. + * @param[in] M number of Fourier features. + * + */ +double approx_lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ + std::vector > Z = random_Fourier(sigma, M); + std::vector > B1 = Fourier_feat(PD1,Z,weight); + std::vector > B2 = Fourier_feat(PD2,Z,weight); + return approx_lpwg_Fourier(B1,B2); +} + +/** \brief Computes an approximation of the Persistence Scale Space Kernel between two persistence diagrams with random Fourier features. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] M number of Fourier features. + * + */ +double approx_pss(PD PD1, PD PD2, double sigma, int M = 1000){ + PD pd1 = PD1; int numpts = PD1.size(); for(int i = 0; i < numpts; i++) pd1.push_back(std::pair(PD1[i].second,PD1[i].first)); + PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.push_back(std::pair(PD2[i].second,PD2[i].first)); + return approx_lpwg(pd1, pd2, 2*sqrt(sigma), &pss_weight, M) / (2*8*NUMPI*sigma); +} + + +/** \brief Computes an approximation of the Gaussian Persistence Weighted Gaussian Kernel between two persistence diagrams with random Fourier features. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] tau bandwidth parameter of the Gaussian Kernel used between the embeddings. + * @param[in] weight weight function for the points in the diagrams. + * @param[in] M number of Fourier features. + * + */ +double approx_gpwg(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ + std::vector > Z = random_Fourier(sigma, M); + std::vector > B1 = Fourier_feat(PD1,Z,weight); + std::vector > B2 = Fourier_feat(PD2,Z,weight); + return approx_gpwg_Fourier(B1,B2,tau); +} + + +/** \brief Computes an approximation of the Sliced Wasserstein distance between two persistence diagrams. + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] N number of points sampled on the circle. + * + */ +double approx_sw(PD PD1, PD PD2, int N = 100){ + + double step = NUMPI/N; double sw = 0; + + // Add projections onto diagonal. + int n1, n2; n1 = PD1.size(); n2 = PD2.size(); + for (int i = 0; i < n2; i++) + PD1.push_back(std::pair( (PD2[i].first + PD2[i].second)/2, (PD2[i].first + PD2[i].second)/2) ); + for (int i = 0; i < n1; i++) + PD2.push_back(std::pair( (PD1[i].first + PD1[i].second)/2, (PD1[i].first + PD1[i].second)/2) ); + int n = PD1.size(); + + // Sort and compare all projections. 
+ //#pragma omp parallel for + for (int i = 0; i < N; i++){ + std::vector > L1, L2; + for (int j = 0; j < n; j++){ + L1.push_back( std::pair(j, PD1[j].first*cos(-NUMPI/2+i*step) + PD1[j].second*sin(-NUMPI/2+i*step)) ); + L2.push_back( std::pair(j, PD2[j].first*cos(-NUMPI/2+i*step) + PD2[j].second*sin(-NUMPI/2+i*step)) ); + } + std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp); + double f = 0; for (int j = 0; j < n; j++) f += std::abs(L1[j].second - L2[j].second); + sw += f*step; + } + return sw/NUMPI; +} + + + +} // namespace kernel + +} // namespace Gudhi + +#endif //KERNEL_H_ -- cgit v1.2.3 From 5708c93251625133598739f42ed106aac83bf18a Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 29 Dec 2017 23:39:32 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3107 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d0f6606d7afde132a4d86203eeff80e97f35adce --- src/Kernels/doc/COPYRIGHT | 19 ++++++++++++ src/Kernels/doc/Intro_kernels.h | 61 ++++++++++++++++++++++++++++++++++++++ src/Kernels/include/gudhi/kernel.h | 9 ++++++ 3 files changed, 89 insertions(+) create mode 100644 src/Kernels/doc/COPYRIGHT create mode 100644 src/Kernels/doc/Intro_kernels.h diff --git a/src/Kernels/doc/COPYRIGHT b/src/Kernels/doc/COPYRIGHT new file mode 100644 index 00000000..0c36a526 --- /dev/null +++ b/src/Kernels/doc/COPYRIGHT @@ -0,0 +1,19 @@ +The files of this directory are part of the Gudhi Library. The Gudhi library +(Geometric Understanding in Higher Dimensions) is a generic C++ library for +computational topology. + +Author(s): Mathieu Carrière + +Copyright (C) 2017 INRIA + +This program is free software: you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation, either version 3 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program. If not, see . diff --git a/src/Kernels/doc/Intro_kernels.h b/src/Kernels/doc/Intro_kernels.h new file mode 100644 index 00000000..be97a6cf --- /dev/null +++ b/src/Kernels/doc/Intro_kernels.h @@ -0,0 +1,61 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef DOC_KERNEL_INTRO_KERNEL_H_ +#define DOC_KERNEL_INTRO_KERNEL_H_ + +namespace Gudhi { + +namespace kernel { + +/** \defgroup kernel Kernels + * + * \author Mathieu Carrière + * + * @{ + * + * Kernels are generalized scalar products. 
They take the form of functions whose evaluations on pairs of persistence diagrams are equal + * to the scalar products of the images of the diagrams under some feature map into a (generally unknown and infinite dimensional) + * Hilbert space. Kernels are + * very useful to handle any type of data for algorithms that require at least a Hilbert structure, such as Principal Component Analysis + * or Support Vector Machines. In this package, we implement three kernels for persistence diagrams: the Persistence Scale Space kernel, + * the Persistence Weighted Gaussian kernel and the Sliced Wasserstein kernel. + * + * + * When launching: + * + * \code $> ./BasicEx + * \endcode + * + * the program output is: + * + * + * \copyright GNU General Public License v3. + * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim + */ +/** @} */ // end defgroup kernel + +} // namespace kernel + +} // namespace Gudhi + +#endif // DOC_KERNEL_INTRO_KERNEL_H_ diff --git a/src/Kernels/include/gudhi/kernel.h b/src/Kernels/include/gudhi/kernel.h index c4120d7a..6429efed 100644 --- a/src/Kernels/include/gudhi/kernel.h +++ b/src/Kernels/include/gudhi/kernel.h @@ -72,6 +72,7 @@ double pss_weight(std::pair P){ // ******************************************************************** /** \brief Computes the Linear Persistence Weighted Gaussian Kernel between two persistence diagrams. + * \ingroup kernel * * @param[in] PD1 first persistence diagram. * @param[in] PD2 second persistence diagram. @@ -88,6 +89,7 @@ double lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair > > & V1, } /** \brief Computes the Sliced Wasserstein distance between two persistence diagrams. + * \ingroup kernel * * @param[in] PD1 first persistence diagram. * @param[in] PD2 second persistence diagram. @@ -356,6 +361,7 @@ std::vector > random_Fourier(double sigma, int M = 1000 /** \brief Computes an approximation of the Linear Persistence Weighted Gaussian Kernel between two persistence diagrams with random Fourier features. + * \ingroup kernel * * @param[in] PD1 first persistence diagram. * @param[in] PD2 second persistence diagram. 
@@ -372,6 +378,7 @@ double approx_lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair Date: Wed, 3 Jan 2018 15:30:49 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3110 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 95fd5b9e4cd4738ecdeaeb27f20d267816b7bead --- biblio/bibliography.bib | 22 +++++++ data/persistence_diagram/PD1 | 3 + data/persistence_diagram/PD2 | 2 + src/Kernels/doc/Intro_kernels.h | 53 ++++++++++++++++- src/Kernels/example/kernel.txt | 8 +++ src/Kernels/example/kernel_basic_example.cpp | 48 ++++++++++------ src/Kernels/include/gudhi/kernel.h | 86 +++++++++++++++++----------- src/Kernels/test/CMakeLists.txt | 14 +++++ src/Kernels/test/test_kernel.cpp | 56 ++++++++++++++++++ 9 files changed, 239 insertions(+), 53 deletions(-) create mode 100644 data/persistence_diagram/PD1 create mode 100644 data/persistence_diagram/PD2 create mode 100644 src/Kernels/example/kernel.txt create mode 100644 src/Kernels/test/CMakeLists.txt create mode 100644 src/Kernels/test/test_kernel.cpp diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index b101cb76..e56734e4 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -1072,3 +1072,25 @@ language={English} + +@InProceedings{pmlr-v70-carriere17a, + title = {Sliced {W}asserstein Kernel for Persistence Diagrams}, + author = {Mathieu Carri{\`e}re and Marco Cuturi and Steve Oudot}, + booktitle = {Proceedings of the 34th International Conference on Machine Learning}, + pages = {664--673}, + year = {2017}, + editor = {Doina Precup and Yee Whye Teh}, + volume = {70}, + series = {Proceedings of Machine Learning Research}, + address = {International Convention Centre, Sydney, Australia}, + month = {06--11 Aug}, + publisher = {PMLR}, +} + +@INPROCEEDINGS{Rahimi07randomfeatures, + author = {Ali Rahimi and Ben Recht}, + title = {Random features for large-scale kernel machines}, + booktitle = {In Neural Information Processing Systems}, + year = {2007} +} + diff --git a/data/persistence_diagram/PD1 b/data/persistence_diagram/PD1 new file mode 100644 index 00000000..404199b4 --- /dev/null +++ b/data/persistence_diagram/PD1 @@ -0,0 +1,3 @@ +2.7 3.7 +9.6 14 +34.2 34.974 \ No newline at end of file diff --git a/data/persistence_diagram/PD2 b/data/persistence_diagram/PD2 new file mode 100644 index 00000000..125d8e4b --- /dev/null +++ b/data/persistence_diagram/PD2 @@ -0,0 +1,2 @@ +2.8 4.45 +9.5 14.1 \ No newline at end of file diff --git a/src/Kernels/doc/Intro_kernels.h b/src/Kernels/doc/Intro_kernels.h index be97a6cf..163690b1 100644 --- a/src/Kernels/doc/Intro_kernels.h +++ b/src/Kernels/doc/Intro_kernels.h @@ -37,17 +37,64 @@ namespace kernel { * to the scalar products of the images of the diagrams under some feature map into a (generally unknown and infinite dimensional) * Hilbert space. Kernels are * very useful to handle any type of data for algorithms that require at least a Hilbert structure, such as Principal Component Analysis - * or Support Vector Machines. In this package, we implement three kernels for persistence diagrams: the Persistence Scale Space kernel, - * the Persistence Weighted Gaussian kernel and the Sliced Wasserstein kernel. + * or Support Vector Machines. 
In this package, we implement three kernels for persistence diagrams: + * the Persistence Scale Space Kernel (PSSK)---see \cite Reininghaus_Huber_ALL_PSSK, + * the Persistence Weighted Gaussian Kernel (PWGK)---see \cite Kusano_Fukumizu_Hiraoka_PWGK, + * and the Sliced Wasserstein Kernel (SWK)---see \cite pmlr-v70-carriere17a. * + * \section pwg Persistence Weighted Gaussian Kernel and Persistence Scale Space Kernel + * + * The PWGK is built with Gaussian Kernel Mean Embedding, meaning that each persistence diagram is first + * sent to the Hilbert space of a Gaussian kernel with bandwidth parameter \f$\sigma >0\f$ using a weighted mean embedding \f$\Phi\f$: + * + * \f$ \Phi\,:\,D\,\rightarrow\,\sum_{p\in D}\,w(p)\,{\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right) \f$, + * + * Usually, the weight function is chosen to be an arctan function of the distance of the point to the diagonal: + * \f$w(p) = {\rm arctan}(C\,|y-x|^\alpha)\f$, for some parameters \f$C,\alpha >0\f$. + * Then, either their scalar product in this space is + * computed (Linear Persistence Weighted Gaussian Kernel): + * + * \f$ LPWGK(D_1,D_2)=\langle\Phi(D_1),\Phi(D_2)\rangle + * \,=\,\sum_{p\in D_1}\,\sum_{q\in D_2}\,w(p)\,w(q)\,{\rm exp}\left(-\frac{\|p-q\|_2^2}{2\sigma^2}\right)\f$, + * + * or a second Gaussian kernel with bandwidth parameter \f$\tau >0\f$ is applied to their distance in this space + * (Gaussian Persistence Weighted Gaussian Kernel): + * + * \f$ GPWGK(D_1,D_2)={\rm exp}\left(-\frac{\|\Phi(D_1)-\Phi(D_2)\|^2}{2\tau^2} \right)\f$, + * where \f$\|\Phi(D_1)-\Phi(D_2)\|^2 = \langle\Phi(D_1)-\Phi(D_2),\Phi(D_1)-\Phi(D_2)\rangle\f$. + * + * It follows that the computation time is \f$O(n^2)\f$ where \f$n\f$ is the number of points + * in the diagrams. This time can be improved by computing approximations of the kernel + * with \f$m\f$ Fourier features \cite Rahimi07randomfeatures. In that case, the computation time becomes \f$O(mn)\f$. + * + * The PSSK is a Linear Persistence Weighted Gaussian Kernel between modified diagrams: + * the symmetric of each point with respect to the diagonal is first added in each diagram, and then the weight function + * is set to be +1 if the point is above the diagonal and -1 otherwise. + * + * \section sw Sliced Wasserstein Kernel + * + * The Sliced Wasserstein Kernel is defined as a Gaussian-like Kernel between persistence diagrams, where the distance used for + * comparison is the Sliced Wasserstein distance \f$SW\f$ between persistence diagrams, defined as the integral of the 1-norm + * between the sorted projections of the diagrams onto all lines passing through the origin: + * + * \f$ SW(D_1,D_2)=\int_{\theta\in\mathbb{S}}\,\|\pi_\theta(D_1\cup\pi_\Delta(D_2))-\pi_\theta(D_2\cup\pi_\Delta(D_1))\|_1{\rm d}\theta\f$, + * + * where \f$\pi_\theta\f$ is the projection onto the line defined with angle \f$\theta\f$ in the unit circle \f$\mathbb{S}\f$, + * and \f$\pi_\Delta\f$ is the projection onto the diagonal. + * The integral can be either computed exactly in \f$O(n^2{\rm log}(n))\f$ time, where \f$n\f$ is the number of points + * in the diagrams, or approximated by sampling \f$m\f$ lines in the circle in \f$O(mn{\rm log}(n))\f$ time. 
The SWK is then computed as: + * + * \f$ SWK(D_1,D_2) = {\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right).\f$ * * When launching: * - * \code $> ./BasicEx + * \code $> ./BasicEx ../../../../data/persistence_diagram/PD1 ../../../../data/persistence_diagram/PD2 * \endcode * * the program output is: * + * \include Kernels/kernel.txt + * * * \copyright GNU General Public License v3. * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim diff --git a/src/Kernels/example/kernel.txt b/src/Kernels/example/kernel.txt new file mode 100644 index 00000000..5fb8b504 --- /dev/null +++ b/src/Kernels/example/kernel.txt @@ -0,0 +1,8 @@ +SWK exact = 0.875446 +SWK approx = 0.875204 +PSSK exact = 0.0218669 +PSSK approx = 0.0213766 +LPWGK exact = 2.57351 +LPWGK approx = 2.49102 +GPWGK exact = 0.98783 +GPWGK approx = 0.987591 \ No newline at end of file diff --git a/src/Kernels/example/kernel_basic_example.cpp b/src/Kernels/example/kernel_basic_example.cpp index 8e9925c5..46e42c9d 100644 --- a/src/Kernels/example/kernel_basic_example.cpp +++ b/src/Kernels/example/kernel_basic_example.cpp @@ -20,29 +20,41 @@ * along with this program. If not, see . */ -#define NUMPI 3.14159265359 #include -int main() { +void usage(int nbArgs, char *const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " PD1 PD2 \n"; + std::cerr << " i.e.: " << progName << " ../../../../data/persistence_diagram/PD1 ../../../../data/persistence_diagram/PD2 \n"; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { - std::vector< std::pair > v1, v2; + if (argc != 3) usage(argc, argv[0]); double sigma = 2; double tau = 5; - v1.emplace_back(2.7, 3.7); - v1.emplace_back(9.6, 14.); - v1.emplace_back(34.2, 34.974); - - v2.emplace_back(2.8, 4.45); - v2.emplace_back(9.5, 14.1); - - std::cout << "SW exact = " << Gudhi::kernel::sw (v1, v2) << std::endl; - std::cout << "SW approx = " << Gudhi::kernel::approx_sw (v1, v2) << std::endl; - std::cout << "PSS exact = " << Gudhi::kernel::pss (v1,v2,sigma) << std::endl; - std::cout << "PSS approx = " << Gudhi::kernel::approx_pss (v1,v2,sigma) << std::endl; - std::cout << "PWG exact = " << Gudhi::kernel::lpwg (v1,v2,sigma) << std::endl; - std::cout << "PWG approx = " << Gudhi::kernel::approx_lpwg (v1,v2,sigma) << std::endl; - std::cout << "GPWG exact = " << Gudhi::kernel::gpwg (v1,v2,sigma,tau) << std::endl; - std::cout << "GPWG approx = " << Gudhi::kernel::approx_gpwg (v1,v2,sigma,tau) << std::endl; + std::string PDname1(argv[1]); std::string PDname2(argv[2]); + std::vector< std::pair > v1, v2; std::string line; double b,d; + + std::ifstream input1(PDname1); + while(std::getline(input1,line)){ + std::stringstream stream(line); stream >> b; stream >> d; v1.push_back(std::pair(b,d)); + } + + std::ifstream input2(PDname2); + while(std::getline(input2,line)){ + std::stringstream stream(line); stream >> b; stream >> d; v2.push_back(std::pair(b,d)); + } + + std::cout << "SWK exact = " << Gudhi::kernel::swk (v1,v2,sigma) << std::endl; + std::cout << "SWK approx = " << Gudhi::kernel::approx_swk (v1,v2,sigma) << std::endl; + std::cout << "PSSK exact = " << Gudhi::kernel::pssk (v1,v2,sigma) << std::endl; + std::cout << "PSSK approx = " << Gudhi::kernel::approx_pssk (v1,v2,sigma) << std::endl; + std::cout << "LPWGK exact = " << Gudhi::kernel::lpwgk (v1,v2,sigma) << std::endl; + std::cout << "LPWGK approx = " << Gudhi::kernel::approx_lpwgk (v1,v2,sigma) << std::endl; + std::cout << "GPWGK exact = " << 
Gudhi::kernel::gpwgk (v1,v2,sigma,tau) << std::endl; + std::cout << "GPWGK approx = " << Gudhi::kernel::approx_gpwgk (v1,v2,sigma,tau) << std::endl; } diff --git a/src/Kernels/include/gudhi/kernel.h b/src/Kernels/include/gudhi/kernel.h index 6429efed..44d984bd 100644 --- a/src/Kernels/include/gudhi/kernel.h +++ b/src/Kernels/include/gudhi/kernel.h @@ -23,8 +23,6 @@ #ifndef KERNEL_H_ #define KERNEL_H_ -#define NUMPI 3.14159265359 - #include #include #include @@ -80,7 +78,7 @@ double pss_weight(std::pair P){ * @param[in] weight weight function for the points in the diagrams. * */ -double lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ +double lpwgk(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ int num_pts1 = PD1.size(); int num_pts2 = PD2.size(); double k = 0; for(int i = 0; i < num_pts1; i++) for(int j = 0; j < num_pts2; j++) @@ -96,10 +94,10 @@ double lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair(PD1[i].second,PD1[i].first)); PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.push_back(std::pair(PD2[i].second,PD2[i].first)); - return lpwg(pd1, pd2, 2*sqrt(sigma), &pss_weight) / (2*8*NUMPI*sigma); + return lpwgk(pd1, pd2, 2*sqrt(sigma), &pss_weight) / (2*8*3.14159265359*sigma); } /** \brief Computes the Gaussian Persistence Weighted Gaussian Kernel between two persistence diagrams. @@ -112,10 +110,10 @@ double pss(PD PD1, PD PD2, double sigma){ * @param[in] weight weight function for the points in the diagrams. * */ -double gpwg(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ - double k1 = lpwg(PD1,PD1,sigma,weight); - double k2 = lpwg(PD2,PD2,sigma,weight); - double k3 = lpwg(PD1,PD2,sigma,weight); +double gpwgk(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ + double k1 = lpwgk(PD1,PD1,sigma,weight); + double k2 = lpwgk(PD2,PD2,sigma,weight); + double k3 = lpwgk(PD1,PD2,sigma,weight); return exp( - (k1+k2-2*k3) / (2*pow(tau,2)) ); } @@ -129,9 +127,9 @@ double gpwg(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair * */ double dpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ - double k1 = lpwg(PD1,PD1,sigma,weight); - double k2 = lpwg(PD2,PD2,sigma,weight); - double k3 = lpwg(PD1,PD2,sigma,weight); + double k1 = lpwgk(PD1,PD1,sigma,weight); + double k2 = lpwgk(PD2,PD2,sigma,weight); + double k3 = lpwgk(PD1,PD2,sigma,weight); return std::sqrt(k1+k2-2*k3); } @@ -160,24 +158,24 @@ double compute_angle(const PD & PersDiag, const int & i, const int & j){ // Valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] double compute_int_cos(const double & alpha, const double & beta){ double res = 0; - assert((alpha >= 0 && alpha <= NUMPI) || (alpha >= -NUMPI && alpha <= 0)); - if (alpha >= 0 && alpha <= NUMPI){ + assert((alpha >= 0 && alpha <= 3.14159265359) || (alpha >= -3.14159265359 && alpha <= 0)); + if (alpha >= 0 && alpha <= 3.14159265359){ if (cos(alpha) >= 0){ - if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} + if(3.14159265359/2 <= beta){res = 2-sin(alpha)-sin(beta);} else{res = sin(beta)-sin(alpha);} } else{ - if(1.5*NUMPI <= beta){res = 2+sin(alpha)+sin(beta);} + if(1.5*3.14159265359 <= beta){res = 2+sin(alpha)+sin(beta);} else{res = sin(alpha)-sin(beta);} } } - if (alpha >= 
-NUMPI && alpha <= 0){ + if (alpha >= -3.14159265359 && alpha <= 0){ if (cos(alpha) <= 0){ - if(-NUMPI/2 <= beta){res = 2+sin(alpha)+sin(beta);} + if(-3.14159265359/2 <= beta){res = 2+sin(alpha)+sin(beta);} else{res = sin(alpha)-sin(beta);} } else{ - if(NUMPI/2 <= beta){res = 2-sin(alpha)-sin(beta);} + if(3.14159265359/2 <= beta){res = 2-sin(alpha)-sin(beta);} else{res = sin(beta)-sin(alpha);} } } @@ -202,9 +200,9 @@ double compute_sw(const std::vector > > & V1, int N = V1.size(); double sw = 0; for (int i = 0; i < N; i++){ std::vector > U,V; U = V1[i]; V = V2[i]; - double theta1, theta2; theta1 = -NUMPI/2; + double theta1, theta2; theta1 = -3.14159265359/2; unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); - while(theta1 != NUMPI/2){ + while(theta1 != 3.14159265359/2){ if(PD1[U[ku].first].first != PD2[V[kv].first].first || PD1[U[ku].first].second != PD2[V[kv].first].second) if(theta1 != theta2) sw += compute_int(theta1, theta2, U[ku].first, V[kv].first, PD1, PD2); @@ -214,7 +212,7 @@ double compute_sw(const std::vector > > & V1, theta2 = std::min(U[ku].second, V[kv].second); } } - return sw/NUMPI; + return sw/3.14159265359; } /** \brief Computes the Sliced Wasserstein distance between two persistence diagrams. @@ -292,8 +290,8 @@ double compute_sw(const std::vector > > & V1, } for (int i = 0; i < N; i++){ - anglePerm1[order1[i]].push_back(std::pair(i,NUMPI/2)); - anglePerm2[order2[i]].push_back(std::pair(i,NUMPI/2)); + anglePerm1[order1[i]].push_back(std::pair(i,3.14159265359/2)); + anglePerm2[order2[i]].push_back(std::pair(i,3.14159265359/2)); } // Compute the SW distance with the list of inversions. @@ -301,6 +299,17 @@ double compute_sw(const std::vector > > & V1, } + /** \brief Computes the Sliced Wasserstein Kernel between two persistence diagrams. + * \ingroup kernel + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter. + * + */ + double swk(PD PD1, PD PD2, double sigma){ + return exp( - sw(PD1,PD2) / (2*pow(sigma, 2)) ); + } @@ -370,7 +379,7 @@ std::vector > random_Fourier(double sigma, int M = 1000 * @param[in] M number of Fourier features. * */ -double approx_lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ +double approx_lpwgk(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ std::vector > Z = random_Fourier(sigma, M); std::vector > B1 = Fourier_feat(PD1,Z,weight); std::vector > B2 = Fourier_feat(PD2,Z,weight); @@ -386,10 +395,10 @@ double approx_lpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair(PD1[i].second,PD1[i].first)); PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.push_back(std::pair(PD2[i].second,PD2[i].first)); - return approx_lpwg(pd1, pd2, 2*sqrt(sigma), &pss_weight, M) / (2*8*NUMPI*sigma); + return approx_lpwgk(pd1, pd2, 2*sqrt(sigma), &pss_weight, M) / (2*8*3.14159265359*sigma); } @@ -404,7 +413,7 @@ double approx_pss(PD PD1, PD PD2, double sigma, int M = 1000){ * @param[in] M number of Fourier features. 
* */ -double approx_gpwg(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ +double approx_gpwgk(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ std::vector > Z = random_Fourier(sigma, M); std::vector > B1 = Fourier_feat(PD1,Z,weight); std::vector > B2 = Fourier_feat(PD2,Z,weight); @@ -422,7 +431,7 @@ double approx_gpwg(PD PD1, PD PD2, double sigma, double tau, double (*weight)(st */ double approx_sw(PD PD1, PD PD2, int N = 100){ - double step = NUMPI/N; double sw = 0; + double step = 3.14159265359/N; double sw = 0; // Add projections onto diagonal. int n1, n2; n1 = PD1.size(); n2 = PD2.size(); @@ -437,14 +446,27 @@ double approx_sw(PD PD1, PD PD2, int N = 100){ for (int i = 0; i < N; i++){ std::vector > L1, L2; for (int j = 0; j < n; j++){ - L1.push_back( std::pair(j, PD1[j].first*cos(-NUMPI/2+i*step) + PD1[j].second*sin(-NUMPI/2+i*step)) ); - L2.push_back( std::pair(j, PD2[j].first*cos(-NUMPI/2+i*step) + PD2[j].second*sin(-NUMPI/2+i*step)) ); + L1.push_back( std::pair(j, PD1[j].first*cos(-3.14159265359/2+i*step) + PD1[j].second*sin(-3.14159265359/2+i*step)) ); + L2.push_back( std::pair(j, PD2[j].first*cos(-3.14159265359/2+i*step) + PD2[j].second*sin(-3.14159265359/2+i*step)) ); } std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp); double f = 0; for (int j = 0; j < n; j++) f += std::abs(L1[j].second - L2[j].second); sw += f*step; } - return sw/NUMPI; + return sw/3.14159265359; +} + +/** \brief Computes an approximation of the Sliced Wasserstein Kernel between two persistence diagrams. + * \ingroup kernel + * + * @param[in] PD1 first persistence diagram. + * @param[in] PD2 second persistence diagram. + * @param[in] sigma bandwidth parameter. + * @param[in] N number of points sampled on the circle. + * + */ +double approx_swk(PD PD1, PD PD2, double sigma, int N = 100){ + return exp( - approx_sw(PD1,PD2,N) / (2*pow(sigma,2))); } diff --git a/src/Kernels/test/CMakeLists.txt b/src/Kernels/test/CMakeLists.txt new file mode 100644 index 00000000..9dbb9ed4 --- /dev/null +++ b/src/Kernels/test/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 2.6) +project(kernel_tests) + +include(GUDHI_test_coverage) + +add_executable ( kernel_test_unit test_kernel.cpp ) +target_link_libraries(kernel_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(kernel_test_unit ${TBB_LIBRARIES}) +endif() + +file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + +gudhi_add_coverage_test(kernel_test_unit) diff --git a/src/Kernels/test/test_kernel.cpp b/src/Kernels/test/test_kernel.cpp new file mode 100644 index 00000000..db05fd28 --- /dev/null +++ b/src/Kernels/test/test_kernel.cpp @@ -0,0 +1,56 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "kernel" + +#include +#include // float comparison +#include +#include +#include +#include // std::max +#include +#include +#include + +BOOST_AUTO_TEST_CASE(check_PSS) { + std::vector< std::pair > v1, v2; + v1.emplace_back(std::pair(0,1)); + v2.emplace_back(std::pair(0,2)); + BOOST_CHECK(std::abs(Gudhi::kernel::pssk(v1,v2,1) - Gudhi::kernel::approx_pssk(v1,v2,1)) <= 1e-1); +} + +BOOST_AUTO_TEST_CASE(check_PWG) { + std::vector< std::pair > v1, v2; + v1.emplace_back(std::pair(0,1)); + v2.emplace_back(std::pair(0,2)); + BOOST_CHECK(std::abs(Gudhi::kernel::lpwgk(v1,v2,1) - Gudhi::kernel::approx_lpwgk(v1,v2,1)) <= 1e-1); + BOOST_CHECK(std::abs(Gudhi::kernel::gpwgk(v1,v2,1,1) - Gudhi::kernel::approx_gpwgk(v1,v2,1,1)) <= 1e-1); +} + +BOOST_AUTO_TEST_CASE(check_SW) { + std::vector< std::pair > v1, v2; + v2.emplace_back(std::pair(0,2)); + BOOST_CHECK(std::abs(Gudhi::kernel::sw(v1,v2) - Gudhi::kernel::approx_sw(v1,v2)) <= 1e-3); + BOOST_CHECK(std::abs(Gudhi::kernel::sw(v1,v2) - 2*std::sqrt(2)/3.1415) <= 1e-3); +} -- cgit v1.2.3 From 6485e6957ef3c9310f618db6caaf2858cc56db66 Mon Sep 17 00:00:00 2001 From: fgodi Date: Mon, 8 Jan 2018 10:57:04 +0000 Subject: test name git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3115 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 4d65b9218d81a3f7e32dd873ba8eb0d5e6524095 --- src/Toplex_map/doc/Intro_Toplex_map.h | 5 +++++ src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 12 ---------- src/Toplex_map/test/CMakeLists.txt | 2 +- src/Toplex_map/test/toplex_map_unit_test.cpp | 28 ++---------------------- 4 files changed, 8 insertions(+), 39 deletions(-) diff --git a/src/Toplex_map/doc/Intro_Toplex_map.h b/src/Toplex_map/doc/Intro_Toplex_map.h index 6f4c1a1b..e3f18b32 100644 --- a/src/Toplex_map/doc/Intro_Toplex_map.h +++ b/src/Toplex_map/doc/Intro_Toplex_map.h @@ -49,6 +49,11 @@ namespace Gudhi { * * \image html map.png * + * The performance is much better than with the simplex tree as soon as you manipulate only maximal simplices rather than all simplices; + * below is the construction of a strong witness complex of a point set with a growing parameter: + * + * \image html graph.png + * */ /** @} */ // end defgroup toplex_map diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index ea5ac618..abd815f9 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -31,18 +31,6 @@ struct Visitor { class Fake_simplex_tree : public Filtered_toplex_map { public: - /** Vertex is the type of vertices. - * \ingroup toplex_map */ - typedef Toplex_map::Vertex Vertex; - - /** Simplex is the type of simplices. - * \ingroup toplex_map */ - typedef Toplex_map::Simplex Simplex; - - /** The type of the pointers to maximal simplices. - * \ingroup toplex_map */ - typedef Toplex_map::Simplex_ptr Simplex_ptr; - - /** The type of the sets of Simplex_ptr.
* \ingroup toplex_map */ typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; diff --git a/src/Toplex_map/test/CMakeLists.txt b/src/Toplex_map/test/CMakeLists.txt index 25fcabac..5ed55e97 100644 --- a/src/Toplex_map/test/CMakeLists.txt +++ b/src/Toplex_map/test/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required(VERSION 2.6) project(Toplex_map_tests) -add_executable ( ToplexMapUT test.cpp ) +add_executable ( ToplexMapUT toplex_map_unit_test.cpp ) target_link_libraries(ToplexMapUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp index 3f4d96c2..b7a9251c 100644 --- a/src/Toplex_map/test/toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -1,5 +1,4 @@ #include -#include #include #include @@ -9,6 +8,8 @@ using namespace Gudhi; +typedef Toplex_map::Vertex Vertex; + std::vector sigma1 = {1, 2, 3, 4}; std::vector sigma2 = {5, 2, 3, 6}; std::vector sigma3 = {5}; @@ -33,18 +34,6 @@ BOOST_AUTO_TEST_CASE(toplexmap) { BOOST_CHECK(!K.membership(sigma6)); } -BOOST_AUTO_TEST_CASE(ltoplexmap) { - Lazy_Toplex_map K; - K.insert_simplex(sigma1); - K.insert_simplex(sigma2); - K.insert_simplex(sigma3); - K.insert_simplex(sigma6); - K.insert_simplex(sigma7); - BOOST_CHECK(K.membership(sigma4)); - BOOST_CHECK(!K.membership(sigma5)); - K.contraction(4,5); - BOOST_CHECK(!K.membership(sigma6)); -} BOOST_AUTO_TEST_CASE(ftoplexmap) { Filtered_toplex_map K; @@ -56,16 +45,3 @@ BOOST_AUTO_TEST_CASE(ftoplexmap) { BOOST_CHECK(K.filtration(sigma3)==1.); } -/* -BOOST_AUTO_TEST_CASE(toplexmap_candidates) { - Toplex_map K; - K.insert_simplex(sigma1); - K.insert_simplex(sigma2); - K.remove_simplex(sigma1); - K.remove_simplex(sigma2); - auto c = K.candidates(); - BOOST_CHECK(c.count(get_key(sigma1))); - BOOST_CHECK(c.count(get_key(sigma2))); - BOOST_CHECK(c.size()==2); -} -*/ -- cgit v1.2.3 From b39719177ee05b91be9abdafe093591fae2040e9 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Tue, 9 Jan 2018 16:25:48 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3121 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d7b24f452fd8e75aac61c40f0281dca683d7fa03 --- src/Kernels/example/CMakeLists.txt | 11 ----------- src/Kernels/test/CMakeLists.txt | 2 -- 2 files changed, 13 deletions(-) delete mode 100644 src/Kernels/example/CMakeLists.txt diff --git a/src/Kernels/example/CMakeLists.txt b/src/Kernels/example/CMakeLists.txt deleted file mode 100644 index 57e13004..00000000 --- a/src/Kernels/example/CMakeLists.txt +++ /dev/null @@ -1,11 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(Kernels_examples) - -add_executable ( BasicEx kernel_basic_example.cpp ) - -if (TBB_FOUND) - target_link_libraries(BasicEx ${TBB_LIBRARIES}) -endif() - -add_test(NAME Kernels_example_basicex COMMAND $ - "") diff --git a/src/Kernels/test/CMakeLists.txt b/src/Kernels/test/CMakeLists.txt index 9dbb9ed4..95c72a7f 100644 --- a/src/Kernels/test/CMakeLists.txt +++ b/src/Kernels/test/CMakeLists.txt @@ -9,6 +9,4 @@ if (TBB_FOUND) target_link_libraries(kernel_test_unit ${TBB_LIBRARIES}) endif() -file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - gudhi_add_coverage_test(kernel_test_unit) -- cgit v1.2.3 From 73392dcec4e9977ca862eaf1cf9beaa6f9a133f7 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Tue, 9 Jan 2018 16:27:19 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3122 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 
e47b68d4c035119352ad433a9efd516aef3a70f5 --- src/Kernels/example/CMakeLists.txt | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 src/Kernels/example/CMakeLists.txt diff --git a/src/Kernels/example/CMakeLists.txt b/src/Kernels/example/CMakeLists.txt new file mode 100644 index 00000000..d8ad4b42 --- /dev/null +++ b/src/Kernels/example/CMakeLists.txt @@ -0,0 +1,10 @@ +cmake_minimum_required(VERSION 2.6) +project(Kernels_examples) + +add_executable ( BasicEx kernel_basic_example.cpp ) + +if (TBB_FOUND) + target_link_libraries(BasicEx ${TBB_LIBRARIES}) +endif() + +add_test(NAME Kernels_example_basicex COMMAND $ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/PD1" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/PD2") \ No newline at end of file -- cgit v1.2.3 From 1c6a680aee1ff1193ea546cfaeb63b18d38b97fa Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 11 Jan 2018 11:04:38 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3125 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 4064ad30936118d97d16f84a5288e349f86109b6 --- src/Kernels/include/gudhi/kernel.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Kernels/include/gudhi/kernel.h b/src/Kernels/include/gudhi/kernel.h index 44d984bd..30864476 100644 --- a/src/Kernels/include/gudhi/kernel.h +++ b/src/Kernels/include/gudhi/kernel.h @@ -43,8 +43,8 @@ #include #include #include -#include -#include +//#include +//#include #include #include #include -- cgit v1.2.3 From be131d6f74a9264e15a0b1c1e72fa8967c4518bd Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 11 Jan 2018 16:31:30 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3129 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 567185cf3691252c2685c450d65787d824442bb2 --- src/Kernels/example/kernel_basic_example.cpp | 5 ++ src/Kernels/include/gudhi/kernel.h | 95 +++++++++++++--------------- 2 files changed, 49 insertions(+), 51 deletions(-) diff --git a/src/Kernels/example/kernel_basic_example.cpp b/src/Kernels/example/kernel_basic_example.cpp index 46e42c9d..85ce36d4 100644 --- a/src/Kernels/example/kernel_basic_example.cpp +++ b/src/Kernels/example/kernel_basic_example.cpp @@ -21,6 +21,11 @@ */ #include +#include +#include +#include +#include + void usage(int nbArgs, char *const progName) { std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; diff --git a/src/Kernels/include/gudhi/kernel.h b/src/Kernels/include/gudhi/kernel.h index 30864476..900db092 100644 --- a/src/Kernels/include/gudhi/kernel.h +++ b/src/Kernels/include/gudhi/kernel.h @@ -4,7 +4,7 @@ * * Author(s): Mathieu Carrière * - * Copyright (C) 2017 INRIA (France) + * Copyright (C) 2018 INRIA (France) * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by @@ -23,45 +23,32 @@ #ifndef KERNEL_H_ #define KERNEL_H_ -#include -#include #include -#include -#include -#include -#include -#include -#include -#include -#include #include #include -#include -#include #include -#include -#include -#include -#include -//#include -//#include #include -#include -#include +#include -using PD = std::vector >; -bool sortAngle(const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);} -bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;} namespace Gudhi { namespace kernel { +using PD = std::vector >; +double pi = boost::math::constants::pi(); + +bool 
sortAngle(const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);} +bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;} double pss_weight(std::pair P){ if(P.second > P.first) return 1; else return -1; } +double arctan_weight(std::pair P){ + return atan(P.second - P.first); +} + @@ -78,7 +65,8 @@ double pss_weight(std::pair P){ * @param[in] weight weight function for the points in the diagrams. * */ -double lpwgk(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ +template) > +double lpwgk(const PD & PD1, const PD & PD2, double sigma, Weight weight = arctan_weight){ int num_pts1 = PD1.size(); int num_pts2 = PD2.size(); double k = 0; for(int i = 0; i < num_pts1; i++) for(int j = 0; j < num_pts2; j++) @@ -94,10 +82,10 @@ double lpwgk(PD PD1, PD PD2, double sigma, double (*weight)(std::pair(PD1[i].second,PD1[i].first)); PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.push_back(std::pair(PD2[i].second,PD2[i].first)); - return lpwgk(pd1, pd2, 2*sqrt(sigma), &pss_weight) / (2*8*3.14159265359*sigma); + return lpwgk(pd1, pd2, 2*sqrt(sigma), &pss_weight) / (2*8*pi*sigma); } /** \brief Computes the Gaussian Persistence Weighted Gaussian Kernel between two persistence diagrams. @@ -110,7 +98,8 @@ double pssk(PD PD1, PD PD2, double sigma){ * @param[in] weight weight function for the points in the diagrams. * */ -double gpwgk(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ +template) > +double gpwgk(const PD & PD1, const PD & PD2, double sigma, double tau, Weight weight = arctan_weight){ double k1 = lpwgk(PD1,PD1,sigma,weight); double k2 = lpwgk(PD2,PD2,sigma,weight); double k3 = lpwgk(PD1,PD2,sigma,weight); @@ -126,7 +115,8 @@ double gpwgk(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pai * @param[in] weight weight function for the points in the diagrams. 
* */ -double dpwg(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ +template) > +double dpwg(const PD & PD1, const PD & PD2, double sigma, Weight weight = arctan_weight){ double k1 = lpwgk(PD1,PD1,sigma,weight); double k2 = lpwgk(PD2,PD2,sigma,weight); double k3 = lpwgk(PD1,PD2,sigma,weight); @@ -158,24 +148,24 @@ double compute_angle(const PD & PersDiag, const int & i, const int & j){ // Valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] double compute_int_cos(const double & alpha, const double & beta){ double res = 0; - assert((alpha >= 0 && alpha <= 3.14159265359) || (alpha >= -3.14159265359 && alpha <= 0)); - if (alpha >= 0 && alpha <= 3.14159265359){ + //assert((alpha >= 0 && alpha <= pi) || (alpha >= -pi && alpha <= 0)); + if (alpha >= 0 && alpha <= pi){ if (cos(alpha) >= 0){ - if(3.14159265359/2 <= beta){res = 2-sin(alpha)-sin(beta);} + if(pi/2 <= beta){res = 2-sin(alpha)-sin(beta);} else{res = sin(beta)-sin(alpha);} } else{ - if(1.5*3.14159265359 <= beta){res = 2+sin(alpha)+sin(beta);} + if(1.5*pi <= beta){res = 2+sin(alpha)+sin(beta);} else{res = sin(alpha)-sin(beta);} } } - if (alpha >= -3.14159265359 && alpha <= 0){ + if (alpha >= -pi && alpha <= 0){ if (cos(alpha) <= 0){ - if(-3.14159265359/2 <= beta){res = 2+sin(alpha)+sin(beta);} + if(-pi/2 <= beta){res = 2+sin(alpha)+sin(beta);} else{res = sin(alpha)-sin(beta);} } else{ - if(3.14159265359/2 <= beta){res = 2-sin(alpha)-sin(beta);} + if(pi/2 <= beta){res = 2-sin(alpha)-sin(beta);} else{res = sin(beta)-sin(alpha);} } } @@ -200,9 +190,9 @@ double compute_sw(const std::vector > > & V1, int N = V1.size(); double sw = 0; for (int i = 0; i < N; i++){ std::vector > U,V; U = V1[i]; V = V2[i]; - double theta1, theta2; theta1 = -3.14159265359/2; + double theta1, theta2; theta1 = -pi/2; unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); - while(theta1 != 3.14159265359/2){ + while(theta1 != pi/2){ if(PD1[U[ku].first].first != PD2[V[kv].first].first || PD1[U[ku].first].second != PD2[V[kv].first].second) if(theta1 != theta2) sw += compute_int(theta1, theta2, U[ku].first, V[kv].first, PD1, PD2); @@ -212,7 +202,7 @@ double compute_sw(const std::vector > > & V1, theta2 = std::min(U[ku].second, V[kv].second); } } - return sw/3.14159265359; + return sw/pi; } /** \brief Computes the Sliced Wasserstein distance between two persistence diagrams. @@ -234,7 +224,7 @@ double compute_sw(const std::vector > > & V1, max_ordinate = std::max(max_ordinate, PD1[i].second); PD2.push_back( std::pair( ((PD1[i].first+PD1[i].second)/2), ((PD1[i].first+PD1[i].second)/2) ) ); } - int N = PD1.size(); assert(N==PD2.size()); + int N = PD1.size(); // Slightly perturb the points so that the PDs are in generic positions. int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} @@ -290,8 +280,8 @@ double compute_sw(const std::vector > > & V1, } for (int i = 0; i < N; i++){ - anglePerm1[order1[i]].push_back(std::pair(i,3.14159265359/2)); - anglePerm2[order2[i]].push_back(std::pair(i,3.14159265359/2)); + anglePerm1[order1[i]].push_back(std::pair(i,pi/2)); + anglePerm2[order2[i]].push_back(std::pair(i,pi/2)); } // Compute the SW distance with the list of inversions. 
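// --------------------------------------------------------------------------
// [Editor's note - not part of the patch above] A minimal usage sketch of the
// kernels declared in this header, mirroring the calls already exercised in
// src/Kernels/example/kernel_basic_example.cpp and src/Kernels/test/test_kernel.cpp.
// The include path <gudhi/kernel.h> and the chosen parameter values are
// assumptions made for illustration only.
#include <iostream>
#include <utility>
#include <vector>
#include <gudhi/kernel.h>

int main() {
  std::vector<std::pair<double, double>> d1 = {{0.0, 1.0}, {3.0, 5.0}};
  std::vector<std::pair<double, double>> d2 = {{0.0, 2.0}, {6.0, 10.0}};
  double sigma = 1.0, tau = 1.0;
  std::cout << Gudhi::kernel::lpwgk(d1, d2, sigma) << "\n";       // linear persistence weighted Gaussian kernel
  std::cout << Gudhi::kernel::gpwgk(d1, d2, sigma, tau) << "\n";  // Gaussian persistence weighted Gaussian kernel
  std::cout << Gudhi::kernel::pssk(d1, d2, sigma) << "\n";        // persistence scale-space kernel
  std::cout << Gudhi::kernel::sw(d1, d2) << "\n";                 // sliced Wasserstein distance
  std::cout << Gudhi::kernel::swk(d1, d2, sigma) << "\n";         // sliced Wasserstein kernel: exp(-SW/(2*sigma^2))
  return 0;
}
// --------------------------------------------------------------------------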
@@ -344,7 +334,8 @@ double approx_dpwg_Fourier(const std::vector >& B1, con return std::sqrt((1.0/M)*(d1+d2)-2*d3); } -std::vector > Fourier_feat(PD D, std::vector > Z, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}){ +template) > +std::vector > Fourier_feat(PD D, std::vector > Z, Weight weight = arctan_weight){ int m = D.size(); std::vector > B; int M = Z.size(); for(int i = 0; i < M; i++){ double d1 = 0; double d2 = 0; double zx = Z[i].first; double zy = Z[i].second; @@ -379,7 +370,8 @@ std::vector > random_Fourier(double sigma, int M = 1000 * @param[in] M number of Fourier features. * */ -double approx_lpwgk(PD PD1, PD PD2, double sigma, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ +template) > +double approx_lpwgk(const PD & PD1, const PD & PD2, double sigma, Weight weight = arctan_weight, int M = 1000){ std::vector > Z = random_Fourier(sigma, M); std::vector > B1 = Fourier_feat(PD1,Z,weight); std::vector > B2 = Fourier_feat(PD2,Z,weight); @@ -395,10 +387,10 @@ double approx_lpwgk(PD PD1, PD PD2, double sigma, double (*weight)(std::pair(PD1[i].second,PD1[i].first)); PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.push_back(std::pair(PD2[i].second,PD2[i].first)); - return approx_lpwgk(pd1, pd2, 2*sqrt(sigma), &pss_weight, M) / (2*8*3.14159265359*sigma); + return approx_lpwgk(pd1, pd2, 2*sqrt(sigma), &pss_weight, M) / (2*8*pi*sigma); } @@ -413,7 +405,8 @@ double approx_pssk(PD PD1, PD PD2, double sigma, int M = 1000){ * @param[in] M number of Fourier features. * */ -double approx_gpwgk(PD PD1, PD PD2, double sigma, double tau, double (*weight)(std::pair) = [](std::pair P){return atan(P.second - P.first);}, int M = 1000){ +template) > +double approx_gpwgk(const PD & PD1, const PD & PD2, double sigma, double tau, Weight weight = arctan_weight, int M = 1000){ std::vector > Z = random_Fourier(sigma, M); std::vector > B1 = Fourier_feat(PD1,Z,weight); std::vector > B2 = Fourier_feat(PD2,Z,weight); @@ -431,7 +424,7 @@ double approx_gpwgk(PD PD1, PD PD2, double sigma, double tau, double (*weight)(s */ double approx_sw(PD PD1, PD PD2, int N = 100){ - double step = 3.14159265359/N; double sw = 0; + double step = pi/N; double sw = 0; // Add projections onto diagonal. int n1, n2; n1 = PD1.size(); n2 = PD2.size(); @@ -446,14 +439,14 @@ double approx_sw(PD PD1, PD PD2, int N = 100){ for (int i = 0; i < N; i++){ std::vector > L1, L2; for (int j = 0; j < n; j++){ - L1.push_back( std::pair(j, PD1[j].first*cos(-3.14159265359/2+i*step) + PD1[j].second*sin(-3.14159265359/2+i*step)) ); - L2.push_back( std::pair(j, PD2[j].first*cos(-3.14159265359/2+i*step) + PD2[j].second*sin(-3.14159265359/2+i*step)) ); + L1.push_back( std::pair(j, PD1[j].first*cos(-pi/2+i*step) + PD1[j].second*sin(-pi/2+i*step)) ); + L2.push_back( std::pair(j, PD2[j].first*cos(-pi/2+i*step) + PD2[j].second*sin(-pi/2+i*step)) ); } std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp); double f = 0; for (int j = 0; j < n; j++) f += std::abs(L1[j].second - L2[j].second); sw += f*step; } - return sw/3.14159265359; + return sw/pi; } /** \brief Computes an approximation of the Sliced Wasserstein Kernel between two persistence diagrams. 
-- cgit v1.2.3 From ff0dc023588e3b33bc4bc7f26ce1f68c647ae441 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 16 Feb 2018 15:43:29 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3251 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 80f084fc990df6e5c6b60ac83514220aba2ceb5c --- .../example/CMakeLists.txt | 10 +++ .../example/persistence_weighted_gaussian.cpp | 34 +++++----- .../example/sliced_wasserstein.cpp | 16 +++-- .../include/gudhi/Persistence_weighted_gaussian.h | 78 ++++++++++++---------- .../include/gudhi/Sliced_Wasserstein.h | 26 +++++--- 5 files changed, 98 insertions(+), 66 deletions(-) diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 54d719ac..79d39c4d 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -27,3 +27,13 @@ add_test(NAME Persistence_representations_example_heat_maps COMMAND $) install(TARGETS Persistence_representations_example_heat_maps DESTINATION bin) +add_executable ( Sliced_Wasserstein sliced_wasserstein.cpp ) +add_test(NAME Sliced_Wasserstein + COMMAND $) +install(TARGETS Sliced_Wasserstein DESTINATION bin) + +add_executable ( Persistence_weighted_gaussian persistence_weighted_gaussian.cpp ) +add_test(NAME Persistence_weighted_gaussian + COMMAND $) +install(TARGETS Persistence_weighted_gaussian DESTINATION bin) + diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp index e95b9445..a0e820ea 100644 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp @@ -44,21 +44,24 @@ int main(int argc, char** argv) { persistence2.push_back(std::make_pair(3, 5)); persistence2.push_back(std::make_pair(6, 10)); - PWG PWG1(persistence1); - PWG PWG2(persistence2); double sigma = 1; double tau = 1; - int m = 1000; + int m = 10000; + PWG PWG1(persistence1, sigma, m, PWG::arctan_weight); + PWG PWG2(persistence2, sigma, m, PWG::arctan_weight); + + PWG PWGex1(persistence1, sigma, -1, PWG::arctan_weight); + PWG PWGex2(persistence2, sigma, -1, PWG::arctan_weight); // Linear PWG - std::cout << PWG1.compute_scalar_product (PWG2, sigma, PWG::arctan_weight, m) << std::endl; - std::cout << PWG1.compute_scalar_product (PWG2, sigma, PWG::arctan_weight, -1) << std::endl; + std::cout << PWG1.compute_scalar_product (PWG2) << std::endl; + std::cout << PWGex1.compute_scalar_product (PWGex2) << std::endl; - std::cout << PWG1.distance (PWG2, sigma, PWG::arctan_weight, m) << std::endl; - std::cout << PWG1.distance (PWG2, sigma, PWG::arctan_weight, -1) << std::endl; + std::cout << PWG1.distance (PWG2) << std::endl; + std::cout << PWGex1.distance (PWGex2) << std::endl; @@ -68,8 +71,8 @@ int main(int argc, char** argv) { // Gaussian PWG - std::cout << std::exp( -PWG1.distance (PWG2, sigma, PWG::arctan_weight, m, 2) ) / (2*tau*tau) << std::endl; - std::cout << std::exp( -PWG1.distance (PWG2, sigma, PWG::arctan_weight, -1, 2) ) / (2*tau*tau) << std::endl; + std::cout << std::exp( -PWG1.distance (PWG2, 2) ) / (2*tau*tau) << std::endl; + std::cout << std::exp( -PWGex1.distance (PWGex2, 2) ) / (2*tau*tau) << std::endl; @@ -82,14 +85,15 @@ int main(int argc, char** argv) { PD pd1 = persistence1; int numpts = persistence1.size(); for(int i = 0; i < numpts; i++) 
pd1.emplace_back(persistence1[i].second,persistence1[i].first); PD pd2 = persistence2; numpts = persistence2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(persistence2[i].second,persistence2[i].first); - PWG pwg1(pd1); - PWG pwg2(pd2); + PWG pwg1(pd1, 2*std::sqrt(sigma), m, PWG::pss_weight); + PWG pwg2(pd2, 2*std::sqrt(sigma), m, PWG::pss_weight); + + PWG pwgex1(pd1, 2*std::sqrt(sigma), -1, PWG::pss_weight); + PWG pwgex2(pd2, 2*std::sqrt(sigma), -1, PWG::pss_weight); - std::cout << pwg1.compute_scalar_product (pwg2, 2*std::sqrt(sigma), PWG::pss_weight, m) / (16*pi*sigma) << std::endl; - std::cout << pwg1.compute_scalar_product (pwg2, 2*std::sqrt(sigma), PWG::pss_weight, -1) / (16*pi*sigma) << std::endl; + std::cout << pwg1.compute_scalar_product (pwg2) / (16*pi*sigma) << std::endl; + std::cout << pwgex1.compute_scalar_product (pwgex2) / (16*pi*sigma) << std::endl; - std::cout << pwg1.distance (pwg2, 2*std::sqrt(sigma), PWG::pss_weight, m) / (16*pi*sigma) << std::endl; - std::cout << pwg1.distance (pwg2, 2*std::sqrt(sigma), PWG::pss_weight, -1) / (16*pi*sigma) << std::endl; return 0; diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index 673d8474..f153fbe8 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -43,13 +43,17 @@ int main(int argc, char** argv) { persistence2.push_back(std::make_pair(3, 5)); persistence2.push_back(std::make_pair(6, 10)); - SW SW1(persistence1); - SW SW2(persistence2); - std::cout << SW1.compute_sliced_wasserstein_distance(SW2,100) << std::endl; - std::cout << SW1.compute_sliced_wasserstein_distance(SW2,-1) << std::endl; - std::cout << SW1.compute_scalar_product(SW2,1,100) << std::endl; - std::cout << SW1.distance(SW2,1,100,1) << std::endl; + SW sw1(persistence1, 1, 100); + SW sw2(persistence2, 1, 100); + + SW swex1(persistence1, 1, -1); + SW swex2(persistence2, 1, -1); + + std::cout << sw1.compute_sliced_wasserstein_distance(sw2) << std::endl; + std::cout << swex1.compute_sliced_wasserstein_distance(swex2) << std::endl; + std::cout << sw1.compute_scalar_product(sw2) << std::endl; + std::cout << swex1.distance(swex2) << std::endl; return 0; } diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index 2884885c..2b25b9a8 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -45,6 +45,7 @@ double pi = boost::math::constants::pi(); using PD = std::vector >; +using Weight = std::function) >; namespace Gudhi { namespace Persistence_representations { @@ -53,11 +54,18 @@ class Persistence_weighted_gaussian{ protected: PD diagram; + Weight weight; + double sigma; + int approx; public: - Persistence_weighted_gaussian(PD _diagram){diagram = _diagram;} + Persistence_weighted_gaussian(PD _diagram){diagram = _diagram; sigma = 1.0; approx = 1000; weight = arctan_weight;} + Persistence_weighted_gaussian(PD _diagram, double _sigma, int _approx, Weight _weight){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} PD get_diagram(){return this->diagram;} + double get_sigma(){return this->sigma;} + int get_approx(){return this->approx;} + Weight get_weight(){return this->weight;} // ********************************** @@ -65,38 
+73,37 @@ class Persistence_weighted_gaussian{ // ********************************** - static double pss_weight(std::pair P){ - if(P.second > P.first) return 1; + static double pss_weight(std::pair p){ + if(p.second > p.first) return 1; else return -1; } - static double arctan_weight(std::pair P){ - return atan(P.second - P.first); + static double arctan_weight(std::pair p){ + return atan(p.second - p.first); } - template) > > - std::vector > Fourier_feat(PD D, std::vector > Z, Weight weight = arctan_weight){ - int m = D.size(); std::vector > B; int M = Z.size(); - for(int i = 0; i < M; i++){ - double d1 = 0; double d2 = 0; double zx = Z[i].first; double zy = Z[i].second; - for(int j = 0; j < m; j++){ - double x = D[j].first; double y = D[j].second; - d1 += weight(D[j])*cos(x*zx + y*zy); - d2 += weight(D[j])*sin(x*zx + y*zy); + std::vector > Fourier_feat(PD diag, std::vector > z, Weight weight = arctan_weight){ + int md = diag.size(); std::vector > b; int mz = z.size(); + for(int i = 0; i < mz; i++){ + double d1 = 0; double d2 = 0; double zx = z[i].first; double zy = z[i].second; + for(int j = 0; j < md; j++){ + double x = diag[j].first; double y = diag[j].second; + d1 += weight(diag[j])*cos(x*zx + y*zy); + d2 += weight(diag[j])*sin(x*zx + y*zy); } - B.emplace_back(d1,d2); + b.emplace_back(d1,d2); } - return B; + return b; } - std::vector > random_Fourier(double sigma, int M = 1000){ - std::normal_distribution distrib(0,1); std::vector > Z; std::random_device rd; - for(int i = 0; i < M; i++){ + std::vector > random_Fourier(double sigma, int m = 1000){ + std::normal_distribution distrib(0,1); std::vector > z; std::random_device rd; + for(int i = 0; i < m; i++){ std::mt19937 e1(rd()); std::mt19937 e2(rd()); double zx = distrib(e1); double zy = distrib(e2); - Z.emplace_back(zx/sigma,zy/sigma); + z.emplace_back(zx/sigma,zy/sigma); } - return Z; + return z; } @@ -106,32 +113,33 @@ class Persistence_weighted_gaussian{ // ********************************** - template) > > - double compute_scalar_product(Persistence_weighted_gaussian second, double sigma, Weight weight = arctan_weight, int m = 1000){ + double compute_scalar_product(Persistence_weighted_gaussian second){ PD diagram1 = this->diagram; PD diagram2 = second.diagram; - if(m == -1){ + if(this->approx == -1){ int num_pts1 = diagram1.size(); int num_pts2 = diagram2.size(); double k = 0; for(int i = 0; i < num_pts1; i++) for(int j = 0; j < num_pts2; j++) - k += weight(diagram1[i])*weight(diagram2[j])*exp(-((diagram1[i].first - diagram2[j].first) * (diagram1[i].first - diagram2[j].first) + - (diagram1[i].second - diagram2[j].second) * (diagram1[i].second - diagram2[j].second)) - /(2*sigma*sigma)); + k += this->weight(diagram1[i])*this->weight(diagram2[j])*exp(-((diagram1[i].first - diagram2[j].first) * (diagram1[i].first - diagram2[j].first) + + (diagram1[i].second - diagram2[j].second) * (diagram1[i].second - diagram2[j].second)) + /(2*this->sigma*this->sigma)); return k; } else{ - std::vector > z = random_Fourier(sigma, m); - std::vector > b1 = Fourier_feat(diagram1,z,weight); - std::vector > b2 = Fourier_feat(diagram2,z,weight); - double d = 0; for(int i = 0; i < m; i++) d += b1[i].first*b2[i].first + b1[i].second*b2[i].second; - return d/m; + std::vector > z = random_Fourier(this->sigma, this->approx); + std::vector > b1 = Fourier_feat(diagram1,z,this->weight); + std::vector > b2 = Fourier_feat(diagram2,z,this->weight); + double d = 0; for(int i = 0; i < this->approx; i++) d += b1[i].first*b2[i].first + b1[i].second*b2[i].second; + 
return d/this->approx; } } - template) > > - double distance(Persistence_weighted_gaussian second, double sigma, Weight weight = arctan_weight, int m = 1000, double power = 1) { - return std::pow(this->compute_scalar_product(*this, sigma, weight, m) + second.compute_scalar_product(second, sigma, weight, m)-2*this->compute_scalar_product(second, sigma, weight, m), power/2.0); + double distance(Persistence_weighted_gaussian second, double power = 1) { + if(this->sigma != second.get_sigma() || this->approx != second.get_approx()){ + std::cout << "Error: different representations!" << std::endl; return 0; + } + else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), power/2.0); } diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index 4fa6151f..ad1a6c42 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -53,11 +53,16 @@ class Sliced_Wasserstein { protected: PD diagram; + int approx; + double sigma; public: - Sliced_Wasserstein(PD _diagram){diagram = _diagram;} + Sliced_Wasserstein(PD _diagram){diagram = _diagram; approx = 100; sigma = 0.001;} + Sliced_Wasserstein(PD _diagram, double _sigma, int _approx){diagram = _diagram; approx = _approx; sigma = _sigma;} PD get_diagram(){return this->diagram;} + int get_approx(){return this->approx;} + double get_sigma(){return this->sigma;} // ********************************** @@ -130,11 +135,11 @@ class Sliced_Wasserstein { // Scalar product + distance. // ********************************** - double compute_sliced_wasserstein_distance(Sliced_Wasserstein second, int approx) { + double compute_sliced_wasserstein_distance(Sliced_Wasserstein second) { PD diagram1 = this->diagram; PD diagram2 = second.diagram; double sw = 0; - if(approx == -1){ + if(this->approx == -1){ // Add projections onto diagonal. int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); double max_ordinate = std::numeric_limits::lowest(); @@ -226,7 +231,7 @@ class Sliced_Wasserstein { else{ - double step = pi/approx; + double step = pi/this->approx; // Add projections onto diagonal. int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); @@ -238,7 +243,7 @@ class Sliced_Wasserstein { // Sort and compare all projections. 
#ifdef GUDHI_USE_TBB - tbb::parallel_for(0, approx, [&](int i){ + tbb::parallel_for(0, this->approx, [&](int i){ std::vector > l1, l2; for (int j = 0; j < n; j++){ l1.emplace_back( j, diagram1[j].first*cos(-pi/2+i*step) + diagram1[j].second*sin(-pi/2+i*step) ); @@ -250,7 +255,7 @@ class Sliced_Wasserstein { sw += f*step; }); #else - for (int i = 0; i < approx; i++){ + for (int i = 0; i < this->approx; i++){ std::vector > l1, l2; for (int j = 0; j < n; j++){ l1.emplace_back( j, diagram1[j].first*cos(-pi/2+i*step) + diagram1[j].second*sin(-pi/2+i*step) ); @@ -268,12 +273,13 @@ class Sliced_Wasserstein { } - double compute_scalar_product(Sliced_Wasserstein second, double sigma, int approx = 100) { - return std::exp(-compute_sliced_wasserstein_distance(second, approx)/(2*sigma*sigma)); + double compute_scalar_product(Sliced_Wasserstein second){ + return std::exp(-compute_sliced_wasserstein_distance(second)/(2*this->sigma*this->sigma)); } - double distance(Sliced_Wasserstein second, double sigma, int approx = 100, double power = 1) { - return std::pow(this->compute_scalar_product(*this, sigma, approx) + second.compute_scalar_product(second, sigma, approx)-2*this->compute_scalar_product(second, sigma, approx), power/2.0); + double distance(Sliced_Wasserstein second, double power = 1) { + if(this->sigma != second.sigma || this->approx != second.approx){std::cout << "Error: different representations!" << std::endl; return 0;} + else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), power/2.0); } -- cgit v1.2.3 From 220a91b55e0952947f96ea4a09085b0720466c64 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Tue, 27 Feb 2018 15:56:28 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3259 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: eedc1bf47f0194b23dfc02425bedd2682bd3f573 --- .../include/gudhi/Persistence_weighted_gaussian.h | 4 --- .../include/gudhi/Sliced_Wasserstein.h | 38 ++++++---------------- 2 files changed, 10 insertions(+), 32 deletions(-) diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index 2b25b9a8..a6efa72d 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -23,10 +23,6 @@ #ifndef PERSISTENCE_WEIGHTED_GAUSSIAN_H_ #define PERSISTENCE_WEIGHTED_GAUSSIAN_H_ -#ifdef GUDHI_USE_TBB -#include -#endif - // gudhi include #include diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index ad1a6c42..6196e207 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -23,10 +23,6 @@ #ifndef SLICED_WASSERSTEIN_H_ #define SLICED_WASSERSTEIN_H_ -#ifdef GUDHI_USE_TBB -#include -#endif - // gudhi include #include @@ -242,31 +238,17 @@ class Sliced_Wasserstein { int n = diagram1.size(); // Sort and compare all projections. 
- #ifdef GUDHI_USE_TBB - tbb::parallel_for(0, this->approx, [&](int i){ - std::vector > l1, l2; - for (int j = 0; j < n; j++){ - l1.emplace_back( j, diagram1[j].first*cos(-pi/2+i*step) + diagram1[j].second*sin(-pi/2+i*step) ); - l2.emplace_back( j, diagram2[j].first*cos(-pi/2+i*step) + diagram2[j].second*sin(-pi/2+i*step) ); - } - std::sort(l1.begin(),l1.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); - std::sort(l2.begin(),l2.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); - double f = 0; for (int j = 0; j < n; j++) f += std::abs(l1[j].second - l2[j].second); - sw += f*step; - }); - #else - for (int i = 0; i < this->approx; i++){ - std::vector > l1, l2; - for (int j = 0; j < n; j++){ - l1.emplace_back( j, diagram1[j].first*cos(-pi/2+i*step) + diagram1[j].second*sin(-pi/2+i*step) ); - l2.emplace_back( j, diagram2[j].first*cos(-pi/2+i*step) + diagram2[j].second*sin(-pi/2+i*step) ); - } - std::sort(l1.begin(),l1.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); - std::sort(l2.begin(),l2.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); - double f = 0; for (int j = 0; j < n; j++) f += std::abs(l1[j].second - l2[j].second); - sw += f*step; + for (int i = 0; i < this->approx; i++){ + std::vector > l1, l2; + for (int j = 0; j < n; j++){ + l1.emplace_back( j, diagram1[j].first*cos(-pi/2+i*step) + diagram1[j].second*sin(-pi/2+i*step) ); + l2.emplace_back( j, diagram2[j].first*cos(-pi/2+i*step) + diagram2[j].second*sin(-pi/2+i*step) ); } - #endif + std::sort(l1.begin(),l1.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); + std::sort(l2.begin(),l2.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); + double f = 0; for (int j = 0; j < n; j++) f += std::abs(l1[j].second - l2[j].second); + sw += f*step; + } } return sw/pi; -- cgit v1.2.3 From d574f7f65acdd6dde92150879c06db5e6e0b75a9 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Mon, 5 Mar 2018 13:57:02 +0000 Subject: added files for cythonization of kernels git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3263 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5bc7aadf2696ea2f94384a163451237016a7effb --- src/cython/cython/kernels.pyx | 47 ++++++++++++++++++++++++++++++++ src/cython/gudhi.pyx.in | 1 + src/cython/include/Kernels_interface.h | 49 ++++++++++++++++++++++++++++++++++ 3 files changed, 97 insertions(+) create mode 100644 src/cython/cython/kernels.pyx create mode 100644 src/cython/include/Kernels_interface.h diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx new file mode 100644 index 00000000..220fc6ce --- /dev/null +++ b/src/cython/cython/kernels.pyx @@ -0,0 +1,47 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +import os + +"""This file is part of the Gudhi Library. The Gudhi library + (Geometric Understanding in Higher Dimensions) is a generic C++ + library for computational topology. + + Author(s): Mathieu Carriere + + Copyright (C) 2018 INRIA + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. 
+ + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +__author__ = "Mathieu Carriere" +__copyright__ = "Copyright (C) 2018 INRIA" +__license__ = "GPL v3" + +cdef extern from "Kernels_interface.h" namespace "Gudhi::persistence_diagram": + double sw(vector[pair[double, double]], vector[pair[double, double]], double, int) + +def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): + """ + + :param diagram_1: The first diagram. + :type diagram_1: vector[pair[double, double]] + :param diagram_2: The second diagram. + :type diagram_2: vector[pair[double, double]] + :param sigma: bandwidth of Gaussian + :param N: number of directions + + :returns: the sliced wasserstein kernel. + """ + return sw(diagram_1, diagram_2, sigma, N) diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in index a8dd9f80..7f42968d 100644 --- a/src/cython/gudhi.pyx.in +++ b/src/cython/gudhi.pyx.in @@ -36,6 +36,7 @@ include '@CMAKE_CURRENT_SOURCE_DIR@/cython/persistence_graphical_tools.py' include '@CMAKE_CURRENT_SOURCE_DIR@/cython/reader_utils.pyx' include '@CMAKE_CURRENT_SOURCE_DIR@/cython/witness_complex.pyx' include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx' +include '@CMAKE_CURRENT_SOURCE_DIR@/cython/kernels.pyx' @GUDHI_CYTHON_ALPHA_COMPLEX@ @GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX@ @GUDHI_CYTHON_SUBSAMPLING@ diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h new file mode 100644 index 00000000..9eb610b0 --- /dev/null +++ b/src/cython/include/Kernels_interface.h @@ -0,0 +1,49 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef INCLUDE_KERNELS_INTERFACE_H_ +#define INCLUDE_KERNELS_INTERFACE_H_ + +#include + +#include +#include +#include // for std::pair + +namespace Gudhi { + +namespace persistence_diagram { + + double sw(const std::vector>& diag1, + const std::vector>& diag2, + double sigma, int N) { + Gudhi::Persistence_representations::Sliced_Wasserstein sw1(diag1, sigma, N); + Gudhi::Persistence_representations::Sliced_Wasserstein sw2(diag2, sigma, N); + return sw1.compute_scalar_product(sw2); + } + +} // namespace persistence_diagram + +} // namespace Gudhi + + +#endif // INCLUDE_KERNELS_INTERFACE_H_ -- cgit v1.2.3 From 784697ab263e30c062e92aacfce36d1ed4070c6f Mon Sep 17 00:00:00 2001 From: mcarrier Date: Tue, 6 Mar 2018 17:50:39 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3269 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 17860628d3250f689152cdf65432c5a61d76f4d2 --- .../example/sliced_wasserstein.cpp | 2 + .../include/gudhi/Sliced_Wasserstein.h | 63 +++++++++++++++------- src/cython/cython/kernels.pyx | 17 +++++- src/cython/include/Kernels_interface.h | 15 ++++++ 4 files changed, 77 insertions(+), 20 deletions(-) diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index f153fbe8..2470029b 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -32,6 +32,8 @@ int main(int argc, char** argv) { std::vector > persistence1; std::vector > persistence2; + std::vector > > set1; + std::vector > > set2; persistence1.push_back(std::make_pair(1, 2)); persistence1.push_back(std::make_pair(6, 8)); diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index 6196e207..f2ec56b7 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -51,15 +51,47 @@ class Sliced_Wasserstein { PD diagram; int approx; double sigma; + std::vector > projections, projections_diagonal; + public: - Sliced_Wasserstein(PD _diagram){diagram = _diagram; approx = 100; sigma = 0.001;} - Sliced_Wasserstein(PD _diagram, double _sigma, int _approx){diagram = _diagram; approx = _approx; sigma = _sigma;} + void build_rep(){ + + if(approx > 0){ + + double step = pi/this->approx; + int n = diagram.size(); + + for (int i = 0; i < this->approx; i++){ + std::vector l,l_diag; + for (int j = 0; j < n; j++){ + + double px = diagram[j].first; double py = diagram[j].second; + double proj_diag = (px+py)/2; + + l.push_back ( px * cos(-pi/2+i*step) + py * sin(-pi/2+i*step) ); + l_diag.push_back ( proj_diag * cos(-pi/2+i*step) + proj_diag * sin(-pi/2+i*step) ); + } + + std::sort(l.begin(), l.end()); std::sort(l_diag.begin(), l_diag.end()); + projections.push_back(l); projections_diagonal.push_back(l_diag); + + } + + } + + } + + Sliced_Wasserstein(PD _diagram){diagram = _diagram; approx = 100; sigma = 0.001; build_rep();} + Sliced_Wasserstein(PD _diagram, double _sigma, int _approx){diagram = _diagram; approx = _approx; sigma = _sigma; build_rep();} + PD get_diagram(){return this->diagram;} int get_approx(){return this->approx;} double get_sigma(){return this->sigma;} + + // ********************************** // Utils. 
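// --------------------------------------------------------------------------
// [Editor's note - not part of the patch above] A minimal sketch of how the
// class is meant to be driven once build_rep() has precomputed the projections
// in the constructor: approx > 0 samples that many directions, approx == -1
// keeps the exact computation. It mirrors the calls in sliced_wasserstein.cpp
// and Kernels_interface.h shown earlier; the include path
// <gudhi/Sliced_Wasserstein.h> is an assumption.
#include <iostream>
#include <utility>
#include <vector>
#include <gudhi/Sliced_Wasserstein.h>

int main() {
  using SW = Gudhi::Persistence_representations::Sliced_Wasserstein;
  std::vector<std::pair<double, double>> diag1 = {{1.0, 2.0}, {6.0, 8.0}};
  std::vector<std::pair<double, double>> diag2 = {{3.0, 5.0}, {6.0, 10.0}};
  SW approx1(diag1, 0.1, 100), approx2(diag2, 0.1, 100);  // sigma = 0.1, 100 sampled directions
  SW exact1(diag1, 0.1, -1),   exact2(diag2, 0.1, -1);    // approx == -1: exact distance
  std::cout << approx1.compute_sliced_wasserstein_distance(approx2) << "\n";
  std::cout << exact1.compute_sliced_wasserstein_distance(exact2) << "\n";
  std::cout << approx1.compute_scalar_product(approx2) << "\n";   // exp(-SW/(2*sigma^2))
  return 0;
}
// --------------------------------------------------------------------------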
@@ -227,28 +259,19 @@ class Sliced_Wasserstein { else{ + double step = pi/this->approx; - // Add projections onto diagonal. - int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); - for (int i = 0; i < n2; i++) - diagram1.emplace_back( (diagram2[i].first + diagram2[i].second)/2, (diagram2[i].first + diagram2[i].second)/2 ); - for (int i = 0; i < n1; i++) - diagram2.emplace_back( (diagram1[i].first + diagram1[i].second)/2, (diagram1[i].first + diagram1[i].second)/2 ); - int n = diagram1.size(); - - // Sort and compare all projections. for (int i = 0; i < this->approx; i++){ - std::vector > l1, l2; - for (int j = 0; j < n; j++){ - l1.emplace_back( j, diagram1[j].first*cos(-pi/2+i*step) + diagram1[j].second*sin(-pi/2+i*step) ); - l2.emplace_back( j, diagram2[j].first*cos(-pi/2+i*step) + diagram2[j].second*sin(-pi/2+i*step) ); - } - std::sort(l1.begin(),l1.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); - std::sort(l2.begin(),l2.end(), [=](const std::pair & p1, const std::pair & p2){return p1.second < p2.second;}); - double f = 0; for (int j = 0; j < n; j++) f += std::abs(l1[j].second - l2[j].second); + + std::vector v1; std::vector l1 = this->projections[i]; std::vector l1bis = second.projections_diagonal[i]; std::merge(l1.begin(), l1.end(), l1bis.begin(), l1bis.end(), std::back_inserter(v1)); + std::vector v2; std::vector l2 = second.projections[i]; std::vector l2bis = this->projections_diagonal[i]; std::merge(l2.begin(), l2.end(), l2bis.begin(), l2bis.end(), std::back_inserter(v2)); + int n = v1.size(); double f = 0; + for (int j = 0; j < n; j++) f += std::abs(v1[j] - v2[j]); sw += f*step; + } + } return sw/pi; @@ -265,6 +288,8 @@ class Sliced_Wasserstein { } + + }; } // namespace Sliced_Wasserstein diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx index 220fc6ce..f8798aab 100644 --- a/src/cython/cython/kernels.pyx +++ b/src/cython/cython/kernels.pyx @@ -30,7 +30,8 @@ __copyright__ = "Copyright (C) 2018 INRIA" __license__ = "GPL v3" cdef extern from "Kernels_interface.h" namespace "Gudhi::persistence_diagram": - double sw(vector[pair[double, double]], vector[pair[double, double]], double, int) + double sw (vector[pair[double, double]], vector[pair[double, double]], double, int) + vector[vector[double]] sw_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): """ @@ -45,3 +46,17 @@ def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): :returns: the sliced wasserstein kernel. """ return sw(diagram_1, diagram_2, sigma, N) + +def sliced_wasserstein_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): + """ + + :param diagram_1: The first set of diagrams. + :type diagram_1: vector[vector[pair[double, double]]] + :param diagram_2: The second set of diagrams. + :type diagram_2: vector[vector[pair[double, double]]] + :param sigma: bandwidth of Gaussian + :param N: number of directions + + :returns: the sliced wasserstein kernel matrix. 
+ """ + return sw_matrix(diagrams_1, diagrams_2, sigma, N) diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index 9eb610b0..ef136731 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -41,6 +41,21 @@ namespace persistence_diagram { return sw1.compute_scalar_product(sw2); } + std::vector > sw_matrix(const std::vector > >& s1, + const std::vector > >& s2, + double sigma, int N){ + std::vector > matrix; + std::vector ss1; + int num_diag_1 = s1.size(); for(int i = 0; i < num_diag_1; i++){Gudhi::Persistence_representations::Sliced_Wasserstein sw1(s1[i], sigma, N); ss1.push_back(sw1);} + std::vector ss2; + int num_diag_2 = s2.size(); for(int i = 0; i < num_diag_2; i++){Gudhi::Persistence_representations::Sliced_Wasserstein sw2(s2[i], sigma, N); ss2.push_back(sw2);} + for(int i = 0; i < num_diag_1; i++){ + std::cout << 100.0*i/num_diag_1 << " %" << std::endl; + std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(ss1[i].compute_scalar_product(ss2[j])); matrix.push_back(ps); + } + return matrix; + } + } // namespace persistence_diagram } // namespace Gudhi -- cgit v1.2.3 From 7f3ea79d26c78c8b2107a6a85feba933bd5512ac Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 27 Mar 2018 18:46:18 +0000 Subject: sb_wrapper working git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3308 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: c7400a11583b8b67483974664e410f4c77232fb1 --- .../example/example_rips_complex_from_fvecs.cpp | 8 +++++--- src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 17 +++++++++++------ src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 10 +++++----- src/Toplex_map/include/gudhi/Toplex_map.h | 7 ++++--- src/Toplex_map/test/toplex_map_unit_test.cpp | 2 ++ .../example/example_strong_witness_complex_fvecs.cpp | 8 +++++--- src/common/doc/main_page.h | 17 +++++++++++++++++ 7 files changed, 49 insertions(+), 20 deletions(-) diff --git a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp index 5e7667bd..1b683326 100644 --- a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp +++ b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp @@ -1,10 +1,11 @@ #include -// to construct Rips_complex from a fvecs file of points #include #include +#include #include #include + #include #include @@ -30,7 +31,8 @@ int main(int argc, char **argv) { using K = CGAL::Epick_d; using Point = typename K::Point_d; //using Simplex_tree = Gudhi::Simplex_tree<>; - using Simplex_tree = Gudhi::Fake_simplex_tree; + //using Simplex_tree = Gudhi::Fake_simplex_tree; + using Simplex_tree = Gudhi::Sb_wrapper; using Filtration_value = Simplex_tree::Filtration_value; using Rips_complex = Gudhi::rips_complex::Rips_complex; using Point_vector = std::vector; @@ -52,7 +54,7 @@ int main(int argc, char **argv) { end = clock(); std::cout << "Strong witness complex took "<< static_cast(end - start) / CLOCKS_PER_SEC << " s." << std::endl; - std::cout << "Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices." << std::endl; + //std::cout << "Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices." 
<< std::endl; return 0; } diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h index abd815f9..55955e7b 100644 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h @@ -31,9 +31,6 @@ struct Visitor { class Fake_simplex_tree : public Filtered_toplex_map { public: - /** The type of the sets of Simplex_ptr. - * \ingroup toplex_map */ - typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; /** Handle type to a vertex contained in the simplicial complex. * \ingroup toplex_map */ @@ -87,6 +84,8 @@ public: * \ingroup toplex_map */ std::vector skeleton_simplex_range(int d) const; + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); + protected: @@ -99,14 +98,14 @@ protected: template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - toplex_maps.emplace(nan(""),Toplex_map()); + toplex_maps.emplace(nan(""), new Toplex_map()); using vertex_iterator = typename boost::graph_traits::vertex_iterator; vertex_iterator vi, vi_end; for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { Simplex s; s.insert(*vi); insert_simplex_and_subfaces(s); } - bron_kerbosch_all_cliques(skel_graph, Visitor(&(this->toplex_maps.at(nan(""))))); + bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); } void Fake_simplex_tree::expansion(int max_dim){} @@ -150,7 +149,7 @@ Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) co std::vector Fake_simplex_tree::max_simplices() const{ std::vector max_s; for(auto kv : toplex_maps) - for(const Toplex_map::Simplex_ptr& sptr : kv.second.maximal_cofaces(Simplex())) + for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) max_s.emplace_back(*sptr); return max_s; } @@ -178,6 +177,12 @@ std::vector Fake_simplex_tree::skeleton_simplex_range(int d return filtration_simplex_range(d); } +Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ + for(auto kv : toplex_maps) + kv.second->contraction(x,y,true); + return y; +} + } //namespace Gudhi #endif /* FAKE_SIMPLEX_TREE_H */ diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index 379c65dd..a3653acd 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -50,15 +50,15 @@ public: bool membership(const Input_vertex_range &vertex_range) const; protected: - std::map toplex_maps; + std::map toplex_maps; }; template std::pair Filtered_toplex_map::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f){ Simplex s(vertex_range.begin(),vertex_range.end()); if(membership(s)) return make_pair(s,false); - if(!toplex_maps.count(f)) toplex_maps.emplace(f,Toplex_map()); - toplex_maps.at(f).insert_simplex(vertex_range); + if(!toplex_maps.count(f)) toplex_maps.emplace(f,new Toplex_map()); + toplex_maps.at(f)->insert_simplex(vertex_range); return make_pair(s,true); } @@ -66,7 +66,7 @@ std::pair Filtered_toplex_map::insert_simplex_and_sub template Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Input_vertex_range &vertex_range) const{ for(auto kv : toplex_maps) - if(kv.second.membership(vertex_range)) + if(kv.second->membership(vertex_range)) return kv.first; //min only because a map is ordered return nan(""); } @@ -74,7 +74,7 @@ 
Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Inpu template bool Filtered_toplex_map::membership(const Input_vertex_range &vertex_range) const{ for(auto kv : toplex_maps) - if(kv.second.membership(vertex_range)) + if(kv.second->membership(vertex_range)) return true; return false; } diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index 00127baf..ccea34d5 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -69,7 +69,7 @@ public: * The edge has to verify the link condition if you want to preserve topology. * Returns the remaining vertex. * \ingroup toplex_map */ - Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y, bool force=false); /** Adds the given simplex to the complex. * The simplex must not have neither maximal face nor coface in the complex. @@ -196,17 +196,18 @@ Toplex_map::Simplex_ptr_set Toplex_map::maximal_cofaces(const Input_vertex_range return cofaces; } -Toplex_map::Vertex Toplex_map::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ +Toplex_map::Vertex Toplex_map::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y, bool force){ if(!t0.count(x)) return y; if(!t0.count(y)) return x; int k, d; - if(t0.at(x).size() > t0.at(y).size()) + if(force || (t0.at(x).size() > t0.at(y).size())) k=x, d=y; else k=y, d=x; for(const Toplex_map::Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ //Copy constructor needed because the set is modified Simplex sigma(*sptr); + Simplex s; s.insert(2); erase_maximal(sptr); sigma.erase(d); sigma.insert(k); diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp index b7a9251c..b714cd2a 100644 --- a/src/Toplex_map/test/toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -1,6 +1,8 @@ #include #include #include +#include + #define BOOST_TEST_DYN_LINK #define BOOST_TEST_MODULE "toplex map" diff --git a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp index a8e16fb0..9f5b32c4 100644 --- a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp +++ b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp @@ -22,6 +22,7 @@ #include #include +#include #include #include #include @@ -51,7 +52,8 @@ int main(int argc, char * const argv[]) { double alpha2 = atof(argv[3]); clock_t start, end; //Gudhi::Simplex_tree<> simplex_tree; - Gudhi::Fake_simplex_tree simplex_tree; + //Gudhi::Fake_simplex_tree simplex_tree; + Gudhi::Sb_wrapper simplex_tree; // Read the point file Point_vector point_vector, landmarks; @@ -73,7 +75,7 @@ int main(int argc, char * const argv[]) { end = clock(); std::cout << "Strong witness complex took " << static_cast(end - start) / CLOCKS_PER_SEC << " s. 
\n"; - std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << std::endl; - std::cout << "Max dimension is : " << simplex_tree.dimension() << std::endl; +// std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << std::endl; + // std::cout << "Max dimension is : " << simplex_tree.dimension() << std::endl; } diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index d6569f0c..71655824 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -133,6 +133,23 @@ + \subsection ToplexMapDataStructure Toplex Map + \image html "map.png" "Toplex map representation" + + + + + + \subsection WitnessComplexDataStructure Witness complex \image html "Witness_complex_representation.png" "Witness complex representation"
+ Author: François Godi
+ Introduced in: GUDHI 2.1.0
+ Copyright: GPL v3
+
+ The Toplex map data structure is composed, firstly, of a raw storage of the toplices (the maximal simplices)
+ and, secondly, of a map which associates each vertex with a set of pointers to all the toplices
+ containing this vertex.
+ User manual: \ref toplex_map - Reference manual: Gudhi::Toplex_map
+
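For reference, a minimal usage sketch of the Toplex_map interface exercised by the patches above
(insert_simplex, membership, and the contraction member declared in Toplex_map.h). The main() driver,
the concrete vertex values and the printed output are illustrative assumptions only and are not part
of the patch series:

    #include <gudhi/Toplex_map.h>
    #include <iostream>
    #include <vector>

    int main() {
      Gudhi::Toplex_map tm;
      using Vertex = Gudhi::Toplex_map::Vertex;
      // Only maximal simplices (toplices) are stored explicitly.
      tm.insert_simplex(std::vector<Vertex>{1, 2, 3});
      tm.insert_simplex(std::vector<Vertex>{3, 4});
      // Faces of a stored toplex are members even though they are not stored themselves.
      std::cout << tm.membership(std::vector<Vertex>{2, 3}) << std::endl;  // prints 1
      // Contract the edge {3,4}; the remaining vertex is returned.
      Vertex remaining = tm.contraction(3, 4);
      std::cout << remaining << std::endl;
      return 0;
    }
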
-- cgit v1.2.3 From 7ffe37e7d0aa9d9977e29f6bb05637dc0f18dc74 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 29 Mar 2018 09:36:23 +0000 Subject: wrappers repertory git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3312 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: b93766b7d2d49c7b55ddfec57ae400951fc71f82 --- src/Toplex_map/include/gudhi/Fake_simplex_tree.h | 189 --------------------- .../include/gudhi/wrappers/Fake_simplex_tree.h | 189 +++++++++++++++++++++ src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h | 109 ++++++++++++ 3 files changed, 298 insertions(+), 189 deletions(-) delete mode 100644 src/Toplex_map/include/gudhi/Fake_simplex_tree.h create mode 100644 src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h create mode 100644 src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h diff --git a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/Fake_simplex_tree.h deleted file mode 100644 index 55955e7b..00000000 --- a/src/Toplex_map/include/gudhi/Fake_simplex_tree.h +++ /dev/null @@ -1,189 +0,0 @@ -#ifndef FAKE_SIMPLEX_TREE_H -#define FAKE_SIMPLEX_TREE_H - -#include - -#include -#include - -#include -#include - -namespace Gudhi { - -struct Visitor { - Toplex_map* tm; - - Visitor(Toplex_map* tm) - :tm(tm) - {} - - template - void clique(const Clique& c, const Graph& g) - { - tm->insert_simplex(c); - } -}; - -/** Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. - * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. - * \ingroup toplex_map */ -class Fake_simplex_tree : public Filtered_toplex_map { - -public: - - /** Handle type to a vertex contained in the simplicial complex. - * \ingroup toplex_map */ - typedef Toplex_map::Vertex Vertex_handle; - - /** Handle type to a simplex contained in the simplicial complex. - * \ingroup toplex_map */ - typedef Toplex_map::Simplex Simplex_handle; - - typedef void Insertion_result_type; - - /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` - * in the simplicial complex. - * \ingroup toplex_map */ - template - void insert_graph(const OneSkeletonGraph& skel_graph); - - /** Do actually nothing. - * \ingroup toplex_map */ - void expansion(int max_dim); - - /** Returns the number of vertices stored i.e. the number of max simplices - * \ingroup toplex_map */ - std::size_t num_vertices() const; - - /** Returns the dimension of the complex. - * \ingroup toplex_map */ - std::size_t dimension() const; - - /** Returns the dimension of a given simplex in the complex. - * \ingroup toplex_map */ - std::size_t dimension(Simplex_ptr& sptr) const; - - /** Returns the number of simplices stored i.e. the number of maximal simplices. - * \ingroup toplex_map */ - std::size_t num_simplices() const; - - /** Returns a range over the vertices of a simplex. - * \ingroup toplex_map */ - Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; - - /** Returns a set of all maximal (critical if there is filtration values) simplices. - * \ingroup toplex_map */ - std::vector max_simplices() const; - - /** Returns all the simplices, of max dimension d if a parameter d is given. 
- * \ingroup toplex_map */ - std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; - - /** Returns all the simplices of max dimension d - * \ingroup toplex_map */ - std::vector skeleton_simplex_range(int d) const; - - Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); - - -protected: - - /** \internal Does all the facets of the given simplex belong to the complex ? - * \ingroup toplex_map */ - template - bool all_facets_inside(const Input_vertex_range &vertex_range) const; - -}; - -template -void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - toplex_maps.emplace(nan(""), new Toplex_map()); - using vertex_iterator = typename boost::graph_traits::vertex_iterator; - vertex_iterator vi, vi_end; - for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { - Simplex s; s.insert(*vi); - insert_simplex_and_subfaces(s); - } - bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); -} - -void Fake_simplex_tree::expansion(int max_dim){} - -template -bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ - Simplex sigma(vertex_range); - for(const Simplex& s : facets(sigma)) - if(!membership(s)) return false; - return true; -} - -std::size_t Fake_simplex_tree::dimension() const { - std::size_t max = 0; - for(const Simplex& s : max_simplices()) - max = std::max(max, s.size()); - return max-1; -} - -std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ - return sptr->size(); -} - -std::size_t Fake_simplex_tree::num_simplices() const { - //return filtration_simplex_range().size(); - return max_simplices().size(); -} - -std::size_t Fake_simplex_tree::num_vertices() const { - std::unordered_set vertices; - for(const Toplex_map::Simplex& s : max_simplices()) - for (Toplex_map::Vertex v : s) - vertices.emplace(v); - return vertices.size(); -} - -Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { - return s; -} - -std::vector Fake_simplex_tree::max_simplices() const{ - std::vector max_s; - for(auto kv : toplex_maps) - for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) - max_s.emplace_back(*sptr); - return max_s; -} - -std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ - std::vector m = max_simplices(); - std::vector range; - Toplex_map::Simplex_ptr_set seen; - while(m.begin()!=m.end()){ - Toplex_map::Simplex s(m.back()); - m.pop_back(); - if(seen.find(get_key(s))==seen.end()){ - if((int) s.size()-1 <=d) - range.emplace_back(s); - seen.emplace(get_key(s)); - if(s.size()>0) - for(Simplex& sigma : facets(s)) - m.emplace_back(sigma); - } - } - return range; -} - -std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ - return filtration_simplex_range(d); -} - -Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ - for(auto kv : toplex_maps) - kv.second->contraction(x,y,true); - return y; -} - -} //namespace Gudhi - -#endif /* FAKE_SIMPLEX_TREE_H */ - diff --git a/src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h new file mode 100644 index 00000000..55955e7b --- /dev/null +++ b/src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h @@ -0,0 +1,189 @@ +#ifndef FAKE_SIMPLEX_TREE_H +#define FAKE_SIMPLEX_TREE_H + +#include + +#include +#include + +#include +#include + +namespace Gudhi { + +struct Visitor { + Toplex_map* tm; + + 
Visitor(Toplex_map* tm) + :tm(tm) + {} + + template + void clique(const Clique& c, const Graph& g) + { + tm->insert_simplex(c); + } +}; + +/** Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. + * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. + * \ingroup toplex_map */ +class Fake_simplex_tree : public Filtered_toplex_map { + +public: + + /** Handle type to a vertex contained in the simplicial complex. + * \ingroup toplex_map */ + typedef Toplex_map::Vertex Vertex_handle; + + /** Handle type to a simplex contained in the simplicial complex. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex Simplex_handle; + + typedef void Insertion_result_type; + + /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` + * in the simplicial complex. + * \ingroup toplex_map */ + template + void insert_graph(const OneSkeletonGraph& skel_graph); + + /** Do actually nothing. + * \ingroup toplex_map */ + void expansion(int max_dim); + + /** Returns the number of vertices stored i.e. the number of max simplices + * \ingroup toplex_map */ + std::size_t num_vertices() const; + + /** Returns the dimension of the complex. + * \ingroup toplex_map */ + std::size_t dimension() const; + + /** Returns the dimension of a given simplex in the complex. + * \ingroup toplex_map */ + std::size_t dimension(Simplex_ptr& sptr) const; + + /** Returns the number of simplices stored i.e. the number of maximal simplices. + * \ingroup toplex_map */ + std::size_t num_simplices() const; + + /** Returns a range over the vertices of a simplex. + * \ingroup toplex_map */ + Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; + + /** Returns a set of all maximal (critical if there is filtration values) simplices. + * \ingroup toplex_map */ + std::vector max_simplices() const; + + /** Returns all the simplices, of max dimension d if a parameter d is given. + * \ingroup toplex_map */ + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + + /** Returns all the simplices of max dimension d + * \ingroup toplex_map */ + std::vector skeleton_simplex_range(int d) const; + + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); + + +protected: + + /** \internal Does all the facets of the given simplex belong to the complex ? 
+ * \ingroup toplex_map */ + template + bool all_facets_inside(const Input_vertex_range &vertex_range) const; + +}; + +template +void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ + toplex_maps.emplace(nan(""), new Toplex_map()); + using vertex_iterator = typename boost::graph_traits::vertex_iterator; + vertex_iterator vi, vi_end; + for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { + Simplex s; s.insert(*vi); + insert_simplex_and_subfaces(s); + } + bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); +} + +void Fake_simplex_tree::expansion(int max_dim){} + +template +bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ + Simplex sigma(vertex_range); + for(const Simplex& s : facets(sigma)) + if(!membership(s)) return false; + return true; +} + +std::size_t Fake_simplex_tree::dimension() const { + std::size_t max = 0; + for(const Simplex& s : max_simplices()) + max = std::max(max, s.size()); + return max-1; +} + +std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ + return sptr->size(); +} + +std::size_t Fake_simplex_tree::num_simplices() const { + //return filtration_simplex_range().size(); + return max_simplices().size(); +} + +std::size_t Fake_simplex_tree::num_vertices() const { + std::unordered_set vertices; + for(const Toplex_map::Simplex& s : max_simplices()) + for (Toplex_map::Vertex v : s) + vertices.emplace(v); + return vertices.size(); +} + +Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { + return s; +} + +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector max_s; + for(auto kv : toplex_maps) + for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) + max_s.emplace_back(*sptr); + return max_s; +} + +std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ + std::vector m = max_simplices(); + std::vector range; + Toplex_map::Simplex_ptr_set seen; + while(m.begin()!=m.end()){ + Toplex_map::Simplex s(m.back()); + m.pop_back(); + if(seen.find(get_key(s))==seen.end()){ + if((int) s.size()-1 <=d) + range.emplace_back(s); + seen.emplace(get_key(s)); + if(s.size()>0) + for(Simplex& sigma : facets(s)) + m.emplace_back(sigma); + } + } + return range; +} + +std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ + return filtration_simplex_range(d); +} + +Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ + for(auto kv : toplex_maps) + kv.second->contraction(x,y,true); + return y; +} + +} //namespace Gudhi + +#endif /* FAKE_SIMPLEX_TREE_H */ + diff --git a/src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h b/src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h new file mode 100644 index 00000000..0bdf7b9c --- /dev/null +++ b/src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h @@ -0,0 +1,109 @@ +#include + +#ifndef SKELETON_BLOCKER_WRAPPER_H_ +#define SKELETON_BLOCKER_WRAPPER_H_ + +namespace Gudhi { + +class Sb_wrapper{ + +public: + + typedef Gudhi::skeleton_blocker::Skeleton_blocker_simple_traits Traits; + typedef Gudhi::skeleton_blocker::Skeleton_blocker_complex Complex; + + typedef Complex::Vertex_handle Vertex_handle; + + typedef Complex::Simplex Simplex; + + typedef Toplex_map::Vertex Vertex; + + typedef Toplex_map::Simplex_ptr Simplex_ptr; + + typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + + typedef double Filtration_value; + + template + std::pair insert_simplex_and_subfaces(const 
Input_vertex_range &vertex_range, double); + + template + bool membership(const Input_vertex_range &vertex_range) const; + + typedef Toplex_map::Simplex Simplex_handle; + + typedef void Insertion_result_type; + + /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` + * in the simplicial complex. */ + template + void insert_graph(const OneSkeletonGraph& skel_graph); + + /** Do actually nothing. */ + void expansion(int max_dim); + + /** Returns the number of vertices stored i.e. the number of max simplices */ + std::size_t num_vertices() const; + + /** Returns the dimension of the complex. */ + std::size_t dimension() const; + + /** Returns the dimension of a given simplex in the complex. */ + std::size_t dimension(Simplex_ptr& sptr) const; + + /** Returns the number of simplices stored i.e. the number of maximal simplices. */ + std::size_t num_simplices() const; + + /** Returns a range over the vertices of a simplex. */ + Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; + + /** Returns a set of all maximal (critical if there is filtration values) simplices. */ + std::vector max_simplices() const; + + /** Returns all the simplices, of max dimension d if a parameter d is given. */ + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + + /** Returns all the simplices of max dimension d */ + std::vector skeleton_simplex_range(int d) const; + +private: + + Complex sb; + +}; + + +template +void Sb_wrapper::insert_graph(const OneSkeletonGraph& skel_graph){ + using vertex_iterator = typename boost::graph_traits::vertex_iterator; + vertex_iterator vi, vi_end; + // for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) + // insert_vertex(*vi); + //edges +} + +void Sb_wrapper::expansion(int max_dim){} + +template +std::pair Sb_wrapper::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, double){ + Complex::Simplex s; + for (auto v : vertex_range) + s.add_vertex(Vertex_handle(v)); + if(sb.contains(s)) + return std::make_pair(false,false); + sb.add_simplex(s); + return std::make_pair(true,true); +} + +std::size_t Sb_wrapper::num_vertices() const{ + std::size_t num_vertices = 0; + for(auto v : sb.vertex_range()) + ++num_vertices; + return num_vertices; +} + + + +} + +#endif -- cgit v1.2.3 From 1c2d52705af9ae38cba9401bff66d9e8095d0c9e Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 29 Mar 2018 09:41:34 +0000 Subject: names git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3313 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a2d37ef72c86e5050408496fe1620a69c6c55242 --- src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h | 189 +++++++++++++++++++++ src/Toplex_map/include/gudhi/sb_to_st_wrapper.h | 109 ++++++++++++ .../include/gudhi/wrappers/Fake_simplex_tree.h | 189 --------------------- src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h | 109 ------------ 4 files changed, 298 insertions(+), 298 deletions(-) create mode 100644 src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h create mode 100644 src/Toplex_map/include/gudhi/sb_to_st_wrapper.h delete mode 100644 src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h delete mode 100644 src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h diff --git a/src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h b/src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h new file mode 100644 index 00000000..55955e7b --- /dev/null +++ b/src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h @@ -0,0 +1,189 @@ +#ifndef 
FAKE_SIMPLEX_TREE_H +#define FAKE_SIMPLEX_TREE_H + +#include + +#include +#include + +#include +#include + +namespace Gudhi { + +struct Visitor { + Toplex_map* tm; + + Visitor(Toplex_map* tm) + :tm(tm) + {} + + template + void clique(const Clique& c, const Graph& g) + { + tm->insert_simplex(c); + } +}; + +/** Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. + * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. + * \ingroup toplex_map */ +class Fake_simplex_tree : public Filtered_toplex_map { + +public: + + /** Handle type to a vertex contained in the simplicial complex. + * \ingroup toplex_map */ + typedef Toplex_map::Vertex Vertex_handle; + + /** Handle type to a simplex contained in the simplicial complex. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex Simplex_handle; + + typedef void Insertion_result_type; + + /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` + * in the simplicial complex. + * \ingroup toplex_map */ + template + void insert_graph(const OneSkeletonGraph& skel_graph); + + /** Do actually nothing. + * \ingroup toplex_map */ + void expansion(int max_dim); + + /** Returns the number of vertices stored i.e. the number of max simplices + * \ingroup toplex_map */ + std::size_t num_vertices() const; + + /** Returns the dimension of the complex. + * \ingroup toplex_map */ + std::size_t dimension() const; + + /** Returns the dimension of a given simplex in the complex. + * \ingroup toplex_map */ + std::size_t dimension(Simplex_ptr& sptr) const; + + /** Returns the number of simplices stored i.e. the number of maximal simplices. + * \ingroup toplex_map */ + std::size_t num_simplices() const; + + /** Returns a range over the vertices of a simplex. + * \ingroup toplex_map */ + Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; + + /** Returns a set of all maximal (critical if there is filtration values) simplices. + * \ingroup toplex_map */ + std::vector max_simplices() const; + + /** Returns all the simplices, of max dimension d if a parameter d is given. + * \ingroup toplex_map */ + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + + /** Returns all the simplices of max dimension d + * \ingroup toplex_map */ + std::vector skeleton_simplex_range(int d) const; + + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); + + +protected: + + /** \internal Does all the facets of the given simplex belong to the complex ? 
+ * \ingroup toplex_map */ + template + bool all_facets_inside(const Input_vertex_range &vertex_range) const; + +}; + +template +void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ + toplex_maps.emplace(nan(""), new Toplex_map()); + using vertex_iterator = typename boost::graph_traits::vertex_iterator; + vertex_iterator vi, vi_end; + for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { + Simplex s; s.insert(*vi); + insert_simplex_and_subfaces(s); + } + bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); +} + +void Fake_simplex_tree::expansion(int max_dim){} + +template +bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ + Simplex sigma(vertex_range); + for(const Simplex& s : facets(sigma)) + if(!membership(s)) return false; + return true; +} + +std::size_t Fake_simplex_tree::dimension() const { + std::size_t max = 0; + for(const Simplex& s : max_simplices()) + max = std::max(max, s.size()); + return max-1; +} + +std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ + return sptr->size(); +} + +std::size_t Fake_simplex_tree::num_simplices() const { + //return filtration_simplex_range().size(); + return max_simplices().size(); +} + +std::size_t Fake_simplex_tree::num_vertices() const { + std::unordered_set vertices; + for(const Toplex_map::Simplex& s : max_simplices()) + for (Toplex_map::Vertex v : s) + vertices.emplace(v); + return vertices.size(); +} + +Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { + return s; +} + +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector max_s; + for(auto kv : toplex_maps) + for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) + max_s.emplace_back(*sptr); + return max_s; +} + +std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ + std::vector m = max_simplices(); + std::vector range; + Toplex_map::Simplex_ptr_set seen; + while(m.begin()!=m.end()){ + Toplex_map::Simplex s(m.back()); + m.pop_back(); + if(seen.find(get_key(s))==seen.end()){ + if((int) s.size()-1 <=d) + range.emplace_back(s); + seen.emplace(get_key(s)); + if(s.size()>0) + for(Simplex& sigma : facets(s)) + m.emplace_back(sigma); + } + } + return range; +} + +std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ + return filtration_simplex_range(d); +} + +Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ + for(auto kv : toplex_maps) + kv.second->contraction(x,y,true); + return y; +} + +} //namespace Gudhi + +#endif /* FAKE_SIMPLEX_TREE_H */ + diff --git a/src/Toplex_map/include/gudhi/sb_to_st_wrapper.h b/src/Toplex_map/include/gudhi/sb_to_st_wrapper.h new file mode 100644 index 00000000..0bdf7b9c --- /dev/null +++ b/src/Toplex_map/include/gudhi/sb_to_st_wrapper.h @@ -0,0 +1,109 @@ +#include + +#ifndef SKELETON_BLOCKER_WRAPPER_H_ +#define SKELETON_BLOCKER_WRAPPER_H_ + +namespace Gudhi { + +class Sb_wrapper{ + +public: + + typedef Gudhi::skeleton_blocker::Skeleton_blocker_simple_traits Traits; + typedef Gudhi::skeleton_blocker::Skeleton_blocker_complex Complex; + + typedef Complex::Vertex_handle Vertex_handle; + + typedef Complex::Simplex Simplex; + + typedef Toplex_map::Vertex Vertex; + + typedef Toplex_map::Simplex_ptr Simplex_ptr; + + typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + + typedef double Filtration_value; + + template + std::pair insert_simplex_and_subfaces(const Input_vertex_range 
&vertex_range, double); + + template + bool membership(const Input_vertex_range &vertex_range) const; + + typedef Toplex_map::Simplex Simplex_handle; + + typedef void Insertion_result_type; + + /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` + * in the simplicial complex. */ + template + void insert_graph(const OneSkeletonGraph& skel_graph); + + /** Do actually nothing. */ + void expansion(int max_dim); + + /** Returns the number of vertices stored i.e. the number of max simplices */ + std::size_t num_vertices() const; + + /** Returns the dimension of the complex. */ + std::size_t dimension() const; + + /** Returns the dimension of a given simplex in the complex. */ + std::size_t dimension(Simplex_ptr& sptr) const; + + /** Returns the number of simplices stored i.e. the number of maximal simplices. */ + std::size_t num_simplices() const; + + /** Returns a range over the vertices of a simplex. */ + Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; + + /** Returns a set of all maximal (critical if there is filtration values) simplices. */ + std::vector max_simplices() const; + + /** Returns all the simplices, of max dimension d if a parameter d is given. */ + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + + /** Returns all the simplices of max dimension d */ + std::vector skeleton_simplex_range(int d) const; + +private: + + Complex sb; + +}; + + +template +void Sb_wrapper::insert_graph(const OneSkeletonGraph& skel_graph){ + using vertex_iterator = typename boost::graph_traits::vertex_iterator; + vertex_iterator vi, vi_end; + // for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) + // insert_vertex(*vi); + //edges +} + +void Sb_wrapper::expansion(int max_dim){} + +template +std::pair Sb_wrapper::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, double){ + Complex::Simplex s; + for (auto v : vertex_range) + s.add_vertex(Vertex_handle(v)); + if(sb.contains(s)) + return std::make_pair(false,false); + sb.add_simplex(s); + return std::make_pair(true,true); +} + +std::size_t Sb_wrapper::num_vertices() const{ + std::size_t num_vertices = 0; + for(auto v : sb.vertex_range()) + ++num_vertices; + return num_vertices; +} + + + +} + +#endif diff --git a/src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h b/src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h deleted file mode 100644 index 55955e7b..00000000 --- a/src/Toplex_map/include/gudhi/wrappers/Fake_simplex_tree.h +++ /dev/null @@ -1,189 +0,0 @@ -#ifndef FAKE_SIMPLEX_TREE_H -#define FAKE_SIMPLEX_TREE_H - -#include - -#include -#include - -#include -#include - -namespace Gudhi { - -struct Visitor { - Toplex_map* tm; - - Visitor(Toplex_map* tm) - :tm(tm) - {} - - template - void clique(const Clique& c, const Graph& g) - { - tm->insert_simplex(c); - } -}; - -/** Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. - * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. - * \ingroup toplex_map */ -class Fake_simplex_tree : public Filtered_toplex_map { - -public: - - /** Handle type to a vertex contained in the simplicial complex. - * \ingroup toplex_map */ - typedef Toplex_map::Vertex Vertex_handle; - - /** Handle type to a simplex contained in the simplicial complex. 
- * \ingroup toplex_map */ - typedef Toplex_map::Simplex Simplex_handle; - - typedef void Insertion_result_type; - - /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` - * in the simplicial complex. - * \ingroup toplex_map */ - template - void insert_graph(const OneSkeletonGraph& skel_graph); - - /** Do actually nothing. - * \ingroup toplex_map */ - void expansion(int max_dim); - - /** Returns the number of vertices stored i.e. the number of max simplices - * \ingroup toplex_map */ - std::size_t num_vertices() const; - - /** Returns the dimension of the complex. - * \ingroup toplex_map */ - std::size_t dimension() const; - - /** Returns the dimension of a given simplex in the complex. - * \ingroup toplex_map */ - std::size_t dimension(Simplex_ptr& sptr) const; - - /** Returns the number of simplices stored i.e. the number of maximal simplices. - * \ingroup toplex_map */ - std::size_t num_simplices() const; - - /** Returns a range over the vertices of a simplex. - * \ingroup toplex_map */ - Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; - - /** Returns a set of all maximal (critical if there is filtration values) simplices. - * \ingroup toplex_map */ - std::vector max_simplices() const; - - /** Returns all the simplices, of max dimension d if a parameter d is given. - * \ingroup toplex_map */ - std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; - - /** Returns all the simplices of max dimension d - * \ingroup toplex_map */ - std::vector skeleton_simplex_range(int d) const; - - Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); - - -protected: - - /** \internal Does all the facets of the given simplex belong to the complex ? 
- * \ingroup toplex_map */ - template - bool all_facets_inside(const Input_vertex_range &vertex_range) const; - -}; - -template -void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - toplex_maps.emplace(nan(""), new Toplex_map()); - using vertex_iterator = typename boost::graph_traits::vertex_iterator; - vertex_iterator vi, vi_end; - for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { - Simplex s; s.insert(*vi); - insert_simplex_and_subfaces(s); - } - bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); -} - -void Fake_simplex_tree::expansion(int max_dim){} - -template -bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ - Simplex sigma(vertex_range); - for(const Simplex& s : facets(sigma)) - if(!membership(s)) return false; - return true; -} - -std::size_t Fake_simplex_tree::dimension() const { - std::size_t max = 0; - for(const Simplex& s : max_simplices()) - max = std::max(max, s.size()); - return max-1; -} - -std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ - return sptr->size(); -} - -std::size_t Fake_simplex_tree::num_simplices() const { - //return filtration_simplex_range().size(); - return max_simplices().size(); -} - -std::size_t Fake_simplex_tree::num_vertices() const { - std::unordered_set vertices; - for(const Toplex_map::Simplex& s : max_simplices()) - for (Toplex_map::Vertex v : s) - vertices.emplace(v); - return vertices.size(); -} - -Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { - return s; -} - -std::vector Fake_simplex_tree::max_simplices() const{ - std::vector max_s; - for(auto kv : toplex_maps) - for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) - max_s.emplace_back(*sptr); - return max_s; -} - -std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ - std::vector m = max_simplices(); - std::vector range; - Toplex_map::Simplex_ptr_set seen; - while(m.begin()!=m.end()){ - Toplex_map::Simplex s(m.back()); - m.pop_back(); - if(seen.find(get_key(s))==seen.end()){ - if((int) s.size()-1 <=d) - range.emplace_back(s); - seen.emplace(get_key(s)); - if(s.size()>0) - for(Simplex& sigma : facets(s)) - m.emplace_back(sigma); - } - } - return range; -} - -std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ - return filtration_simplex_range(d); -} - -Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ - for(auto kv : toplex_maps) - kv.second->contraction(x,y,true); - return y; -} - -} //namespace Gudhi - -#endif /* FAKE_SIMPLEX_TREE_H */ - diff --git a/src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h b/src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h deleted file mode 100644 index 0bdf7b9c..00000000 --- a/src/Toplex_map/include/gudhi/wrappers/Sb_wrapper.h +++ /dev/null @@ -1,109 +0,0 @@ -#include - -#ifndef SKELETON_BLOCKER_WRAPPER_H_ -#define SKELETON_BLOCKER_WRAPPER_H_ - -namespace Gudhi { - -class Sb_wrapper{ - -public: - - typedef Gudhi::skeleton_blocker::Skeleton_blocker_simple_traits Traits; - typedef Gudhi::skeleton_blocker::Skeleton_blocker_complex Complex; - - typedef Complex::Vertex_handle Vertex_handle; - - typedef Complex::Simplex Simplex; - - typedef Toplex_map::Vertex Vertex; - - typedef Toplex_map::Simplex_ptr Simplex_ptr; - - typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; - - typedef double Filtration_value; - - template - std::pair insert_simplex_and_subfaces(const 
Input_vertex_range &vertex_range, double); - - template - bool membership(const Input_vertex_range &vertex_range) const; - - typedef Toplex_map::Simplex Simplex_handle; - - typedef void Insertion_result_type; - - /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` - * in the simplicial complex. */ - template - void insert_graph(const OneSkeletonGraph& skel_graph); - - /** Do actually nothing. */ - void expansion(int max_dim); - - /** Returns the number of vertices stored i.e. the number of max simplices */ - std::size_t num_vertices() const; - - /** Returns the dimension of the complex. */ - std::size_t dimension() const; - - /** Returns the dimension of a given simplex in the complex. */ - std::size_t dimension(Simplex_ptr& sptr) const; - - /** Returns the number of simplices stored i.e. the number of maximal simplices. */ - std::size_t num_simplices() const; - - /** Returns a range over the vertices of a simplex. */ - Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; - - /** Returns a set of all maximal (critical if there is filtration values) simplices. */ - std::vector max_simplices() const; - - /** Returns all the simplices, of max dimension d if a parameter d is given. */ - std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; - - /** Returns all the simplices of max dimension d */ - std::vector skeleton_simplex_range(int d) const; - -private: - - Complex sb; - -}; - - -template -void Sb_wrapper::insert_graph(const OneSkeletonGraph& skel_graph){ - using vertex_iterator = typename boost::graph_traits::vertex_iterator; - vertex_iterator vi, vi_end; - // for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) - // insert_vertex(*vi); - //edges -} - -void Sb_wrapper::expansion(int max_dim){} - -template -std::pair Sb_wrapper::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, double){ - Complex::Simplex s; - for (auto v : vertex_range) - s.add_vertex(Vertex_handle(v)); - if(sb.contains(s)) - return std::make_pair(false,false); - sb.add_simplex(s); - return std::make_pair(true,true); -} - -std::size_t Sb_wrapper::num_vertices() const{ - std::size_t num_vertices = 0; - for(auto v : sb.vertex_range()) - ++num_vertices; - return num_vertices; -} - - - -} - -#endif -- cgit v1.2.3 From 552ce6a0b42af77e210d252d8e8c5b25138a518b Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 29 Mar 2018 09:56:16 +0000 Subject: msg git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3314 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 2591252fddbe57daee885ad29e5df242b33f5f52 --- src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h | 189 ----------------------- src/Toplex_map/include/gudhi/sb_to_st_wrapper.h | 109 ------------- 2 files changed, 298 deletions(-) delete mode 100644 src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h delete mode 100644 src/Toplex_map/include/gudhi/sb_to_st_wrapper.h diff --git a/src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h b/src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h deleted file mode 100644 index 55955e7b..00000000 --- a/src/Toplex_map/include/gudhi/ftm_to_st_wrapper.h +++ /dev/null @@ -1,189 +0,0 @@ -#ifndef FAKE_SIMPLEX_TREE_H -#define FAKE_SIMPLEX_TREE_H - -#include - -#include -#include - -#include -#include - -namespace Gudhi { - -struct Visitor { - Toplex_map* tm; - - Visitor(Toplex_map* tm) - :tm(tm) - {} - - template - void clique(const Clique& c, const Graph& g) - { - tm->insert_simplex(c); - } -}; - -/** 
Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. - * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. - * \ingroup toplex_map */ -class Fake_simplex_tree : public Filtered_toplex_map { - -public: - - /** Handle type to a vertex contained in the simplicial complex. - * \ingroup toplex_map */ - typedef Toplex_map::Vertex Vertex_handle; - - /** Handle type to a simplex contained in the simplicial complex. - * \ingroup toplex_map */ - typedef Toplex_map::Simplex Simplex_handle; - - typedef void Insertion_result_type; - - /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` - * in the simplicial complex. - * \ingroup toplex_map */ - template - void insert_graph(const OneSkeletonGraph& skel_graph); - - /** Do actually nothing. - * \ingroup toplex_map */ - void expansion(int max_dim); - - /** Returns the number of vertices stored i.e. the number of max simplices - * \ingroup toplex_map */ - std::size_t num_vertices() const; - - /** Returns the dimension of the complex. - * \ingroup toplex_map */ - std::size_t dimension() const; - - /** Returns the dimension of a given simplex in the complex. - * \ingroup toplex_map */ - std::size_t dimension(Simplex_ptr& sptr) const; - - /** Returns the number of simplices stored i.e. the number of maximal simplices. - * \ingroup toplex_map */ - std::size_t num_simplices() const; - - /** Returns a range over the vertices of a simplex. - * \ingroup toplex_map */ - Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; - - /** Returns a set of all maximal (critical if there is filtration values) simplices. - * \ingroup toplex_map */ - std::vector max_simplices() const; - - /** Returns all the simplices, of max dimension d if a parameter d is given. - * \ingroup toplex_map */ - std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; - - /** Returns all the simplices of max dimension d - * \ingroup toplex_map */ - std::vector skeleton_simplex_range(int d) const; - - Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); - - -protected: - - /** \internal Does all the facets of the given simplex belong to the complex ? 
- * \ingroup toplex_map */ - template - bool all_facets_inside(const Input_vertex_range &vertex_range) const; - -}; - -template -void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - toplex_maps.emplace(nan(""), new Toplex_map()); - using vertex_iterator = typename boost::graph_traits::vertex_iterator; - vertex_iterator vi, vi_end; - for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { - Simplex s; s.insert(*vi); - insert_simplex_and_subfaces(s); - } - bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); -} - -void Fake_simplex_tree::expansion(int max_dim){} - -template -bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ - Simplex sigma(vertex_range); - for(const Simplex& s : facets(sigma)) - if(!membership(s)) return false; - return true; -} - -std::size_t Fake_simplex_tree::dimension() const { - std::size_t max = 0; - for(const Simplex& s : max_simplices()) - max = std::max(max, s.size()); - return max-1; -} - -std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ - return sptr->size(); -} - -std::size_t Fake_simplex_tree::num_simplices() const { - //return filtration_simplex_range().size(); - return max_simplices().size(); -} - -std::size_t Fake_simplex_tree::num_vertices() const { - std::unordered_set vertices; - for(const Toplex_map::Simplex& s : max_simplices()) - for (Toplex_map::Vertex v : s) - vertices.emplace(v); - return vertices.size(); -} - -Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { - return s; -} - -std::vector Fake_simplex_tree::max_simplices() const{ - std::vector max_s; - for(auto kv : toplex_maps) - for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) - max_s.emplace_back(*sptr); - return max_s; -} - -std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ - std::vector m = max_simplices(); - std::vector range; - Toplex_map::Simplex_ptr_set seen; - while(m.begin()!=m.end()){ - Toplex_map::Simplex s(m.back()); - m.pop_back(); - if(seen.find(get_key(s))==seen.end()){ - if((int) s.size()-1 <=d) - range.emplace_back(s); - seen.emplace(get_key(s)); - if(s.size()>0) - for(Simplex& sigma : facets(s)) - m.emplace_back(sigma); - } - } - return range; -} - -std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ - return filtration_simplex_range(d); -} - -Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ - for(auto kv : toplex_maps) - kv.second->contraction(x,y,true); - return y; -} - -} //namespace Gudhi - -#endif /* FAKE_SIMPLEX_TREE_H */ - diff --git a/src/Toplex_map/include/gudhi/sb_to_st_wrapper.h b/src/Toplex_map/include/gudhi/sb_to_st_wrapper.h deleted file mode 100644 index 0bdf7b9c..00000000 --- a/src/Toplex_map/include/gudhi/sb_to_st_wrapper.h +++ /dev/null @@ -1,109 +0,0 @@ -#include - -#ifndef SKELETON_BLOCKER_WRAPPER_H_ -#define SKELETON_BLOCKER_WRAPPER_H_ - -namespace Gudhi { - -class Sb_wrapper{ - -public: - - typedef Gudhi::skeleton_blocker::Skeleton_blocker_simple_traits Traits; - typedef Gudhi::skeleton_blocker::Skeleton_blocker_complex Complex; - - typedef Complex::Vertex_handle Vertex_handle; - - typedef Complex::Simplex Simplex; - - typedef Toplex_map::Vertex Vertex; - - typedef Toplex_map::Simplex_ptr Simplex_ptr; - - typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; - - typedef double Filtration_value; - - template - std::pair insert_simplex_and_subfaces(const Input_vertex_range 
&vertex_range, double); - - template - bool membership(const Input_vertex_range &vertex_range) const; - - typedef Toplex_map::Simplex Simplex_handle; - - typedef void Insertion_result_type; - - /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` - * in the simplicial complex. */ - template - void insert_graph(const OneSkeletonGraph& skel_graph); - - /** Do actually nothing. */ - void expansion(int max_dim); - - /** Returns the number of vertices stored i.e. the number of max simplices */ - std::size_t num_vertices() const; - - /** Returns the dimension of the complex. */ - std::size_t dimension() const; - - /** Returns the dimension of a given simplex in the complex. */ - std::size_t dimension(Simplex_ptr& sptr) const; - - /** Returns the number of simplices stored i.e. the number of maximal simplices. */ - std::size_t num_simplices() const; - - /** Returns a range over the vertices of a simplex. */ - Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; - - /** Returns a set of all maximal (critical if there is filtration values) simplices. */ - std::vector max_simplices() const; - - /** Returns all the simplices, of max dimension d if a parameter d is given. */ - std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; - - /** Returns all the simplices of max dimension d */ - std::vector skeleton_simplex_range(int d) const; - -private: - - Complex sb; - -}; - - -template -void Sb_wrapper::insert_graph(const OneSkeletonGraph& skel_graph){ - using vertex_iterator = typename boost::graph_traits::vertex_iterator; - vertex_iterator vi, vi_end; - // for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) - // insert_vertex(*vi); - //edges -} - -void Sb_wrapper::expansion(int max_dim){} - -template -std::pair Sb_wrapper::insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, double){ - Complex::Simplex s; - for (auto v : vertex_range) - s.add_vertex(Vertex_handle(v)); - if(sb.contains(s)) - return std::make_pair(false,false); - sb.add_simplex(s); - return std::make_pair(true,true); -} - -std::size_t Sb_wrapper::num_vertices() const{ - std::size_t num_vertices = 0; - for(auto v : sb.vertex_range()) - ++num_vertices; - return num_vertices; -} - - - -} - -#endif -- cgit v1.2.3 From acd156a740dce76d3ea5ea569e56f7a30c81a046 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 29 Mar 2018 09:58:24 +0000 Subject: fvec simplex_tree git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3315 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: eb72df78970030b4835b020af679b5de5b2c2356 --- src/Rips_complex/example/example_rips_complex_from_fvecs.cpp | 4 ++-- src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp index 1b683326..e11dc3ea 100644 --- a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp +++ b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp @@ -30,9 +30,9 @@ int main(int argc, char **argv) { // Type definitions using K = CGAL::Epick_d; using Point = typename K::Point_d; - //using Simplex_tree = Gudhi::Simplex_tree<>; + using Simplex_tree = Gudhi::Simplex_tree<>; //using Simplex_tree = Gudhi::Fake_simplex_tree; - using Simplex_tree = Gudhi::Sb_wrapper; + //using Simplex_tree = Gudhi::Sb_wrapper; using Filtration_value = 
Simplex_tree::Filtration_value; using Rips_complex = Gudhi::rips_complex::Rips_complex; using Point_vector = std::vector; diff --git a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp index 9f5b32c4..5c431ec1 100644 --- a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp +++ b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp @@ -51,9 +51,9 @@ int main(int argc, char * const argv[]) { int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]); double alpha2 = atof(argv[3]); clock_t start, end; - //Gudhi::Simplex_tree<> simplex_tree; + Gudhi::Simplex_tree<> simplex_tree; //Gudhi::Fake_simplex_tree simplex_tree; - Gudhi::Sb_wrapper simplex_tree; + //Gudhi::Sb_wrapper simplex_tree; // Read the point file Point_vector point_vector, landmarks; -- cgit v1.2.3 From 367ffaf3633eca5335b12d71b085a61b2818f7be Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 29 Mar 2018 10:07:50 +0000 Subject: compile git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3316 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: f156ee6e355e93a362784a27b143c755e66e1045 --- src/Rips_complex/example/example_rips_complex_from_fvecs.cpp | 4 ++-- src/Tangential_complex/example/example_basic.cpp | 6 +++--- src/Toplex_map/test/toplex_map_unit_test.cpp | 2 -- .../example/example_strong_witness_complex_fvecs.cpp | 4 ++-- src/Witness_complex/example/example_strong_witness_complex_off.cpp | 2 +- 5 files changed, 8 insertions(+), 10 deletions(-) diff --git a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp index e11dc3ea..40aab5dc 100644 --- a/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp +++ b/src/Rips_complex/example/example_rips_complex_from_fvecs.cpp @@ -1,7 +1,7 @@ #include #include -#include -#include +//#include +//#include #include #include diff --git a/src/Tangential_complex/example/example_basic.cpp b/src/Tangential_complex/example/example_basic.cpp index 39165397..ab35edf0 100644 --- a/src/Tangential_complex/example/example_basic.cpp +++ b/src/Tangential_complex/example/example_basic.cpp @@ -1,6 +1,6 @@ #include #include -#include +//#include #include @@ -38,8 +38,8 @@ int main(void) { tc.compute_tangential_complex(); // Export the TC into a Simplex_tree - //Gudhi::Simplex_tree<> stree; - Gudhi::Fake_simplex_tree stree; + Gudhi::Simplex_tree<> stree; + //Gudhi::Fake_simplex_tree stree; tc.create_complex(stree); // Display stats about inconsistencies diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp index b714cd2a..95ee7a02 100644 --- a/src/Toplex_map/test/toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -1,7 +1,5 @@ #include #include -#include -#include #define BOOST_TEST_DYN_LINK diff --git a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp index 5c431ec1..3e0efa15 100644 --- a/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp +++ b/src/Witness_complex/example/example_strong_witness_complex_fvecs.cpp @@ -21,8 +21,8 @@ */ #include -#include -#include +//#include +//#include #include #include #include diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp index 6292e248..f195953b 100644 --- 
a/src/Witness_complex/example/example_strong_witness_complex_off.cpp +++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp @@ -21,7 +21,7 @@ */ #include -#include +//#include #include #include #include -- cgit v1.2.3 From 4e80b66cf5d4e6121149a12f3137e372e04d8588 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 29 Mar 2018 15:27:31 +0000 Subject: added doc + cython git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3319 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: cffc2e28ebf6fae46246c5abaac52b7328adf490 --- .../doc/Persistence_representations_doc.h | 62 +++++++++++++++++++ .../example/persistence_weighted_gaussian.cpp | 16 ++--- .../example/sliced_wasserstein.cpp | 12 ++-- .../include/gudhi/Persistence_weighted_gaussian.h | 72 +++++++++++++++++++--- .../include/gudhi/Sliced_Wasserstein.h | 58 +++++++++++++++-- src/cython/cython/kernels.pyx | 56 +++++++++++++++++ src/cython/include/Kernels_interface.h | 65 +++++++++++++++++++ 7 files changed, 315 insertions(+), 26 deletions(-) diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index 38bd3a21..6d4cc96c 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -250,6 +250,68 @@ namespace Persistence_representations { absolute value of differences between coordinates. A scalar product is a sum of products of values at the corresponding positions of two vectors. + + + + +\section sec_persistence_kernels Kernels on Persistence Diagrams + Reference manual: \ref Gudhi::Persistence_representations::Sliced_Wasserstein
+ Reference manual: \ref Gudhi::Persistence_representations::Persistence_weighted_gaussian
+ + Kernels for Persistence Diagrams can be regarded as infinite-dimensional vectorizations. More specifically, + they are similarity functions whose evaluations on pairs of persistence diagrams equals the scalar products + between images of these pairs under a map \f$\Phi\f$ taking values in a specific (possibly non Euclidean) Hilbert space \f$k(D_i, D_j) = \langle \Phi(D_i),\Phi(D_j)\rangle\f$. + Reciprocally, classical results of learning theory ensure that such a \f$\Phi\f$ exists for a given similarity function \f$k\f$ if and only if \f$k\f$ is positive semi-definite. + Kernels are designed for algorithms that can be kernelized, i.e., algorithms that only require to know scalar products between instances in order to run. + Examples of such algorithms include Support Vector Machines, Principal Component Analysis and Ridge Regression. + + There have been several attempts at defining kernels, i.e., positive semi-definite functions, between persistence diagrams within the last few years. We provide implementation + for three of them: + + \li the Persistence Scale Space Kernel---see \cite Reininghaus_Huber_ALL_PSSK, which is the classical scalar product between \f$L^2\f$ functions, where persistence diagrams + are turned into functions by centering and summing Gaussian functions over the diagram points and their symmetric counterparts w.r.t. the diagonal: \f$k(D_1,D_2)=\int \Phi(D_1)\Phi(D_2)\f$, + where \f$\Phi(D)=\sum_{p\in D} {\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right)\f$. + + \li the Persistence Weighted Gaussian Kernel---see \cite Kusano_Fukumizu_Hiraoka_PWGK, which is a slight generalization of the previous kernel, is the scalar product between + weighted Kernel Mean Embeddings of persistence diagrams w.r.t. the Gaussian Kernel \f$k_G\f$ (with corresponding map \f$\Phi_G\f$) in \f$\mathbb{R}^2\f$: + \f$k(D_1,D_2)=\langle\sum_{p\in D_1} w(p)\Phi_G(p), \sum_{q\in D_2} w(q)\Phi_G(q)\rangle\f$ + + \li the Sliced Wasserstein Kernel---see \cite pmlr-v70-carriere17a, which takes the form of a Gaussian kernel with a specific distance between persistence diagrams + called the Sliced Wasserstein Distance: \f$k(D_1,D_2)={\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right)\f$ + + When launching: + + \code $> ./Sliced_Wasserstein + \endcode + + the program output is: + + \code $> Approx SW distance: 5.33648 + $> Exact SW distance: 5.33798 + $> Approx SW kernel: 0.0693743 + $> Exact SW kernel: 0.0693224 + $> Distance induced by approx SW kernel: 1.36428 + $> Distance induced by exact SW kernel: 1.3643 + \endcode + + + and when launching: + + \code $> ./Persistence_weighted_gaussian + \endcode + + the program output is: + + \code $> Approx PWG kernel: 1.21509 + $> Exact PWG kernel: 1.13628 + $> Distance induced by approx PWG kernel: 3.23354 + $> Distance induced by exact PWG kernel: 3.25697 + $> Approx Gaussian PWG kernel: 0.0194222 + $> Exact Gaussian PWG kernel: 0.0192524 + $> Approx PSS kernel: 0.134413 + $> Exact PSS kernel: 0.133394 + \endcode + */ /** @} */ // end defgroup Persistence_representations diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp index a0e820ea..d447f165 100644 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp @@ -57,11 +57,11 @@ int main(int argc, char** argv) { // Linear PWG - std::cout << PWG1.compute_scalar_product (PWG2) << std::endl; - 
std::cout << PWGex1.compute_scalar_product (PWGex2) << std::endl; + std::cout << "Approx PWG kernel: " << PWG1.compute_scalar_product (PWG2) << std::endl; + std::cout << "Exact PWG kernel: " << PWGex1.compute_scalar_product (PWGex2) << std::endl; - std::cout << PWG1.distance (PWG2) << std::endl; - std::cout << PWGex1.distance (PWGex2) << std::endl; + std::cout << "Distance induced by approx PWG kernel: " << PWG1.distance (PWG2) << std::endl; + std::cout << "Distance induced by exact PWG kernel: " << PWGex1.distance (PWGex2) << std::endl; @@ -71,8 +71,8 @@ int main(int argc, char** argv) { // Gaussian PWG - std::cout << std::exp( -PWG1.distance (PWG2, 2) ) / (2*tau*tau) << std::endl; - std::cout << std::exp( -PWGex1.distance (PWGex2, 2) ) / (2*tau*tau) << std::endl; + std::cout << "Approx Gaussian PWG kernel: " << std::exp( -PWG1.distance (PWG2) ) / (2*tau*tau) << std::endl; + std::cout << "Exact Gaussian PWG kernel: " << std::exp( -PWGex1.distance (PWGex2) ) / (2*tau*tau) << std::endl; @@ -91,8 +91,8 @@ int main(int argc, char** argv) { PWG pwgex1(pd1, 2*std::sqrt(sigma), -1, PWG::pss_weight); PWG pwgex2(pd2, 2*std::sqrt(sigma), -1, PWG::pss_weight); - std::cout << pwg1.compute_scalar_product (pwg2) / (16*pi*sigma) << std::endl; - std::cout << pwgex1.compute_scalar_product (pwgex2) / (16*pi*sigma) << std::endl; + std::cout << "Approx PSS kernel: " << pwg1.compute_scalar_product (pwg2) / (16*pi*sigma) << std::endl; + std::cout << "Exact PSS kernel: " << pwgex1.compute_scalar_product (pwgex2) / (16*pi*sigma) << std::endl; diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index 2470029b..f1aeea5c 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -32,8 +32,6 @@ int main(int argc, char** argv) { std::vector > persistence1; std::vector > persistence2; - std::vector > > set1; - std::vector > > set2; persistence1.push_back(std::make_pair(1, 2)); persistence1.push_back(std::make_pair(6, 8)); @@ -52,10 +50,12 @@ int main(int argc, char** argv) { SW swex1(persistence1, 1, -1); SW swex2(persistence2, 1, -1); - std::cout << sw1.compute_sliced_wasserstein_distance(sw2) << std::endl; - std::cout << swex1.compute_sliced_wasserstein_distance(swex2) << std::endl; - std::cout << sw1.compute_scalar_product(sw2) << std::endl; - std::cout << swex1.distance(swex2) << std::endl; + std::cout << "Approx SW distance: " << sw1.compute_sliced_wasserstein_distance(sw2) << std::endl; + std::cout << "Exact SW distance: " << swex1.compute_sliced_wasserstein_distance(swex2) << std::endl; + std::cout << "Approx SW kernel: " << sw1.compute_scalar_product(sw2) << std::endl; + std::cout << "Exact SW kernel: " << swex1.compute_scalar_product(swex2) << std::endl; + std::cout << "Distance induced by approx SW kernel: " << sw1.distance(sw2) << std::endl; + std::cout << "Distance induced by exact SW kernel: " << swex1.distance(swex2) << std::endl; return 0; } diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index a6efa72d..f824225a 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -45,7 +45,40 @@ using Weight = std::function) >; namespace Gudhi { namespace Persistence_representations { - +/** + * 
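The example above exercises the three variants side by side. The following sketch makes the same calls against the Persistence_weighted_gaussian class added by this patch; the diagrams and the values of sigma and tau are illustrative, PWG is a local alias, and pi is written as a literal so the snippet stays self-contained.

// Usage sketch for Persistence_weighted_gaussian, assuming the headers from
// this patch are on the include path; all values are illustrative.
#include <gudhi/Persistence_weighted_gaussian.h>
#include <cmath>
#include <iostream>
#include <utility>
#include <vector>

using PWG = Gudhi::Persistence_representations::Persistence_weighted_gaussian;
using Diagram = std::vector<std::pair<double, double>>;

int main() {
  const double pi = 3.14159265358979323846;
  Diagram d1 = {{1., 2.}, {6., 8.}, {0., 4.}};
  Diagram d2 = {{2., 9.}, {1., 6.}, {3., 5.}};
  double sigma = 2., tau = 5.;

  // Exact computation (approx = -1) with the default arctan weight.
  PWG pwg1(d1, sigma, -1);
  PWG pwg2(d2, sigma, -1);

  // Linear PWG kernel and the distance it induces between embeddings.
  double k = pwg1.compute_scalar_product(pwg2);
  double d = pwg1.distance(pwg2);
  std::cout << "Exact PWG kernel: " << k << std::endl;

  // Gaussian PWG kernel: a second Gaussian applied to the embedding distance.
  std::cout << "Gaussian PWG kernel: " << std::exp(-d * d / (2 * tau * tau)) << std::endl;

  // PSS kernel: mirror each point across the diagonal, use the pss weight,
  // bandwidth 2*sqrt(sigma), then normalize by 16*pi*sigma.
  Diagram p1 = d1, p2 = d2;
  for (const auto& pt : d1) p1.emplace_back(pt.second, pt.first);
  for (const auto& pt : d2) p2.emplace_back(pt.second, pt.first);
  PWG pss1(p1, 2 * std::sqrt(sigma), -1, PWG::pss_weight);
  PWG pss2(p2, 2 * std::sqrt(sigma), -1, PWG::pss_weight);
  std::cout << "PSS kernel: "
            << pss1.compute_scalar_product(pss2) / (16 * pi * sigma) << std::endl;
}

Exact evaluation is requested with approx = -1; passing a positive value switches to the random Fourier feature approximation.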
\class Persistence_weighted_gaussian gudhi/Persistence_weighted_gaussian.h + * \brief A class implementing the Persistence Weighted Gaussian Kernel and a specific case of it called the Persistence Scale Space Kernel. + * + * \ingroup Persistence_representations + * + * \details + * The Persistence Weighted Gaussian Kernel is built with Gaussian Kernel Mean Embedding, meaning that each persistence diagram is first + * sent to the Hilbert space of a Gaussian kernel with bandwidth parameter \f$\sigma >0\f$ using a weighted mean embedding \f$\Phi\f$: + * + * \f$ \Phi\,:\,D\,\rightarrow\,\sum_{p\in D}\,w(p)\,{\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right) \f$, + * + * Usually, the weight function is chosen to be an arctan function of the distance of the point to the diagonal: + * \f$w(p) = {\rm arctan}(C\,|y-x|^\alpha)\f$, for some parameters \f$C,\alpha >0\f$. + * Then, their scalar product in this space is computed: + * + * \f$ k(D_1,D_2)=\langle\Phi(D_1),\Phi(D_2)\rangle + * \,=\,\sum_{p\in D_1}\,\sum_{q\in D_2}\,w(p)\,w(q)\,{\rm exp}\left(-\frac{\|p-q\|_2^2}{2\sigma^2}\right).\f$ + * + * Note that one may apply a second Gaussian kernel to their distance in this space and still get a kernel. + * + * It follows that the computation time is \f$O(n^2)\f$ where \f$n\f$ is the number of points + * in the diagrams. This time can be improved by computing approximations of the kernel + * with \f$m\f$ Fourier features \cite Rahimi07randomfeatures. In that case, the computation time becomes \f$O(mn)\f$. + * + * The Persistence Scale Space Kernel is a Persistence Weighted Gaussian Kernel between modified diagrams: + * the symmetric of each point with respect to the diagonal is first added in each diagram, and then the weight function + * is set to be +1 if the point is above the diagonal and -1 otherwise. + * + * For more details, please consult Persistence Weighted Kernel for Topological Data Analysis\cite Kusano_Fukumizu_Hiraoka_PWGK + * and A Stable Multi-Scale Kernel for Topological Machine Learning\cite Reininghaus_Huber_ALL_PSSK . + * It implements the following concepts: Topological_data_with_distances, Topological_data_with_scalar_product. + * +**/ class Persistence_weighted_gaussian{ protected: @@ -56,8 +89,17 @@ class Persistence_weighted_gaussian{ public: - Persistence_weighted_gaussian(PD _diagram){diagram = _diagram; sigma = 1.0; approx = 1000; weight = arctan_weight;} - Persistence_weighted_gaussian(PD _diagram, double _sigma, int _approx, Weight _weight){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} + /** \brief Persistence Weighted Gaussian Kernel constructor. + * \ingroup Persistence_weighted_gaussian + * + * @param[in] _diagram persistence diagram. + * @param[in] _sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] _approx number of random Fourier features in case of approximate computation, set to -1 for exact computation. + * @param[in] _weight weight function for the points in the diagrams. + * + */ + Persistence_weighted_gaussian(PD _diagram, double _sigma = 1.0, int _approx = 1000, Weight _weight = arctan_weight){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} + PD get_diagram(){return this->diagram;} double get_sigma(){return this->sigma;} int get_approx(){return this->approx;} @@ -68,7 +110,12 @@ class Persistence_weighted_gaussian{ // Utils. // ********************************** - + /** \brief Specific weight of Persistence Scale Space Kernel. 
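The O(mn) Fourier feature approximation mentioned in the documentation above can also be sketched independently of the class: directions are sampled from a centered Gaussian with standard deviation 1/sigma, each diagram is summarized by m weighted cosine/sine features, and the kernel is approximated by the average of the feature products. The toy diagrams, fixed seed and parameter values below are illustrative only.

// Standalone sketch of the m-Fourier-feature approximation of the PWG kernel:
// each diagram D is summarized by m pairs
// (sum_p w(p) cos(<z_i, p>), sum_p w(p) sin(<z_i, p>)) with z_i ~ N(0, I)/sigma.
#include <cmath>
#include <iostream>
#include <random>
#include <utility>
#include <vector>

using Diagram = std::vector<std::pair<double, double>>;

double weight(const std::pair<double, double>& p) { return std::atan(p.second - p.first); }

double approx_pwg_kernel(const Diagram& d1, const Diagram& d2, double sigma, int m) {
  std::mt19937 gen(42);                       // fixed seed, illustrative only
  std::normal_distribution<double> gauss(0., 1.);
  double k = 0.;
  for (int i = 0; i < m; ++i) {
    double zx = gauss(gen) / sigma, zy = gauss(gen) / sigma;
    double c1 = 0., s1 = 0., c2 = 0., s2 = 0.;
    for (const auto& p : d1) {
      c1 += weight(p) * std::cos(zx * p.first + zy * p.second);
      s1 += weight(p) * std::sin(zx * p.first + zy * p.second);
    }
    for (const auto& q : d2) {
      c2 += weight(q) * std::cos(zx * q.first + zy * q.second);
      s2 += weight(q) * std::sin(zx * q.first + zy * q.second);
    }
    k += c1 * c2 + s1 * s2;                   // cos(z.(p-q)) expanded
  }
  return k / m;                               // Monte Carlo average over the m features
}

int main() {
  Diagram d1 = {{1., 2.}, {6., 8.}, {0., 4.}};
  Diagram d2 = {{2., 9.}, {1., 6.}, {3., 5.}};
  std::cout << "Approx PWG kernel (m = 1000): " << approx_pwg_kernel(d1, d2, 1., 1000) << std::endl;
}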
+ * \ingroup Persistence_weighted_gaussian + * + * @param[in] p point in 2D. + * + */ static double pss_weight(std::pair p){ if(p.second > p.first) return 1; else return -1; @@ -108,7 +155,12 @@ class Persistence_weighted_gaussian{ // Scalar product + distance. // ********************************** - + /** \brief Evaluation of the kernel on a pair of diagrams. + * \ingroup Persistence_weighted_gaussian + * + * @param[in] second other instance of class Persistence_weighted_gaussian. Warning: sigma, approx and weight parameters need to be the same for both instances!!! + * + */ double compute_scalar_product(Persistence_weighted_gaussian second){ PD diagram1 = this->diagram; PD diagram2 = second.diagram; @@ -131,11 +183,17 @@ class Persistence_weighted_gaussian{ } } - double distance(Persistence_weighted_gaussian second, double power = 1) { + /** \brief Evaluation of the distance between images of diagrams in the Hilbert space of the kernel. + * \ingroup Persistence_weighted_gaussian + * + * @param[in] second other instance of class Persistence_weighted_gaussian. Warning: sigma, approx and weight parameters need to be the same for both instances!!! + * + */ + double distance(Persistence_weighted_gaussian second) { if(this->sigma != second.get_sigma() || this->approx != second.get_approx()){ std::cout << "Error: different representations!" << std::endl; return 0; } - else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), power/2.0); + else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); } diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index f2ec56b7..bfb77384 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -45,6 +45,30 @@ using PD = std::vector >; namespace Gudhi { namespace Persistence_representations { +/** + * \class Sliced_Wasserstein gudhi/Sliced_Wasserstein.h + * \brief A class implementing the Sliced Wasserstein Kernel. + * + * \ingroup Persistence_representations + * + * \details + * The Sliced Wasserstein Kernel is defined as a Gaussian-like Kernel between persistence diagrams, where the distance used for + * comparison is the Sliced Wasserstein distance \f$SW\f$ between persistence diagrams, defined as the integral of the 1-norm + * between the sorted projections of the diagrams onto all lines passing through the origin: + * + * \f$ SW(D_1,D_2)=\int_{\theta\in\mathbb{S}}\,\|\pi_\theta(D_1\cup\pi_\Delta(D_2))-\pi_\theta(D_2\cup\pi_\Delta(D_1))\|_1{\rm d}\theta\f$, + * + * where \f$\pi_\theta\f$ is the projection onto the line defined with angle \f$\theta\f$ in the unit circle \f$\mathbb{S}\f$, + * and \f$\pi_\Delta\f$ is the projection onto the diagonal. + * The integral can be either computed exactly in \f$O(n^2{\rm log}(n))\f$ time, where \f$n\f$ is the number of points + * in the diagrams, or approximated by sampling \f$N\f$ lines in the circle in \f$O(Nn{\rm log}(n))\f$ time. The Sliced Wasserstein Kernel is then computed as: + * + * \f$ k(D_1,D_2) = {\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right).\f$ + * + * For more details, please consult Sliced Wasserstein Kernel for Persistence Diagrams\cite pmlr-v70-carriere17a . 
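The sampled approximation of the Sliced Wasserstein distance described above fits in a few lines. This standalone sketch augments each diagram with the diagonal projections of the other one, compares sorted projections over N directions, and averages over the directions, as the class in this header does; the diagrams and N are illustrative, and with sigma = 1 the resulting kernel value should be close to the documented example output.

// Standalone sketch of the sampled Sliced Wasserstein distance.
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

using Diagram = std::vector<std::pair<double, double>>;

double approx_sliced_wasserstein(Diagram d1, Diagram d2, int N) {
  const double pi = 3.14159265358979323846;
  const std::size_t n1 = d1.size(), n2 = d2.size();
  // Project every point of the other diagram onto the diagonal.
  for (std::size_t i = 0; i < n2; ++i)
    d1.emplace_back((d2[i].first + d2[i].second) / 2, (d2[i].first + d2[i].second) / 2);
  for (std::size_t i = 0; i < n1; ++i)
    d2.emplace_back((d1[i].first + d1[i].second) / 2, (d1[i].first + d1[i].second) / 2);
  const std::size_t n = d1.size();            // both diagrams now have n1 + n2 points
  double sw = 0., step = pi / N;
  for (int i = 0; i < N; ++i) {
    double theta = -pi / 2 + i * step;
    std::vector<double> proj1, proj2;
    for (std::size_t j = 0; j < n; ++j) {
      proj1.push_back(d1[j].first * std::cos(theta) + d1[j].second * std::sin(theta));
      proj2.push_back(d2[j].first * std::cos(theta) + d2[j].second * std::sin(theta));
    }
    std::sort(proj1.begin(), proj1.end());
    std::sort(proj2.begin(), proj2.end());
    double l1 = 0.;
    for (std::size_t j = 0; j < n; ++j) l1 += std::abs(proj1[j] - proj2[j]);
    sw += l1 * step;
  }
  return sw / pi;                             // average over the sampled directions
}

int main() {
  Diagram d1 = {{1., 2.}, {6., 8.}, {0., 4.}};
  Diagram d2 = {{2., 9.}, {1., 6.}, {3., 5.}};
  double sw = approx_sliced_wasserstein(d1, d2, 100), sigma = 1.;
  std::cout << "Approx SW distance: " << sw << std::endl;
  std::cout << "Approx SW kernel: " << std::exp(-sw / (2 * sigma * sigma)) << std::endl;
}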
+ * It implements the following concepts: Topological_data_with_distances, Topological_data_with_scalar_product. + * +**/ class Sliced_Wasserstein { protected: @@ -83,8 +107,15 @@ class Sliced_Wasserstein { } - Sliced_Wasserstein(PD _diagram){diagram = _diagram; approx = 100; sigma = 0.001; build_rep();} - Sliced_Wasserstein(PD _diagram, double _sigma, int _approx){diagram = _diagram; approx = _approx; sigma = _sigma; build_rep();} + /** \brief Sliced Wasserstein Kernel constructor. + * \ingroup Sliced_Wasserstein + * + * @param[in] _diagram persistence diagram. + * @param[in] _sigma bandwidth parameter. + * @param[in] _approx number of directions used to approximate the integral in the Sliced Wasserstein distance, set to -1 for exact computation. + * + */ + Sliced_Wasserstein(PD _diagram, double _sigma = 1.0, int _approx = 100){diagram = _diagram; approx = _approx; sigma = _sigma; build_rep();} PD get_diagram(){return this->diagram;} int get_approx(){return this->approx;} @@ -163,6 +194,12 @@ class Sliced_Wasserstein { // Scalar product + distance. // ********************************** + /** \brief Evaluation of the Sliced Wasserstein Distance between a pair of diagrams. + * \ingroup Sliced_Wasserstein + * + * @param[in] second other instance of class Sliced_Wasserstein. Warning: approx parameter needs to be the same for both instances!!! + * + */ double compute_sliced_wasserstein_distance(Sliced_Wasserstein second) { PD diagram1 = this->diagram; PD diagram2 = second.diagram; double sw = 0; @@ -277,14 +314,25 @@ class Sliced_Wasserstein { return sw/pi; } - + /** \brief Evaluation of the kernel on a pair of diagrams. + * \ingroup Sliced_Wasserstein + * + * @param[in] second other instance of class Sliced_Wasserstein. Warning: sigma and approx parameters need to be the same for both instances!!! + * + */ double compute_scalar_product(Sliced_Wasserstein second){ return std::exp(-compute_sliced_wasserstein_distance(second)/(2*this->sigma*this->sigma)); } - double distance(Sliced_Wasserstein second, double power = 1) { + /** \brief Evaluation of the distance between images of diagrams in the Hilbert space of the kernel. + * \ingroup Sliced_Wasserstein + * + * @param[in] second other instance of class Sliced_Wasserstein. Warning: sigma and approx parameters need to be the same for both instances!!! + * + */ + double distance(Sliced_Wasserstein second) { if(this->sigma != second.sigma || this->approx != second.approx){std::cout << "Error: different representations!" << std::endl; return 0;} - else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), power/2.0); + else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); } diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx index f8798aab..4fc21f03 100644 --- a/src/cython/cython/kernels.pyx +++ b/src/cython/cython/kernels.pyx @@ -60,3 +60,59 @@ def sliced_wasserstein_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): :returns: the sliced wasserstein kernel matrix. """ return sw_matrix(diagrams_1, diagrams_2, sigma, N) + +def persistence_weighted_gaussian(diagram_1, diagram_2, sigma = 1, N = 100): + """ + + :param diagram_1: The first diagram. + :type diagram_1: vector[pair[double, double]] + :param diagram_2: The second diagram. 
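Since these kernels exist to feed kernelized learning methods, a typical use of the Sliced_Wasserstein class is to assemble a Gram matrix over a collection of diagrams. A short sketch, with illustrative diagrams and parameters:

// Sketch: Gram matrix K[i][j] = k(D_i, D_j) of the Sliced Wasserstein Kernel,
// the typical input of kernelized methods (SVM, kernel PCA) mentioned earlier.
#include <gudhi/Sliced_Wasserstein.h>
#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

using SW = Gudhi::Persistence_representations::Sliced_Wasserstein;
using Diagram = std::vector<std::pair<double, double>>;

int main() {
  std::vector<Diagram> diagrams = {
      {{1., 2.}, {6., 8.}, {0., 4.}},
      {{2., 9.}, {1., 6.}, {3., 5.}},
      {{0.5, 3.}, {4., 7.}}};
  double sigma = 1.;
  int n_directions = 100;   // set to -1 for the exact O(n^2 log n) computation

  // Build one representation per diagram, then evaluate the kernel pairwise.
  std::vector<SW> reps;
  for (const Diagram& d : diagrams) reps.emplace_back(d, sigma, n_directions);

  std::vector<std::vector<double>> gram(reps.size(), std::vector<double>(reps.size()));
  for (std::size_t i = 0; i < reps.size(); ++i)
    for (std::size_t j = 0; j < reps.size(); ++j)
      gram[i][j] = reps[i].compute_scalar_product(reps[j]);

  for (const auto& row : gram) {
    for (double v : row) std::cout << v << " ";
    std::cout << std::endl;
  }
}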
+ :type diagram_2: vector[pair[double, double]] + :param sigma: bandwidth of Gaussian + :param N: number of Fourier features + + :returns: the persistence weighted gaussian kernel. + """ + return pwg(diagram_1, diagram_2, sigma, N) + +def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): + """ + + :param diagram_1: The first set of diagrams. + :type diagram_1: vector[vector[pair[double, double]]] + :param diagram_2: The second set of diagrams. + :type diagram_2: vector[vector[pair[double, double]]] + :param sigma: bandwidth of Gaussian + :param N: number of Fourier features + + :returns: the persistence weighted gaussian kernel matrix. + """ + return pwg_matrix(diagrams_1, diagrams_2, sigma, N) + +def persistence_scale_space(diagram_1, diagram_2, sigma = 1, N = 100): + """ + + :param diagram_1: The first diagram. + :type diagram_1: vector[pair[double, double]] + :param diagram_2: The second diagram. + :type diagram_2: vector[pair[double, double]] + :param sigma: bandwidth of Gaussian + :param N: number of Fourier features + + :returns: the persistence scale space kernel. + """ + return pss(diagram_1, diagram_2, sigma, N) + +def persistence_scale_space_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): + """ + + :param diagram_1: The first set of diagrams. + :type diagram_1: vector[vector[pair[double, double]]] + :param diagram_2: The second set of diagrams. + :type diagram_2: vector[vector[pair[double, double]]] + :param sigma: bandwidth of Gaussian + :param N: number of Fourier features + + :returns: the persistence scale space kernel matrix. + """ + return pss_matrix(diagrams_1, diagrams_2, sigma, N) diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index ef136731..17bb5d8b 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -41,6 +41,38 @@ namespace persistence_diagram { return sw1.compute_scalar_product(sw2); } + double pwg(const std::vector>& diag1, + const std::vector>& diag2, + double sigma, int N) { + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::arctan_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::arctan_weight); + return pwg1.compute_scalar_product(pwg2); + } + + double pss(const std::vector>& diag1, + const std::vector>& diag2, + double sigma, int N) { + + std::vector> pd1 = diag1; int numpts = diag1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(diag1[i].second,diag1[i].first); + std::vector> pd2 = diag2; numpts = diag2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(diag2[i].second,diag2[i].first); + + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + + return pwg1.compute_scalar_product (pwg2) / (16*pi*sigma); + } + + double pss_sym(const std::vector>& diag1, + const std::vector>& diag2, + double sigma, int N) { + + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + + return 
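For reference, the Python functions declared above forward to C++ helpers in the Gudhi::persistence_diagram namespace. A sketch of the equivalent calls from C++, assuming the cython include directory is on the include path; the diagrams and parameter values are illustrative:

// Sketch of the C++ entry points behind the new Python bindings
// (sliced_wasserstein, persistence_weighted_gaussian, persistence_scale_space
// and their *_matrix variants).
#include <Kernels_interface.h>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  std::vector<std::pair<double, double>> d1 = {{1., 2.}, {6., 8.}, {0., 4.}};
  std::vector<std::pair<double, double>> d2 = {{2., 9.}, {1., 6.}, {3., 5.}};
  double sigma = 1.;
  int N = 100;  // Fourier features for pwg/pss, directions for sw

  std::cout << "SW kernel:  " << Gudhi::persistence_diagram::sw(d1, d2, sigma, N) << std::endl;
  std::cout << "PWG kernel: " << Gudhi::persistence_diagram::pwg(d1, d2, sigma, N) << std::endl;
  // pss symmetrizes the diagrams internally and applies the 1/(16*pi*sigma)
  // normalization, so plain diagrams are passed here as well.
  std::cout << "PSS kernel: " << Gudhi::persistence_diagram::pss(d1, d2, sigma, N) << std::endl;
}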
pwg1.compute_scalar_product (pwg2) / (16*pi*sigma); + } + + std::vector > sw_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N){ @@ -56,6 +88,39 @@ namespace persistence_diagram { return matrix; } + std::vector > pwg_matrix(const std::vector > >& s1, + const std::vector > >& s2, + double sigma, int N){ + std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); + for(int i = 0; i < num_diag_1; i++){ + std::cout << 100.0*i/num_diag_1 << " %" << std::endl; + std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pwg(s1[i], s2[j], sigma, N)); matrix.push_back(ps); + } + return matrix; + } + + std::vector > pss_matrix(const std::vector > >& s1, + const std::vector > >& s2, + double sigma, int N){ + std::vector > > ss1, ss2; + std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); + for(int i = 0; i < num_diag_1; i++){ + std::vector> pd1 = s1[i]; int numpts = s1[i].size(); + for(int j = 0; j < numpts; j++) pd1.emplace_back(s1[i][j].second,s1[i][j].first); + ss1.push_back(pd1); + + for(int i = 0; i < num_diag_2; i++){ + std::vector> pd2 = s2[i]; int numpts = s2[i].size(); + for(int j = 0; j < numpts; j++) pd2.emplace_back(s2[i][j].second,s2[i][j].first); + ss2.push_back(pd2); + + for(int i = 0; i < num_diag_1; i++){ + std::cout << 100.0*i/num_diag_1 << " %" << std::endl; + std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pss_sym(ss1[i], ss2[j], sigma, N)); matrix.push_back(ps); + } + return matrix; + } + } // namespace persistence_diagram } // namespace Gudhi -- cgit v1.2.3 From 75580c5ea4da0a2bc1a75424a9d36e25eabac8d8 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 29 Mar 2018 18:16:46 +0000 Subject: fvecs reader git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3321 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a57c3e9b3258a25ff1d59f0b8ce135ebb19188cf --- src/common/include/gudhi/Points_fvecs_reader.h | 68 ++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 src/common/include/gudhi/Points_fvecs_reader.h diff --git a/src/common/include/gudhi/Points_fvecs_reader.h b/src/common/include/gudhi/Points_fvecs_reader.h new file mode 100644 index 00000000..eb03bb72 --- /dev/null +++ b/src/common/include/gudhi/Points_fvecs_reader.h @@ -0,0 +1,68 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ +#ifndef POINTS_FVECS_READER_H_ +#define POINTS_FVECS_READER_H_ + +namespace Gudhi { + +template +bool load_points_from_fvecs_file(const std::string &filename, OutputIteratorPoints points, int only_the_first_n_points = -1) +{ + typedef typename Kernel::Point_d Point; + + std::ifstream in(filename, std::ios::binary); + if (!in.is_open()) { + std::cerr << "Could not open '" << filename << "'" << std::endl; + return false; + } + + Kernel k; + unsigned long pt_dim = 0; + + in.read(reinterpret_cast(&pt_dim), 4); + std::vector current_pt; + current_pt.reserve(pt_dim); + for (int c = 0; !in.fail() && c != only_the_first_n_points; ++c) { + + for (int j = 0; j < pt_dim; ++j) + { + float coord = 0.f; + in.read(reinterpret_cast(&coord), 4); + current_pt.push_back(coord); + } + + *points++ = Point(current_pt.begin(), current_pt.end()); + current_pt.clear(); + in.read(reinterpret_cast(&pt_dim), 4); + } + +#ifdef DEBUG_TRACES + std::cerr << "'" << filename << "' loaded." << std::endl; +#endif + + return true; +} + + +} // namespace Gudhi + +#endif // POINTS_FVECS_READER_H_ -- cgit v1.2.3 From 860fa7d916cb591cb0b016b046077d5333570731 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 30 Mar 2018 18:09:57 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3323 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 550a4c3f9050b542203ab53ea7ef5ae1c8032f38 --- .../include/gudhi/Persistence_weighted_gaussian.h | 3 ++- .../include/gudhi/Sliced_Wasserstein.h | 6 +++--- src/cython/cython/kernels.pyx | 4 ++++ src/cython/include/Kernels_interface.h | 19 +++++++++++-------- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index f824225a..d5c8e6d7 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -39,7 +39,6 @@ #include #include -double pi = boost::math::constants::pi(); using PD = std::vector >; using Weight = std::function) >; @@ -88,6 +87,8 @@ class Persistence_weighted_gaussian{ int approx; public: + + double pi = boost::math::constants::pi(); /** \brief Persistence Weighted Gaussian Kernel constructor. 
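A usage sketch for the fvecs point reader introduced above. The CGAL kernel CGAL::Epick_d<CGAL::Dynamic_dimension_tag> and the file path are illustrative assumptions; any Kernel whose Point_d can be built from a range of float coordinates will do.

// Read points from an fvecs file into a vector of CGAL points.
#include <fstream>   // included first, the reader relies on std::ifstream being available
#include <iostream>
#include <iterator>
#include <string>
#include <vector>

#include <CGAL/Epick_d.h>
#include <gudhi/Points_fvecs_reader.h>

int main() {
  using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
  using Point = Kernel::Point_d;

  std::vector<Point> points;
  std::string file = "data/points.fvecs";  // placeholder path

  // Read at most the first 1000 points; pass -1 (the default) to read them all.
  if (!Gudhi::load_points_from_fvecs_file<Kernel>(file, std::back_inserter(points), 1000)) {
    std::cerr << "Could not read " << file << std::endl;
    return 1;
  }
  std::cout << points.size() << " points loaded" << std::endl;
}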
* \ingroup Persistence_weighted_gaussian diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index bfb77384..fc3cd142 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -39,7 +39,6 @@ #include #include -double pi = boost::math::constants::pi(); using PD = std::vector >; namespace Gudhi { @@ -77,13 +76,14 @@ class Sliced_Wasserstein { double sigma; std::vector > projections, projections_diagonal; - public: + double pi = boost::math::constants::pi(); + void build_rep(){ if(approx > 0){ - + double step = pi/this->approx; int n = diagram.size(); diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx index 4fc21f03..466917b1 100644 --- a/src/cython/cython/kernels.pyx +++ b/src/cython/cython/kernels.pyx @@ -32,6 +32,10 @@ __license__ = "GPL v3" cdef extern from "Kernels_interface.h" namespace "Gudhi::persistence_diagram": double sw (vector[pair[double, double]], vector[pair[double, double]], double, int) vector[vector[double]] sw_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) + double pss (vector[pair[double, double]], vector[pair[double, double]], double, int) + vector[vector[double]] pss_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) + double pwg (vector[pair[double, double]], vector[pair[double, double]], double, int) + vector[vector[double]] pwg_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): """ diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index 17bb5d8b..33cd6e35 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -24,6 +24,7 @@ #define INCLUDE_KERNELS_INTERFACE_H_ #include +#include #include #include @@ -44,20 +45,20 @@ namespace persistence_diagram { double pwg(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::arctan_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::arctan_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight); return pwg1.compute_scalar_product(pwg2); } double pss(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - + double pi = boost::math::constants::pi(); std::vector> pd1 = diag1; int numpts = diag1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(diag1[i].second,diag1[i].first); std::vector> pd2 = diag2; numpts = diag2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(diag2[i].second,diag2[i].first); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, 
Gudhi::Persistence_representations::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); return pwg1.compute_scalar_product (pwg2) / (16*pi*sigma); } @@ -65,9 +66,9 @@ namespace persistence_diagram { double pss_sym(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + double pi = boost::math::constants::pi(); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); return pwg1.compute_scalar_product (pwg2) / (16*pi*sigma); } @@ -108,11 +109,13 @@ namespace persistence_diagram { std::vector> pd1 = s1[i]; int numpts = s1[i].size(); for(int j = 0; j < numpts; j++) pd1.emplace_back(s1[i][j].second,s1[i][j].first); ss1.push_back(pd1); + } for(int i = 0; i < num_diag_2; i++){ std::vector> pd2 = s2[i]; int numpts = s2[i].size(); for(int j = 0; j < numpts; j++) pd2.emplace_back(s2[i][j].second,s2[i][j].first); ss2.push_back(pd2); + } for(int i = 0; i < num_diag_1; i++){ std::cout << 100.0*i/num_diag_1 << " %" << std::endl; -- cgit v1.2.3 From 905be209a0e62121c125c37e01f4d2eae5aa606d Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 12 Apr 2018 15:22:45 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3378 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 7cd9f2ae7c9da5d525bdb76a00ffac1359a47da7 --- .../example/persistence_weighted_gaussian.cpp | 4 ++-- .../include/gudhi/Persistence_weighted_gaussian.h | 12 +++++------ .../include/gudhi/Sliced_Wasserstein.h | 23 +++++++++++++--------- .../gudhi/common_persistence_representations.h | 2 ++ src/cython/include/Kernels_interface.h | 6 ++---- 5 files changed, 25 insertions(+), 22 deletions(-) diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp index d447f165..dea5dab6 100644 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp @@ -91,8 +91,8 @@ int main(int argc, char** argv) { PWG pwgex1(pd1, 2*std::sqrt(sigma), -1, PWG::pss_weight); PWG pwgex2(pd2, 2*std::sqrt(sigma), -1, PWG::pss_weight); - std::cout << "Approx PSS kernel: " << pwg1.compute_scalar_product (pwg2) / (16*pi*sigma) << std::endl; - std::cout << "Exact PSS kernel: " << pwgex1.compute_scalar_product (pwgex2) / (16*pi*sigma) << std::endl; + std::cout << "Approx PSS kernel: " << pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma) << std::endl; + std::cout << "Exact PSS kernel: " << pwgex1.compute_scalar_product (pwgex2) / 
(16*Gudhi::Persistence_representations::pi*sigma) << std::endl; diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index d5c8e6d7..b30e0273 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -25,6 +25,7 @@ // gudhi include #include +#include // standard include #include @@ -37,7 +38,6 @@ #include #include #include -#include using PD = std::vector >; using Weight = std::function) >; @@ -87,8 +87,6 @@ class Persistence_weighted_gaussian{ int approx; public: - - double pi = boost::math::constants::pi(); /** \brief Persistence Weighted Gaussian Kernel constructor. * \ingroup Persistence_weighted_gaussian @@ -101,10 +99,10 @@ class Persistence_weighted_gaussian{ */ Persistence_weighted_gaussian(PD _diagram, double _sigma = 1.0, int _approx = 1000, Weight _weight = arctan_weight){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} - PD get_diagram(){return this->diagram;} - double get_sigma(){return this->sigma;} - int get_approx(){return this->approx;} - Weight get_weight(){return this->weight;} + PD get_diagram() const {return this->diagram;} + double get_sigma() const {return this->sigma;} + int get_approx() const {return this->approx;} + Weight get_weight() const {return this->weight;} // ********************************** diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index fc3cd142..6a9a607e 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -25,6 +25,8 @@ // gudhi include #include +#include +#include // standard include #include @@ -37,7 +39,6 @@ #include #include #include -#include using PD = std::vector >; @@ -68,6 +69,7 @@ namespace Persistence_representations { * It implements the following concepts: Topological_data_with_distances, Topological_data_with_scalar_product. * **/ + class Sliced_Wasserstein { protected: @@ -78,8 +80,6 @@ class Sliced_Wasserstein { public: - double pi = boost::math::constants::pi(); - void build_rep(){ if(approx > 0){ @@ -117,9 +117,9 @@ class Sliced_Wasserstein { */ Sliced_Wasserstein(PD _diagram, double _sigma = 1.0, int _approx = 100){diagram = _diagram; approx = _approx; sigma = _sigma; build_rep();} - PD get_diagram(){return this->diagram;} - int get_approx(){return this->approx;} - double get_sigma(){return this->sigma;} + PD get_diagram() const {return this->diagram;} + int get_approx() const {return this->approx;} + double get_sigma() const {return this->sigma;} @@ -197,11 +197,15 @@ class Sliced_Wasserstein { /** \brief Evaluation of the Sliced Wasserstein Distance between a pair of diagrams. * \ingroup Sliced_Wasserstein * - * @param[in] second other instance of class Sliced_Wasserstein. Warning: approx parameter needs to be the same for both instances!!! + * @param[in] second other instance of class Sliced_Wasserstein. + * For warning in red: + * @warning approx parameter needs to be the same for both instances. 
* */ double compute_sliced_wasserstein_distance(Sliced_Wasserstein second) { + GUDHI_CHECK(this->approx != second.approx, std::invalid_argument("Error: different approx values for representations")); + PD diagram1 = this->diagram; PD diagram2 = second.diagram; double sw = 0; if(this->approx == -1){ @@ -321,6 +325,7 @@ class Sliced_Wasserstein { * */ double compute_scalar_product(Sliced_Wasserstein second){ + GUDHI_CHECK(this->sigma != second.sigma, std::invalid_argument("Error: different sigma values for representations")); return std::exp(-compute_sliced_wasserstein_distance(second)/(2*this->sigma*this->sigma)); } @@ -331,8 +336,8 @@ class Sliced_Wasserstein { * */ double distance(Sliced_Wasserstein second) { - if(this->sigma != second.sigma || this->approx != second.approx){std::cout << "Error: different representations!" << std::endl; return 0;} - else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); + GUDHI_CHECK(this->sigma != second.sigma, std::invalid_argument("Error: different sigma values for representations")); + return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); } diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 44e125a7..90f2626d 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -26,11 +26,13 @@ #include #include #include +#include namespace Gudhi { namespace Persistence_representations { // this file contain an implementation of some common procedures used in Persistence_representations. 
+static constexpr double pi = boost::math::constants::pi(); // double epsi = std::numeric_limits::epsilon(); double epsi = 0.000005; diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index 33cd6e35..1742d016 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -53,24 +53,22 @@ namespace persistence_diagram { double pss(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - double pi = boost::math::constants::pi(); std::vector> pd1 = diag1; int numpts = diag1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(diag1[i].second,diag1[i].first); std::vector> pd2 = diag2; numpts = diag2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(diag2[i].second,diag2[i].first); Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); - return pwg1.compute_scalar_product (pwg2) / (16*pi*sigma); + return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); } double pss_sym(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - double pi = boost::math::constants::pi(); Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); - return pwg1.compute_scalar_product (pwg2) / (16*pi*sigma); + return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); } -- cgit v1.2.3 From 7f9e8f11f70e8387ef29c3fa13016121dca79cbe Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 20 Apr 2018 15:56:06 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3385 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 2a83e62e132ce406986efa1456b5f1bca6b93691 --- src/Kernels/doc/COPYRIGHT | 19 -- src/Kernels/doc/Intro_kernels.h | 108 ------ src/Kernels/example/CMakeLists.txt | 10 - src/Kernels/example/kernel.txt | 8 - src/Kernels/example/kernel_basic_example.cpp | 65 ---- src/Kernels/include/gudhi/kernel.h | 365 --------------------- src/Kernels/test/CMakeLists.txt | 12 - src/Kernels/test/test_kernel.cpp | 56 ---- .../example/CMakeLists.txt | 9 + .../example/landscape.cpp | 51 +++ .../example/persistence_image.cpp | 54 +++ .../example/persistence_weighted_gaussian.cpp | 8 +- .../include/gudhi/Landscape.h | 103 ++++++ .../include/gudhi/Persistence_image.h | 117 +++++++ .../include/gudhi/Persistence_weighted_gaussian.h | 15 +- src/cython/cython/kernels.pyx | 16 +- src/cython/cython/vectors.pyx | 65 ++++ src/cython/gudhi.pyx.in | 1 + src/cython/include/Kernels_interface.h | 46 ++- 19 files changed, 440 insertions(+), 688 deletions(-) delete mode 100644 src/Kernels/doc/COPYRIGHT delete mode 100644 src/Kernels/doc/Intro_kernels.h delete mode 100644 src/Kernels/example/CMakeLists.txt delete mode 100644 src/Kernels/example/kernel.txt delete mode 100644 src/Kernels/example/kernel_basic_example.cpp delete mode 100644 src/Kernels/include/gudhi/kernel.h delete mode 100644 src/Kernels/test/CMakeLists.txt 
delete mode 100644 src/Kernels/test/test_kernel.cpp create mode 100644 src/Persistence_representations/example/landscape.cpp create mode 100644 src/Persistence_representations/example/persistence_image.cpp create mode 100644 src/Persistence_representations/include/gudhi/Landscape.h create mode 100644 src/Persistence_representations/include/gudhi/Persistence_image.h create mode 100644 src/cython/cython/vectors.pyx diff --git a/src/Kernels/doc/COPYRIGHT b/src/Kernels/doc/COPYRIGHT deleted file mode 100644 index 0c36a526..00000000 --- a/src/Kernels/doc/COPYRIGHT +++ /dev/null @@ -1,19 +0,0 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. - -Author(s): Mathieu Carrière - -Copyright (C) 2017 INRIA - -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. - -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . diff --git a/src/Kernels/doc/Intro_kernels.h b/src/Kernels/doc/Intro_kernels.h deleted file mode 100644 index 163690b1..00000000 --- a/src/Kernels/doc/Intro_kernels.h +++ /dev/null @@ -1,108 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2017 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef DOC_KERNEL_INTRO_KERNEL_H_ -#define DOC_KERNEL_INTRO_KERNEL_H_ - -namespace Gudhi { - -namespace kernel { - -/** \defgroup kernel Kernels - * - * \author Mathieu Carrière - * - * @{ - * - * Kernels are generalized scalar products. They take the form of functions whose evaluations on pairs of persistence diagrams are equal - * to the scalar products of the images of the diagrams under some feature map into a (generally unknown and infinite dimensional) - * Hilbert space. Kernels are - * very useful to handle any type of data for algorithms that require at least a Hilbert structure, such as Principal Component Analysis - * or Support Vector Machines. In this package, we implement three kernels for persistence diagrams: - * the Persistence Scale Space Kernel (PSSK)---see \cite Reininghaus_Huber_ALL_PSSK, - * the Persistence Weighted Gaussian Kernel (PWGK)---see \cite Kusano_Fukumizu_Hiraoka_PWGK, - * and the Sliced Wasserstein Kernel (SWK)---see \cite pmlr-v70-carriere17a. 
- * - * \section pwg Persistence Weighted Gaussian Kernel and Persistence Scale Space Kernel - * - * The PWGK is built with Gaussian Kernel Mean Embedding, meaning that each persistence diagram is first - * sent to the Hilbert space of a Gaussian kernel with bandwidth parameter \f$\sigma >0\f$ using a weighted mean embedding \f$\Phi\f$: - * - * \f$ \Phi\,:\,D\,\rightarrow\,\sum_{p\in D}\,w(p)\,{\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right) \f$, - * - * Usually, the weight function is chosen to be an arctan function of the distance of the point to the diagonal: - * \f$w(p) = {\rm arctan}(C\,|y-x|^\alpha)\f$, for some parameters \f$C,\alpha >0\f$. - * Then, either their scalar product in this space is - * computed (Linear Persistence Weighted Gaussian Kernel): - * - * \f$ LPWGK(D_1,D_2)=\langle\Phi(D_1),\Phi(D_2)\rangle - * \,=\,\sum_{p\in D_1}\,\sum_{q\in D_2}\,w(p)\,w(q)\,{\rm exp}\left(-\frac{\|p-q\|_2^2}{2\sigma^2}\right)\f$, - * - * or a second Gaussian kernel with bandwidth parameter \f$\tau >0\f$ is applied to their distance in this space - * (Gaussian Persistence Weighted Gaussian Kernel): - * - * \f$ GPWGK(D_1,D_2)={\rm exp}\left(-\frac{\|\Phi(D_1)-\Phi(D_2)\|^2}{2\tau^2} \right)\f$, - * where \f$\|\Phi(D_1)-\Phi(D_2)\|^2 = \langle\Phi(D_1)-\Phi(D_2),\Phi(D_1)-\Phi(D_2)\rangle\f$. - * - * It follows that the computation time is \f$O(n^2)\f$ where \f$n\f$ is the number of points - * in the diagrams. This time can be improved by computing approximations of the kernel - * with \f$m\f$ Fourier features \cite Rahimi07randomfeatures. In that case, the computation time becomes \f$O(mn)\f$. - * - * The PSSK is a Linear Persistence Weighted Gaussian Kernel between modified diagrams: - * the symmetric of each point with respect to the diagonal is first added in each diagram, and then the weight function - * is set to be +1 if the point is above the diagonal and -1 otherwise. - * - * \section sw Sliced Wasserstein Kernel - * - * The Sliced Wasserstein Kernel is defined as a Gaussian-like Kernel between persistence diagrams, where the distance used for - * comparison is the Sliced Wasserstein distance \f$SW\f$ between persistence diagrams, defined as the integral of the 1-norm - * between the sorted projections of the diagrams onto all lines passing through the origin: - * - * \f$ SW(D_1,D_2)=\int_{\theta\in\mathbb{S}}\,\|\pi_\theta(D_1\cup\pi_\Delta(D_2))-\pi_\theta(D_2\cup\pi_\Delta(D_1))\|_1{\rm d}\theta\f$, - * - * where \f$\pi_\theta\f$ is the projection onto the line defined with angle \f$\theta\f$ in the unit circle \f$\mathbb{S}\f$, - * and \f$\pi_\Delta\f$ is the projection onto the diagonal. - * The integral can be either computed exactly in \f$O(n^2{\rm log}(n))\f$ time, where \f$n\f$ is the number of points - * in the diagrams, or approximated by sampling \f$m\f$ lines in the circle in \f$O(mn{\rm log}(n))\f$ time. The SWK is then computed as: - * - * \f$ SWK(D_1,D_2) = {\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right).\f$ - * - * When launching: - * - * \code $> ./BasicEx ../../../../data/persistence_diagram/PD1 ../../../../data/persistence_diagram/PD2 - * \endcode - * - * the program output is: - * - * \include Kernels/kernel.txt - * - * - * \copyright GNU General Public License v3. 
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim - */ -/** @} */ // end defgroup kernel - -} // namespace kernel - -} // namespace Gudhi - -#endif // DOC_KERNEL_INTRO_KERNEL_H_ diff --git a/src/Kernels/example/CMakeLists.txt b/src/Kernels/example/CMakeLists.txt deleted file mode 100644 index d8ad4b42..00000000 --- a/src/Kernels/example/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(Kernels_examples) - -add_executable ( BasicEx kernel_basic_example.cpp ) - -if (TBB_FOUND) - target_link_libraries(BasicEx ${TBB_LIBRARIES}) -endif() - -add_test(NAME Kernels_example_basicex COMMAND $ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/PD1" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/PD2") \ No newline at end of file diff --git a/src/Kernels/example/kernel.txt b/src/Kernels/example/kernel.txt deleted file mode 100644 index 5fb8b504..00000000 --- a/src/Kernels/example/kernel.txt +++ /dev/null @@ -1,8 +0,0 @@ -SWK exact = 0.875446 -SWK approx = 0.875204 -PSSK exact = 0.0218669 -PSSK approx = 0.0213766 -LPWGK exact = 2.57351 -LPWGK approx = 2.49102 -GPWGK exact = 0.98783 -GPWGK approx = 0.987591 \ No newline at end of file diff --git a/src/Kernels/example/kernel_basic_example.cpp b/src/Kernels/example/kernel_basic_example.cpp deleted file mode 100644 index 7ecbe401..00000000 --- a/src/Kernels/example/kernel_basic_example.cpp +++ /dev/null @@ -1,65 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Authors: Mathieu Carrière - * - * Copyright (C) 2017 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#include -#include -#include -#include -#include - - -void usage(int nbArgs, char *const progName) { - std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " PD1 PD2 \n"; - std::cerr << " i.e.: " << progName << " ../../../../data/persistence_diagram/PD1.pers ../../../../data/persistence_diagram/PD2.pers \n"; - exit(-1); // ----- >> -} - -int main(int argc, char **argv) { - - if (argc != 3) usage(argc, argv[0]); - - double sigma = 2; double tau = 5; - - std::string PDname1(argv[1]); std::string PDname2(argv[2]); - std::vector< std::pair > v1, v2; std::string line; double b,d; - - std::ifstream input1(PDname1); - while(std::getline(input1,line)){ - std::stringstream stream(line); stream >> b; stream >> d; v1.push_back(std::pair(b,d)); - } - - std::ifstream input2(PDname2); - while(std::getline(input2,line)){ - std::stringstream stream(line); stream >> b; stream >> d; v2.push_back(std::pair(b,d)); - } - - std::cout << "SWK exact = " << Gudhi::kernel::sliced_wasserstein_kernel (v1,v2,sigma,-1) << std::endl; - std::cout << "SWK approx = " << Gudhi::kernel::sliced_wasserstein_kernel (v1,v2,sigma) << std::endl; - std::cout << "PSSK exact = " << Gudhi::kernel::persistence_scale_space_kernel (v1,v2,sigma,-1) << std::endl; - std::cout << "PSSK approx = " << Gudhi::kernel::persistence_scale_space_kernel (v1,v2,sigma) << std::endl; - std::cout << "LPWGK exact = " << Gudhi::kernel::linear_persistence_weighted_gaussian_kernel (v1,v2,sigma,Gudhi::kernel::arctan_weight,-1) << std::endl; - std::cout << "LPWGK approx = " << Gudhi::kernel::linear_persistence_weighted_gaussian_kernel (v1,v2,sigma,Gudhi::kernel::arctan_weight) << std::endl; - std::cout << "GPWGK exact = " << Gudhi::kernel::gaussian_persistence_weighted_gaussian_kernel (v1,v2,sigma,tau,Gudhi::kernel::arctan_weight,-1) << std::endl; - std::cout << "GPWGK approx = " << Gudhi::kernel::gaussian_persistence_weighted_gaussian_kernel (v1,v2,sigma,tau,Gudhi::kernel::arctan_weight) << std::endl; - -} diff --git a/src/Kernels/include/gudhi/kernel.h b/src/Kernels/include/gudhi/kernel.h deleted file mode 100644 index 3293cc62..00000000 --- a/src/Kernels/include/gudhi/kernel.h +++ /dev/null @@ -1,365 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef KERNEL_H_ -#define KERNEL_H_ - -#include -#include -#include -#include -#include -#include //for numeric_limits<> -#include //for pair<> - -#include - - -namespace Gudhi { -namespace kernel { - -using PD = std::vector >; -double pi = boost::math::constants::pi(); - - - - -// ******************************************************************** -// Utils. 
-// ******************************************************************** - -bool sortAngle(const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);} -bool myComp(const std::pair & P1, const std::pair & P2){return P1.second < P2.second;} - -double pss_weight(std::pair P){ - if(P.second > P.first) return 1; - else return -1; -} - -double arctan_weight(std::pair P){ - return atan(P.second - P.first); -} - -// Compute the angle formed by two points of a PD -double compute_angle(const PD & PersDiag, const int & i, const int & j){ - std::pair vect; double x1,y1, x2,y2; - x1 = PersDiag[i].first; y1 = PersDiag[i].second; - x2 = PersDiag[j].first; y2 = PersDiag[j].second; - if (y1 - y2 > 0){ - vect.first = y1 - y2; - vect.second = x2 - x1;} - else{ - if(y1 - y2 < 0){ - vect.first = y2 - y1; - vect.second = x1 - x2; - } - else{ - vect.first = 0; - vect.second = abs(x1 - x2);} - } - double norm = std::sqrt(vect.first*vect.first + vect.second*vect.second); - return asin(vect.second/norm); -} - -// Compute the integral of |cos()| between alpha and beta, valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] -double compute_int_cos(const double & alpha, const double & beta){ - double res = 0; - if (alpha >= 0 && alpha <= pi){ - if (cos(alpha) >= 0){ - if(pi/2 <= beta){res = 2-sin(alpha)-sin(beta);} - else{res = sin(beta)-sin(alpha);} - } - else{ - if(1.5*pi <= beta){res = 2+sin(alpha)+sin(beta);} - else{res = sin(alpha)-sin(beta);} - } - } - if (alpha >= -pi && alpha <= 0){ - if (cos(alpha) <= 0){ - if(-pi/2 <= beta){res = 2+sin(alpha)+sin(beta);} - else{res = sin(alpha)-sin(beta);} - } - else{ - if(pi/2 <= beta){res = 2-sin(alpha)-sin(beta);} - else{res = sin(beta)-sin(alpha);} - } - } - return res; -} - -double compute_int(const double & theta1, const double & theta2, const int & p, const int & q, const PD & PD1, const PD & PD2){ - double norm = std::sqrt( (PD1[p].first-PD2[q].first)*(PD1[p].first-PD2[q].first) + (PD1[p].second-PD2[q].second)*(PD1[p].second-PD2[q].second) ); - double angle1; - if (PD1[p].first > PD2[q].first) - angle1 = theta1 - asin( (PD1[p].second-PD2[q].second)/norm ); - else - angle1 = theta1 - asin( (PD2[q].second-PD1[p].second)/norm ); - double angle2 = angle1 + theta2 - theta1; - double integral = compute_int_cos(angle1,angle2); - return norm*integral; -} - -template) > > -std::vector > Fourier_feat(PD D, std::vector > Z, Weight weight = arctan_weight){ - int m = D.size(); std::vector > B; int M = Z.size(); - for(int i = 0; i < M; i++){ - double d1 = 0; double d2 = 0; double zx = Z[i].first; double zy = Z[i].second; - for(int j = 0; j < m; j++){ - double x = D[j].first; double y = D[j].second; - d1 += weight(D[j])*cos(x*zx + y*zy); - d2 += weight(D[j])*sin(x*zx + y*zy); - } - B.emplace_back(d1,d2); - } - return B; -} - -std::vector > random_Fourier(double sigma, int M = 1000){ - std::normal_distribution distrib(0,1); std::vector > Z; std::random_device rd; - for(int i = 0; i < M; i++){ - std::mt19937 e1(rd()); std::mt19937 e2(rd()); - double zx = distrib(e1); double zy = distrib(e2); - Z.emplace_back(zx/sigma,zy/sigma); - } - return Z; -} - - - - - - - - - - -// ******************************************************************** -// Kernel computation. -// ******************************************************************** - - - - - -/** \brief Computes the Linear Persistence Weighted Gaussian Kernel between two persistence diagrams with random Fourier features. - * \ingroup kernel - * - * @param[in] PD1 first persistence diagram. 
- * @param[in] PD2 second persistence diagram. - * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. - * @param[in] weight weight function for the points in the diagrams. - * @param[in] M number of Fourier features (set -1 for exact computation). - * - */ -template) > > -double linear_persistence_weighted_gaussian_kernel(const PD & PD1, const PD & PD2, double sigma, Weight weight = arctan_weight, int M = 1000){ - - if(M == -1){ - int num_pts1 = PD1.size(); int num_pts2 = PD2.size(); double k = 0; - for(int i = 0; i < num_pts1; i++) - for(int j = 0; j < num_pts2; j++) - k += weight(PD1[i])*weight(PD2[j])*exp(-((PD1[i].first-PD2[j].first)*(PD1[i].first-PD2[j].first) + (PD1[i].second-PD2[j].second)*(PD1[i].second-PD2[j].second))/(2*sigma*sigma)); - return k; - } - else{ - std::vector > Z = random_Fourier(sigma, M); - std::vector > B1 = Fourier_feat(PD1,Z,weight); - std::vector > B2 = Fourier_feat(PD2,Z,weight); - double d = 0; for(int i = 0; i < M; i++) d += B1[i].first*B2[i].first + B1[i].second*B2[i].second; - return d/M; - } -} - -/** \brief Computes the Persistence Scale Space Kernel between two persistence diagrams with random Fourier features. - * \ingroup kernel - * - * @param[in] PD1 first persistence diagram. - * @param[in] PD2 second persistence diagram. - * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. - * @param[in] M number of Fourier features (set -1 for exact computation). - * - */ -double persistence_scale_space_kernel(const PD & PD1, const PD & PD2, double sigma, int M = 1000){ - PD pd1 = PD1; int numpts = PD1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(PD1[i].second,PD1[i].first); - PD pd2 = PD2; numpts = PD2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(PD2[i].second,PD2[i].first); - return linear_persistence_weighted_gaussian_kernel(pd1, pd2, 2*sqrt(sigma), pss_weight, M) / (2*8*pi*sigma); -} - - -/** \brief Computes the Gaussian Persistence Weighted Gaussian Kernel between two persistence diagrams with random Fourier features. - * \ingroup kernel - * - * @param[in] PD1 first persistence diagram. - * @param[in] PD2 second persistence diagram. - * @param[in] sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. - * @param[in] tau bandwidth parameter of the Gaussian Kernel used between the embeddings. - * @param[in] weight weight function for the points in the diagrams. - * @param[in] M number of Fourier features (set -1 for exact computation). - * - */ -template) > > -double gaussian_persistence_weighted_gaussian_kernel(const PD & PD1, const PD & PD2, double sigma, double tau, Weight weight = arctan_weight, int M = 1000){ - double k1 = linear_persistence_weighted_gaussian_kernel(PD1,PD1,sigma,weight,M); - double k2 = linear_persistence_weighted_gaussian_kernel(PD2,PD2,sigma,weight,M); - double k3 = linear_persistence_weighted_gaussian_kernel(PD1,PD2,sigma,weight,M); - return exp( - (k1+k2-2*k3) / (2*tau*tau) ); -} - - -/** \brief Computes the Sliced Wasserstein Kernel between two persistence diagrams with sampled directions. - * \ingroup kernel - * - * @param[in] PD1 first persistence diagram. - * @param[in] PD2 second persistence diagram. - * @param[in] sigma bandwidth parameter. - * @param[in] N number of points sampled on the circle (set -1 for exact computation). 
- * - */ -double sliced_wasserstein_kernel(PD PD1, PD PD2, double sigma, int N = 100){ - - if(N == -1){ - - // Add projections onto diagonal. - int n1, n2; n1 = PD1.size(); n2 = PD2.size(); double max_ordinate = std::numeric_limits::lowest(); - for (int i = 0; i < n2; i++){ - max_ordinate = std::max(max_ordinate, PD2[i].second); - PD1.emplace_back( (PD2[i].first+PD2[i].second)/2, (PD2[i].first+PD2[i].second)/2 ); - } - for (int i = 0; i < n1; i++){ - max_ordinate = std::max(max_ordinate, PD1[i].second); - PD2.emplace_back( (PD1[i].first+PD1[i].second)/2, (PD1[i].first+PD1[i].second)/2 ); - } - int num_pts_dgm = PD1.size(); - - // Slightly perturb the points so that the PDs are in generic positions. - int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} - double thresh = pow(10,-5+mag); - srand(time(NULL)); - for (int i = 0; i < num_pts_dgm; i++){ - PD1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - PD2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); PD2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - } - - // Compute all angles in both PDs. - std::vector > > angles1, angles2; - for (int i = 0; i < num_pts_dgm; i++){ - for (int j = i+1; j < num_pts_dgm; j++){ - double theta1 = compute_angle(PD1,i,j); double theta2 = compute_angle(PD2,i,j); - angles1.emplace_back(theta1, std::pair(i,j)); - angles2.emplace_back(theta2, std::pair(i,j)); - } - } - - // Sort angles. - std::sort(angles1.begin(), angles1.end(), sortAngle); std::sort(angles2.begin(), angles2.end(), sortAngle); - - // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). - std::vector orderp1, orderp2; - for (int i = 0; i < num_pts_dgm; i++){ orderp1.push_back(i); orderp2.push_back(i); } - std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(PD1[i].second != PD1[j].second) return (PD1[i].second < PD1[j].second); else return (PD1[i].first > PD1[j].first); } ); - std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(PD2[i].second != PD2[j].second) return (PD2[i].second < PD2[j].second); else return (PD2[i].first > PD2[j].first); } ); - - // Find the inverses of the orders. - std::vector order1(num_pts_dgm); std::vector order2(num_pts_dgm); - for(int i = 0; i < num_pts_dgm; i++) for (int j = 0; j < num_pts_dgm; j++) if(orderp1[j] == i){ order1[i] = j; break; } - for(int i = 0; i < num_pts_dgm; i++) for (int j = 0; j < num_pts_dgm; j++) if(orderp2[j] == i){ order2[i] = j; break; } - - // Record all inversions of points in the orders as theta varies along the positive half-disk. - std::vector > > anglePerm1(num_pts_dgm); - std::vector > > anglePerm2(num_pts_dgm); - - int M1 = angles1.size(); - for (int i = 0; i < M1; i++){ - double theta = angles1[i].first; int p = angles1[i].second.first; int q = angles1[i].second.second; - anglePerm1[order1[p]].emplace_back(p,theta); - anglePerm1[order1[q]].emplace_back(q,theta); - int a = order1[p]; int b = order1[q]; order1[p] = b; order1[q] = a; - } - - int M2 = angles2.size(); - for (int i = 0; i < M2; i++){ - double theta = angles2[i].first; int p = angles2[i].second.first; int q = angles2[i].second.second; - anglePerm2[order2[p]].emplace_back(p,theta); - anglePerm2[order2[q]].emplace_back(q,theta); - int a = order2[p]; int b = order2[q]; order2[p] = b; order2[q] = a; - } - - for (int i = 0; i < num_pts_dgm; i++){ - anglePerm1[order1[i]].emplace_back(i,pi/2); - anglePerm2[order2[i]].emplace_back(i,pi/2); - } - - // Compute the SW distance with the list of inversions. 
- double sw = 0; - for (int i = 0; i < num_pts_dgm; i++){ - std::vector > U,V; U = anglePerm1[i]; V = anglePerm2[i]; - double theta1, theta2; theta1 = -pi/2; - unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(U[ku].second,V[kv].second); - while(theta1 != pi/2){ - if(PD1[U[ku].first].first != PD2[V[kv].first].first || PD1[U[ku].first].second != PD2[V[kv].first].second) - if(theta1 != theta2) - sw += compute_int(theta1, theta2, U[ku].first, V[kv].first, PD1, PD2); - theta1 = theta2; - if ( (theta2 == U[ku].second) && ku < U.size()-1 ) ku++; - if ( (theta2 == V[kv].second) && kv < V.size()-1 ) kv++; - theta2 = std::min(U[ku].second, V[kv].second); - } - } - - return exp( -(sw/pi)/(2*sigma*sigma) ); - - } - - - else{ - double step = pi/N; double sw = 0; - - // Add projections onto diagonal. - int n1, n2; n1 = PD1.size(); n2 = PD2.size(); - for (int i = 0; i < n2; i++) - PD1.emplace_back( (PD2[i].first + PD2[i].second)/2, (PD2[i].first + PD2[i].second)/2 ); - for (int i = 0; i < n1; i++) - PD2.emplace_back( (PD1[i].first + PD1[i].second)/2, (PD1[i].first + PD1[i].second)/2 ); - int n = PD1.size(); - - // Sort and compare all projections. - //#pragma omp parallel for - for (int i = 0; i < N; i++){ - std::vector > L1, L2; - for (int j = 0; j < n; j++){ - L1.emplace_back( j, PD1[j].first*cos(-pi/2+i*step) + PD1[j].second*sin(-pi/2+i*step) ); - L2.emplace_back( j, PD2[j].first*cos(-pi/2+i*step) + PD2[j].second*sin(-pi/2+i*step) ); - } - std::sort(L1.begin(),L1.end(), myComp); std::sort(L2.begin(),L2.end(), myComp); - double f = 0; for (int j = 0; j < n; j++) f += std::abs(L1[j].second - L2[j].second); - sw += f*step; - } - return exp( -(sw/pi)/(2*sigma*sigma) ); - } -} - - -} // namespace kernel - -} // namespace Gudhi - -#endif //KERNEL_H_ diff --git a/src/Kernels/test/CMakeLists.txt b/src/Kernels/test/CMakeLists.txt deleted file mode 100644 index 95c72a7f..00000000 --- a/src/Kernels/test/CMakeLists.txt +++ /dev/null @@ -1,12 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(kernel_tests) - -include(GUDHI_test_coverage) - -add_executable ( kernel_test_unit test_kernel.cpp ) -target_link_libraries(kernel_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) -if (TBB_FOUND) - target_link_libraries(kernel_test_unit ${TBB_LIBRARIES}) -endif() - -gudhi_add_coverage_test(kernel_test_unit) diff --git a/src/Kernels/test/test_kernel.cpp b/src/Kernels/test/test_kernel.cpp deleted file mode 100644 index db05fd28..00000000 --- a/src/Kernels/test/test_kernel.cpp +++ /dev/null @@ -1,56 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#define BOOST_TEST_DYN_LINK -#define BOOST_TEST_MODULE "kernel" - -#include -#include // float comparison -#include -#include -#include -#include // std::max -#include -#include -#include - -BOOST_AUTO_TEST_CASE(check_PSS) { - std::vector< std::pair > v1, v2; - v1.emplace_back(std::pair(0,1)); - v2.emplace_back(std::pair(0,2)); - BOOST_CHECK(std::abs(Gudhi::kernel::pssk(v1,v2,1) - Gudhi::kernel::approx_pssk(v1,v2,1)) <= 1e-1); -} - -BOOST_AUTO_TEST_CASE(check_PWG) { - std::vector< std::pair > v1, v2; - v1.emplace_back(std::pair(0,1)); - v2.emplace_back(std::pair(0,2)); - BOOST_CHECK(std::abs(Gudhi::kernel::lpwgk(v1,v2,1) - Gudhi::kernel::approx_lpwgk(v1,v2,1)) <= 1e-1); - BOOST_CHECK(std::abs(Gudhi::kernel::gpwgk(v1,v2,1,1) - Gudhi::kernel::approx_gpwgk(v1,v2,1,1)) <= 1e-1); -} - -BOOST_AUTO_TEST_CASE(check_SW) { - std::vector< std::pair > v1, v2; - v2.emplace_back(std::pair(0,2)); - BOOST_CHECK(std::abs(Gudhi::kernel::sw(v1,v2) - Gudhi::kernel::approx_sw(v1,v2)) <= 1e-3); - BOOST_CHECK(std::abs(Gudhi::kernel::sw(v1,v2) - 2*std::sqrt(2)/3.1415) <= 1e-3); -} diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 79d39c4d..89284e38 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -37,3 +37,12 @@ add_test(NAME Persistence_weighted_gaussian COMMAND $) install(TARGETS Persistence_weighted_gaussian DESTINATION bin) +add_executable ( Persistence_image persistence_image.cpp ) +add_test(NAME Persistence_image + COMMAND $) +install(TARGETS Persistence_image DESTINATION bin) + +add_executable ( Landscape landscape.cpp ) +add_test(NAME Landscape + COMMAND $) +install(TARGETS Landscape DESTINATION bin) diff --git a/src/Persistence_representations/example/landscape.cpp b/src/Persistence_representations/example/landscape.cpp new file mode 100644 index 00000000..5fa84a7c --- /dev/null +++ b/src/Persistence_representations/example/landscape.cpp @@ -0,0 +1,51 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include + +#include +#include +#include + +using LS = Gudhi::Persistence_representations::Landscape; + +int main(int argc, char** argv) { + + std::vector > persistence; + + persistence.push_back(std::make_pair(1, 2)); + persistence.push_back(std::make_pair(6, 8)); + persistence.push_back(std::make_pair(0, 4)); + persistence.push_back(std::make_pair(3, 8)); + + int nb_ls = 3; double min_x = 0.0; double max_x = 10.0; int res_x = 100; + + LS ls(persistence, nb_ls, min_x, max_x, res_x); + std::vector > L = ls.vectorize(); + + for(int i = 0; i < nb_ls; i++){ + for(int j = 0; j < res_x; j++) std::cout << L[i][j] << " "; + std::cout << std::endl; + } + + return 0; +} diff --git a/src/Persistence_representations/example/persistence_image.cpp b/src/Persistence_representations/example/persistence_image.cpp new file mode 100644 index 00000000..dfa469d4 --- /dev/null +++ b/src/Persistence_representations/example/persistence_image.cpp @@ -0,0 +1,54 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include +#include + +#include +#include +#include +#include + +using PI = Gudhi::Persistence_representations::Persistence_image; +using Weight = std::function) >; + +int main(int argc, char** argv) { + + std::vector > persistence; + + persistence.push_back(std::make_pair(1, 2)); + persistence.push_back(std::make_pair(6, 8)); + persistence.push_back(std::make_pair(0, 4)); + persistence.push_back(std::make_pair(3, 8)); + + double min_x = 0.0; double max_x = 10.0; int res_x = 100; double min_y = 0.0; double max_y = 10.0; int res_y = 100; double sigma = 1.0; Weight weight = Gudhi::Persistence_representations::Persistence_weighted_gaussian::linear_weight; + + PI pim(persistence, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma); + std::vector > P = pim.vectorize(); + + for(int i = 0; i < res_y; i++){ + for(int j = 0; j < res_x; j++) std::cout << P[i][j] << " "; + std::cout << std::endl; + } + + return 0; +} diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp index dea5dab6..234f6323 100644 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp @@ -48,11 +48,11 @@ int main(int argc, char** argv) { double tau = 1; int m = 10000; - PWG PWG1(persistence1, sigma, m, PWG::arctan_weight); - PWG PWG2(persistence2, sigma, m, PWG::arctan_weight); + PWG PWG1(persistence1, sigma, m, PWG::arctan_weight(1,1)); + PWG PWG2(persistence2, sigma, m, PWG::arctan_weight(1,1)); - PWG PWGex1(persistence1, sigma, -1, PWG::arctan_weight); - PWG PWGex2(persistence2, sigma, -1, PWG::arctan_weight); + PWG PWGex1(persistence1, sigma, -1, PWG::arctan_weight(1,1)); + PWG PWGex2(persistence2, sigma, -1, PWG::arctan_weight(1,1)); // Linear PWG diff --git a/src/Persistence_representations/include/gudhi/Landscape.h b/src/Persistence_representations/include/gudhi/Landscape.h new file mode 100644 index 00000000..d6608a57 --- /dev/null +++ b/src/Persistence_representations/include/gudhi/Landscape.h @@ -0,0 +1,103 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef LANDSCAPE_H_ +#define LANDSCAPE_H_ + +// gudhi include +#include +#include +#include + +// standard include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using PD = std::vector >; + +namespace Gudhi { +namespace Persistence_representations { + +/** + * \class Landscape gudhi/Landscape.h + * \brief A class implementing the Landscapes. 
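+ *
+ * The values returned by vectorize() below can be read as follows: for a sample
+ * \f$x \in [{\rm min\_x}, {\rm max\_x})\f$, set \f$t = x/\sqrt{2}\f$; the \f$k\f$-th landscape value at \f$x\f$ is the
+ * \f$k\f$-th largest element of \f$\{\sqrt{2}\,\min(t-b,\,d-t) \,:\, (b,d)\in D,\; b \le t \le d\}\f$,
+ * and \f$0\f$ when fewer than \f$k\f$ diagram points satisfy \f$b \le t \le d\f$.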
+ * + * \ingroup Persistence_representations + * + * \details + * +**/ + +class Landscape { + + protected: + PD diagram; + int res_x, nb_ls; + double min_x, max_x; + + public: + + /** \brief Landscape constructor. + * \ingroup Landscape + * + */ + Landscape(PD _diagram, int _nb_ls = 5, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; nb_ls = _nb_ls; min_x = _min_x; max_x = _max_x; res_x = _res_x;} + + /** \brief Computes the landscape of a diagram. + * \ingroup Landscape + * + */ + std::vector > vectorize() { + std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); + int num_pts = diagram.size(); double step = (max_x - min_x)/res_x; + + for(int i = 0; i < res_x; i++){ + double x = min_x + i*step; double t = x / std::sqrt(2); std::vector events; + for(int j = 0; j < num_pts; j++){ + double px = diagram[j].first; double py = diagram[j].second; + if(t >= px && t <= py){ if(t >= (px+py)/2) events.push_back(std::sqrt(2)*(py-t)); else events.push_back(std::sqrt(2)*(t-px)); } + } + + std::sort(events.begin(), events.end(), [](const double & a, const double & b){return a > b;}); int nb_events = events.size(); + for (int j = 0; j < nb_ls; j++){ if(j < nb_events) ls[j].push_back(events[j]); else ls[j].push_back(0); } + } + + return ls; + } + + + + +}; + +} // namespace Landscape +} // namespace Gudhi + +#endif // LANDSCAPE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_image.h b/src/Persistence_representations/include/gudhi/Persistence_image.h new file mode 100644 index 00000000..6c9f75b7 --- /dev/null +++ b/src/Persistence_representations/include/gudhi/Persistence_image.h @@ -0,0 +1,117 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef PERSISTENCE_IMAGE_H_ +#define PERSISTENCE_IMAGE_H_ + +// gudhi include +#include +#include +#include +#include + +// standard include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using PD = std::vector >; +using Weight = std::function) >; + +namespace Gudhi { +namespace Persistence_representations { + +/** + * \class Persistence_image gudhi/Persistence_image.h + * \brief A class implementing the Persistence Images. + * + * \ingroup Persistence_representations + * + * \details + * +**/ + +class Persistence_image { + + protected: + PD diagram; + int res_x, res_y; + double min_x, max_x, min_y, max_y; + Weight weight; + double sigma; + + public: + + /** \brief Persistence Image constructor. 
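+   *
+   * A minimal usage sketch (the diagram and parameter values are illustrative; the full program is the
+   * persistence_image.cpp example shipped with this module):
+   * \code{.cpp}
+   * PD diag = {{0., 4.}, {1., 2.}, {3., 8.}, {6., 8.}};
+   * Persistence_image pim(diag, 0., 10., 100, 0., 10., 100,
+   *                       Persistence_weighted_gaussian::linear_weight, 1.);
+   * std::vector<std::vector<double> > img = pim.vectorize();
+   * \endcode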
+ * \ingroup Persistence_image + * + */ + Persistence_image(PD _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10, double _min_y = 0.0, double _max_y = 1.0, int _res_y = 10, + Weight _weight = Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(1,1), double _sigma = 1.0){ + diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x; min_y = _min_y; max_y = _max_y; res_y = _res_y, weight = _weight; sigma = _sigma; + } + + /** \brief Computes the persistence image of a diagram. + * \ingroup Persistence_image + * + */ + std::vector > vectorize() { + std::vector > im; for(int i = 0; i < res_y; i++) im.emplace_back(); + double step_x = (max_x - min_x)/res_x; double step_y = (max_y - min_y)/res_y; + + int num_pts = diagram.size(); + + for(int i = 0; i < res_y; i++){ + double y = min_y + i*step_y; + for(int j = 0; j < res_x; j++){ + double x = min_x + j*step_x; + + double pixel_value = 0; + for(int k = 0; k < num_pts; k++){ + double px = diagram[k].first; double py = diagram[k].second; + pixel_value += weight(std::pair(px,py)) * std::exp( -((x-px)*(x-px) + (y-(py-px))*(y-(py-px))) / (2*sigma*sigma) ) / (sigma*std::sqrt(2*pi)); + } + im[i].push_back(pixel_value); + + } + } + + return im; + + } + + + + +}; + +} // namespace Persistence_image +} // namespace Gudhi + +#endif // PERSISTENCE_IMAGE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index b30e0273..9a63fccd 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -97,7 +97,7 @@ class Persistence_weighted_gaussian{ * @param[in] _weight weight function for the points in the diagrams. * */ - Persistence_weighted_gaussian(PD _diagram, double _sigma = 1.0, int _approx = 1000, Weight _weight = arctan_weight){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} + Persistence_weighted_gaussian(PD _diagram, double _sigma = 1.0, int _approx = 1000, Weight _weight = arctan_weight(1,1)){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} PD get_diagram() const {return this->diagram;} double get_sigma() const {return this->sigma;} @@ -115,16 +115,13 @@ class Persistence_weighted_gaussian{ * @param[in] p point in 2D. 
* */ - static double pss_weight(std::pair p){ - if(p.second > p.first) return 1; - else return -1; - } + static double pss_weight(std::pair p) {if(p.second > p.first) return 1; else return -1;} + static double linear_weight(std::pair p) {return std::abs(p.second - p.first);} + static double const_weight(std::pair p) {return 1;} + static std::function) > arctan_weight(double C, double power) {return [=](std::pair p){return C * atan(std::pow(std::abs(p.second - p.first), power));};} - static double arctan_weight(std::pair p){ - return atan(p.second - p.first); - } - std::vector > Fourier_feat(PD diag, std::vector > z, Weight weight = arctan_weight){ + std::vector > Fourier_feat(PD diag, std::vector > z, Weight weight = arctan_weight(1,1)){ int md = diag.size(); std::vector > b; int mz = z.size(); for(int i = 0; i < mz; i++){ double d1 = 0; double d2 = 0; double zx = z[i].first; double zy = z[i].second; diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx index 466917b1..0cb296ec 100644 --- a/src/cython/cython/kernels.pyx +++ b/src/cython/cython/kernels.pyx @@ -34,8 +34,8 @@ cdef extern from "Kernels_interface.h" namespace "Gudhi::persistence_diagram": vector[vector[double]] sw_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) double pss (vector[pair[double, double]], vector[pair[double, double]], double, int) vector[vector[double]] pss_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) - double pwg (vector[pair[double, double]], vector[pair[double, double]], double, int) - vector[vector[double]] pwg_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) + double pwg (vector[pair[double, double]], vector[pair[double, double]], double, int, double, double) + vector[vector[double]] pwg_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int, double, double) def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): """ @@ -65,7 +65,7 @@ def sliced_wasserstein_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): """ return sw_matrix(diagrams_1, diagrams_2, sigma, N) -def persistence_weighted_gaussian(diagram_1, diagram_2, sigma = 1, N = 100): +def persistence_weighted_gaussian(diagram_1, diagram_2, sigma = 1, N = 100, C = 1, p = 1): """ :param diagram_1: The first diagram. @@ -74,12 +74,14 @@ def persistence_weighted_gaussian(diagram_1, diagram_2, sigma = 1, N = 100): :type diagram_2: vector[pair[double, double]] :param sigma: bandwidth of Gaussian :param N: number of Fourier features + :param C: cost of persistence weight + :param p: power of persistence weight :returns: the persistence weighted gaussian kernel. """ - return pwg(diagram_1, diagram_2, sigma, N) + return pwg(diagram_1, diagram_2, sigma, N, C, p) -def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): +def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100, C = 1, p = 1): """ :param diagram_1: The first set of diagrams. @@ -88,10 +90,12 @@ def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, sigma = 1, N = :type diagram_2: vector[vector[pair[double, double]]] :param sigma: bandwidth of Gaussian :param N: number of Fourier features + :param C: cost of persistence weight + :param p: power of persistence weight :returns: the persistence weighted gaussian kernel matrix. 
""" - return pwg_matrix(diagrams_1, diagrams_2, sigma, N) + return pwg_matrix(diagrams_1, diagrams_2, sigma, N, C, p) def persistence_scale_space(diagram_1, diagram_2, sigma = 1, N = 100): """ diff --git a/src/cython/cython/vectors.pyx b/src/cython/cython/vectors.pyx new file mode 100644 index 00000000..42390ae6 --- /dev/null +++ b/src/cython/cython/vectors.pyx @@ -0,0 +1,65 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +import os + +"""This file is part of the Gudhi Library. The Gudhi library + (Geometric Understanding in Higher Dimensions) is a generic C++ + library for computational topology. + + Author(s): Mathieu Carriere + + Copyright (C) 2018 INRIA + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +__author__ = "Mathieu Carriere" +__copyright__ = "Copyright (C) 2018 INRIA" +__license__ = "GPL v3" + +cdef extern from "Vectors_interface.h" namespace "Gudhi::persistence_diagram": + vector[vector[double]] compute_ls (vector[pair[double, double]], int, double, double, int) + vector[vector[double]] compute_pim (vector[pair[double, double]], double, double, int, double, double, int, string, double, double, double) + +def landscape(diagram, nb_ls = 10, min_x = 0.0, max_x = 1.0, res_x = 100): + """ + + :param diagram: The diagram + :type diagram: vector[pair[double, double]] + :param nb_ls: Number of landscapes + :param min_x: Minimum abscissa + :param max_x: Maximum abscissa + :param res_x: Number of samples + + :returns: the landscape + """ + return compute_ls(diagram, nb_ls, min_x, max_x, res_x) + +def persistence_image(diagram, min_x = 0.0, max_x = 1.0, res_x = 10, min_y = 0.0, max_y = 1.0, res_y = 10, weight = "linear", sigma = 1.0, C = 1.0, p = 1.0): + """ + + :param diagram: The diagram + :type diagram: vector[vector[pair[double, double]]] + :param min_x: Minimum abscissa + :param max_x: Maximum abscissa + :param res_x: Number of abscissa pixels + :param min_x: Minimum ordinate + :param max_x: Maximum ordinate + :param res_x: Number of ordinate pixels + :param sigma: bandwidth of Gaussian + + :returns: the persistence image + """ + return compute_pim(diagram, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma, C, p) diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in index 7f42968d..3ff68085 100644 --- a/src/cython/gudhi.pyx.in +++ b/src/cython/gudhi.pyx.in @@ -37,6 +37,7 @@ include '@CMAKE_CURRENT_SOURCE_DIR@/cython/reader_utils.pyx' include '@CMAKE_CURRENT_SOURCE_DIR@/cython/witness_complex.pyx' include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx' include '@CMAKE_CURRENT_SOURCE_DIR@/cython/kernels.pyx' +include '@CMAKE_CURRENT_SOURCE_DIR@/cython/vectors.pyx' @GUDHI_CYTHON_ALPHA_COMPLEX@ @GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX@ @GUDHI_CYTHON_SUBSAMPLING@ diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index 1742d016..0da28245 100644 --- a/src/cython/include/Kernels_interface.h +++ 
b/src/cython/include/Kernels_interface.h @@ -34,25 +34,24 @@ namespace Gudhi { namespace persistence_diagram { - double sw(const std::vector>& diag1, - const std::vector>& diag2, - double sigma, int N) { + + // ******************* + // Kernel evaluations. + // ******************* + + double sw(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { Gudhi::Persistence_representations::Sliced_Wasserstein sw1(diag1, sigma, N); Gudhi::Persistence_representations::Sliced_Wasserstein sw2(diag2, sigma, N); return sw1.compute_scalar_product(sw2); } - double pwg(const std::vector>& diag1, - const std::vector>& diag2, - double sigma, int N) { - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight); + double pwg(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N, double C, double p) { + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(C,p)); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(C,p)); return pwg1.compute_scalar_product(pwg2); } - double pss(const std::vector>& diag1, - const std::vector>& diag2, - double sigma, int N) { + double pss(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { std::vector> pd1 = diag1; int numpts = diag1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(diag1[i].second,diag1[i].first); std::vector> pd2 = diag2; numpts = diag2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(diag2[i].second,diag2[i].first); @@ -62,9 +61,7 @@ namespace persistence_diagram { return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); } - double pss_sym(const std::vector>& diag1, - const std::vector>& diag2, - double sigma, int N) { + double pss_sym(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); @@ -72,9 +69,11 @@ namespace persistence_diagram { } - std::vector > sw_matrix(const std::vector > >& s1, - const std::vector > >& s2, - double sigma, int N){ + // **************** + // Kernel matrices. 
+ // **************** + + std::vector > sw_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N){ std::vector > matrix; std::vector ss1; int num_diag_1 = s1.size(); for(int i = 0; i < num_diag_1; i++){Gudhi::Persistence_representations::Sliced_Wasserstein sw1(s1[i], sigma, N); ss1.push_back(sw1);} @@ -87,22 +86,17 @@ namespace persistence_diagram { return matrix; } - std::vector > pwg_matrix(const std::vector > >& s1, - const std::vector > >& s2, - double sigma, int N){ + std::vector > pwg_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N, double C, double p){ std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); for(int i = 0; i < num_diag_1; i++){ std::cout << 100.0*i/num_diag_1 << " %" << std::endl; - std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pwg(s1[i], s2[j], sigma, N)); matrix.push_back(ps); + std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pwg(s1[i], s2[j], sigma, N, C, p)); matrix.push_back(ps); } return matrix; } - std::vector > pss_matrix(const std::vector > >& s1, - const std::vector > >& s2, - double sigma, int N){ - std::vector > > ss1, ss2; - std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); + std::vector > pss_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N){ + std::vector > > ss1, ss2; std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); for(int i = 0; i < num_diag_1; i++){ std::vector> pd1 = s1[i]; int numpts = s1[i].size(); for(int j = 0; j < numpts; j++) pd1.emplace_back(s1[i][j].second,s1[i][j].first); -- cgit v1.2.3 From 5e24206f945f66575c7c179d74e9661cf60ca3df Mon Sep 17 00:00:00 2001 From: mcarrier Date: Fri, 20 Apr 2018 15:56:37 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3386 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 29ea554ecd104927cce84dbc21dab531fc21265c --- src/cython/include/Vectors_interface.h | 59 ++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 src/cython/include/Vectors_interface.h diff --git a/src/cython/include/Vectors_interface.h b/src/cython/include/Vectors_interface.h new file mode 100644 index 00000000..49d28e7c --- /dev/null +++ b/src/cython/include/Vectors_interface.h @@ -0,0 +1,59 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef INCLUDE_VECTORS_INTERFACE_H_ +#define INCLUDE_VECTORS_INTERFACE_H_ + +#include +#include +#include + +#include +#include +#include // for std::pair + +using Weight = std::function) >; + +namespace Gudhi { + +namespace persistence_diagram { + + std::vector > compute_ls(const std::vector >& diag, int nb_ls, double min_x, double max_x, int res_x) { + Gudhi::Persistence_representations::Landscape L(diag, nb_ls, min_x, max_x, res_x); + return L.vectorize(); + } + + std::vector > compute_pim(const std::vector >& diag, double min_x, double max_x, int res_x, double min_y, double max_y, int res_y, std::string weight, double sigma, double C, double p) { + Weight weight_fn; + if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::Persistence_weighted_gaussian::linear_weight; + if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(C,p); + if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::Persistence_weighted_gaussian::const_weight; + Gudhi::Persistence_representations::Persistence_image P(diag, min_x, max_x, res_x, min_y, max_y, res_y, weight_fn, sigma); + return P.vectorize(); + } + +} // namespace persistence_diagram + +} // namespace Gudhi + + +#endif // INCLUDE_VECTORS_INTERFACE_H_ -- cgit v1.2.3 From 541284f6f1bf7d4a76daac8a52850c7162a765cb Mon Sep 17 00:00:00 2001 From: mcarrier Date: Mon, 23 Apr 2018 15:22:13 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3387 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 3fe2ae4af0c7cadf507fc5148c05dcf664c5e151 --- .../doc/Persistence_representations_doc.h | 5 +- .../example/persistence_image.cpp | 2 +- .../example/persistence_weighted_gaussian.cpp | 24 +- .../example/sliced_wasserstein.cpp | 3 +- .../include/gudhi/Landscape.h | 87 ++++---- .../include/gudhi/Persistence_image.h | 41 ++-- .../include/gudhi/Persistence_weighted_gaussian.h | 70 +++--- .../include/gudhi/Sliced_Wasserstein.h | 243 ++++++++++----------- .../gudhi/common_persistence_representations.h | 22 +- 9 files changed, 242 insertions(+), 255 deletions(-) diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index 6d4cc96c..ca283017 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -24,7 +24,6 @@ #define DOC_GUDHI_STAT_H_ namespace Gudhi { - namespace Persistence_representations { /** \defgroup Persistence_representations Persistence representations @@ -254,11 +253,11 @@ namespace Persistence_representations { -\section sec_persistence_kernels Kernels on Persistence Diagrams +\section sec_persistence_kernels Kernels on persistence diagrams Reference manual: \ref Gudhi::Persistence_representations::Sliced_Wasserstein
Reference manual: \ref Gudhi::Persistence_representations::Persistence_weighted_gaussian
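+
+ Both classes follow the same pattern: one object is built per diagram, and kernel values are then obtained with
+ compute_scalar_product(), as detailed below. A minimal sketch for the Sliced Wasserstein kernel (the diagram
+ points, the bandwidth 1. and the 100 directions are illustrative):
+
+ \code{.cpp}
+ Persistence_diagram diag1, diag2;
+ diag1.push_back(std::make_pair(1., 2.)); diag1.push_back(std::make_pair(6., 8.));
+ diag2.push_back(std::make_pair(2., 9.)); diag2.push_back(std::make_pair(1., 6.));
+ Gudhi::Persistence_representations::Sliced_Wasserstein sw1(diag1, 1., 100);
+ Gudhi::Persistence_representations::Sliced_Wasserstein sw2(diag2, 1., 100);
+ double k = sw1.compute_scalar_product(sw2);
+ \endcode
+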
- Kernels for Persistence Diagrams can be regarded as infinite-dimensional vectorizations. More specifically, + Kernels for persistence diagrams can be regarded as infinite-dimensional vectorizations. More specifically, they are similarity functions whose evaluations on pairs of persistence diagrams equals the scalar products between images of these pairs under a map \f$\Phi\f$ taking values in a specific (possibly non Euclidean) Hilbert space \f$k(D_i, D_j) = \langle \Phi(D_i),\Phi(D_j)\rangle\f$. Reciprocally, classical results of learning theory ensure that such a \f$\Phi\f$ exists for a given similarity function \f$k\f$ if and only if \f$k\f$ is positive semi-definite. diff --git a/src/Persistence_representations/example/persistence_image.cpp b/src/Persistence_representations/example/persistence_image.cpp index dfa469d4..cdce3bbf 100644 --- a/src/Persistence_representations/example/persistence_image.cpp +++ b/src/Persistence_representations/example/persistence_image.cpp @@ -40,7 +40,7 @@ int main(int argc, char** argv) { persistence.push_back(std::make_pair(0, 4)); persistence.push_back(std::make_pair(3, 8)); - double min_x = 0.0; double max_x = 10.0; int res_x = 100; double min_y = 0.0; double max_y = 10.0; int res_y = 100; double sigma = 1.0; Weight weight = Gudhi::Persistence_representations::Persistence_weighted_gaussian::linear_weight; + double min_x = 0.0; double max_x = 10.0; int res_x = 100; double min_y = 0.0; double max_y = 10.0; int res_y = 100; double sigma = 1.0; Weight weight = Gudhi::Persistence_representations::linear_weight; PI pim(persistence, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma); std::vector > P = pim.vectorize(); diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp index 234f6323..db60755f 100644 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp @@ -26,13 +26,11 @@ #include #include -using PD = std::vector >; using PWG = Gudhi::Persistence_representations::Persistence_weighted_gaussian; int main(int argc, char** argv) { - std::vector > persistence1; - std::vector > persistence2; + Persistence_diagram persistence1, persistence2; persistence1.push_back(std::make_pair(1, 2)); persistence1.push_back(std::make_pair(6, 8)); @@ -48,11 +46,11 @@ int main(int argc, char** argv) { double tau = 1; int m = 10000; - PWG PWG1(persistence1, sigma, m, PWG::arctan_weight(1,1)); - PWG PWG2(persistence2, sigma, m, PWG::arctan_weight(1,1)); + PWG PWG1(persistence1, sigma, m, Gudhi::Persistence_representations::arctan_weight(1,1)); + PWG PWG2(persistence2, sigma, m, Gudhi::Persistence_representations::arctan_weight(1,1)); - PWG PWGex1(persistence1, sigma, -1, PWG::arctan_weight(1,1)); - PWG PWGex2(persistence2, sigma, -1, PWG::arctan_weight(1,1)); + PWG PWGex1(persistence1, sigma, -1, Gudhi::Persistence_representations::arctan_weight(1,1)); + PWG PWGex2(persistence2, sigma, -1, Gudhi::Persistence_representations::arctan_weight(1,1)); // Linear PWG @@ -82,14 +80,14 @@ int main(int argc, char** argv) { // PSS - PD pd1 = persistence1; int numpts = persistence1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(persistence1[i].second,persistence1[i].first); - PD pd2 = persistence2; numpts = persistence2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(persistence2[i].second,persistence2[i].first); + Persistence_diagram pd1 = persistence1; 
int numpts = persistence1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(persistence1[i].second,persistence1[i].first); + Persistence_diagram pd2 = persistence2; numpts = persistence2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(persistence2[i].second,persistence2[i].first); - PWG pwg1(pd1, 2*std::sqrt(sigma), m, PWG::pss_weight); - PWG pwg2(pd2, 2*std::sqrt(sigma), m, PWG::pss_weight); + PWG pwg1(pd1, 2*std::sqrt(sigma), m, Gudhi::Persistence_representations::pss_weight); + PWG pwg2(pd2, 2*std::sqrt(sigma), m, Gudhi::Persistence_representations::pss_weight); - PWG pwgex1(pd1, 2*std::sqrt(sigma), -1, PWG::pss_weight); - PWG pwgex2(pd2, 2*std::sqrt(sigma), -1, PWG::pss_weight); + PWG pwgex1(pd1, 2*std::sqrt(sigma), -1, Gudhi::Persistence_representations::pss_weight); + PWG pwgex2(pd2, 2*std::sqrt(sigma), -1, Gudhi::Persistence_representations::pss_weight); std::cout << "Approx PSS kernel: " << pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma) << std::endl; std::cout << "Exact PSS kernel: " << pwgex1.compute_scalar_product (pwgex2) / (16*Gudhi::Persistence_representations::pi*sigma) << std::endl; diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index f1aeea5c..d37cb23c 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -30,8 +30,7 @@ using SW = Gudhi::Persistence_representations::Sliced_Wasserstein; int main(int argc, char** argv) { - std::vector > persistence1; - std::vector > persistence2; + Persistence_diagram persistence1, persistence2; persistence1.push_back(std::make_pair(1, 2)); persistence1.push_back(std::make_pair(6, 8)); diff --git a/src/Persistence_representations/include/gudhi/Landscape.h b/src/Persistence_representations/include/gudhi/Landscape.h index d6608a57..bbbca36b 100644 --- a/src/Persistence_representations/include/gudhi/Landscape.h +++ b/src/Persistence_representations/include/gudhi/Landscape.h @@ -40,64 +40,69 @@ #include #include -using PD = std::vector >; - namespace Gudhi { namespace Persistence_representations { /** * \class Landscape gudhi/Landscape.h - * \brief A class implementing the Landscapes. + * \brief A class implementing landscapes. * * \ingroup Persistence_representations * * \details * + * The landscape is a way to turn a persistence diagram into \f$L^2\f$ functions. Roughly, the idea is to see the boundaries of the rank functions as scalar functions taking values on the diagonal. + * See \cite bubenik_landscapes_2015 for more details. Here we provide a way to approximate such functions by computing their values on a set of samples. + * **/ class Landscape { - protected: - PD diagram; - int res_x, nb_ls; - double min_x, max_x; + protected: + Persistence_diagram diagram; + int res_x, nb_ls; + double min_x, max_x; public: - /** \brief Landscape constructor. - * \ingroup Landscape - * - */ - Landscape(PD _diagram, int _nb_ls = 5, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; nb_ls = _nb_ls; min_x = _min_x; max_x = _max_x; res_x = _res_x;} - - /** \brief Computes the landscape of a diagram. 
- * \ingroup Landscape - * - */ - std::vector > vectorize() { - std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); - int num_pts = diagram.size(); double step = (max_x - min_x)/res_x; - - for(int i = 0; i < res_x; i++){ - double x = min_x + i*step; double t = x / std::sqrt(2); std::vector events; - for(int j = 0; j < num_pts; j++){ - double px = diagram[j].first; double py = diagram[j].second; - if(t >= px && t <= py){ if(t >= (px+py)/2) events.push_back(std::sqrt(2)*(py-t)); else events.push_back(std::sqrt(2)*(t-px)); } - } - - std::sort(events.begin(), events.end(), [](const double & a, const double & b){return a > b;}); int nb_events = events.size(); - for (int j = 0; j < nb_ls; j++){ if(j < nb_events) ls[j].push_back(events[j]); else ls[j].push_back(0); } - } - - return ls; - } - - - - -}; - -} // namespace Landscape + /** \brief Landscape constructor. + * \ingroup Landscape + * + * @param[in] _diagram persistence diagram. + * @param[in] _nb_ls number of landscape functions. + * @param[in] _min_x minimum value of samples. + * @param[in] _max_x maximum value of samples. + * @param[in] _res_x number of samples. + * + */ + Landscape(const Persistence_diagram & _diagram, int _nb_ls = 5, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; nb_ls = _nb_ls; min_x = _min_x; max_x = _max_x; res_x = _res_x;} + + /** \brief Computes the landscape of a diagram. + * \ingroup Landscape + * + */ + std::vector > vectorize() const { + std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); + int num_pts = diagram.size(); double step = (max_x - min_x)/res_x; + + for(int i = 0; i < res_x; i++){ + double x = min_x + i*step; double t = x / std::sqrt(2); std::vector events; + for(int j = 0; j < num_pts; j++){ + double px = diagram[j].first; double py = diagram[j].second; + if(t >= px && t <= py){ if(t >= (px+py)/2) events.push_back(std::sqrt(2)*(py-t)); else events.push_back(std::sqrt(2)*(t-px)); } + } + + std::sort(events.begin(), events.end(), [](const double & a, const double & b){return a > b;}); int nb_events = events.size(); + for (int j = 0; j < nb_ls; j++){ if(j < nb_events) ls[j].push_back(events[j]); else ls[j].push_back(0); } + } + return ls; + } + + + + +}; // class Landscape +} // namespace Persistence_representations } // namespace Gudhi #endif // LANDSCAPE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_image.h b/src/Persistence_representations/include/gudhi/Persistence_image.h index 6c9f75b7..76b34d8d 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_image.h +++ b/src/Persistence_representations/include/gudhi/Persistence_image.h @@ -26,8 +26,8 @@ // gudhi include #include #include +#include #include -#include // standard include #include @@ -41,39 +41,49 @@ #include #include -using PD = std::vector >; -using Weight = std::function) >; - namespace Gudhi { namespace Persistence_representations { /** * \class Persistence_image gudhi/Persistence_image.h - * \brief A class implementing the Persistence Images. + * \brief A class implementing the persistence images. * * \ingroup Persistence_representations * * \details * + * Persistence images are a way to build images from persistence diagrams. Roughly, the idea is to center Gaussians on each diagram point, with a weight that usually depends on + * the distance to the diagonal, so that the diagram is turned into a function, and then to discretize the plane into pixels, and integrate this function on each pixel. 
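+ * In this implementation, vectorize() samples the resulting function on a regular grid rather than integrating it over
+ * each pixel: with diagram points taken in birth/persistence coordinates, the value stored for a sample \f$(x,y)\f$ is
+ * \f$\sum_{(b,d)\in D} \frac{w(b,d)}{\sigma\sqrt{2\pi}}\,{\rm exp}\left(-\frac{(x-b)^2+(y-(d-b))^2}{2\sigma^2}\right)\f$,
+ * where \f$w\f$ is the weight function and \f$\sigma\f$ the bandwidth given to the constructor.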
+ * See \cite Persistence_Images_2017 for more details. + * **/ class Persistence_image { protected: - PD diagram; - int res_x, res_y; - double min_x, max_x, min_y, max_y; - Weight weight; - double sigma; + Persistence_diagram diagram; + int res_x, res_y; + double min_x, max_x, min_y, max_y; + Weight weight; + double sigma; public: /** \brief Persistence Image constructor. * \ingroup Persistence_image * + * @param[in] _diagram persistence diagram. + * @param[in] _min_x minimum value of pixel abscissa. + * @param[in] _max_x maximum value of pixel abscissa. + * @param[in] _res_x number of pixels for the x-direction. + * @param[in] _min_y minimum value of pixel ordinate. + * @param[in] _max_y maximum value of pixel ordinate. + * @param[in] _res_y number of pixels for the y-direction. + * @param[in] _weight weight function for the Gaussians. + * @param[in] _sigma bandwidth parameter for the Gaussians. + * */ - Persistence_image(PD _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10, double _min_y = 0.0, double _max_y = 1.0, int _res_y = 10, - Weight _weight = Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(1,1), double _sigma = 1.0){ + Persistence_image(const Persistence_diagram & _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10, double _min_y = 0.0, double _max_y = 1.0, int _res_y = 10, const Weight & _weight = arctan_weight(1,1), double _sigma = 1.0){ diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x; min_y = _min_y; max_y = _max_y; res_y = _res_y, weight = _weight; sigma = _sigma; } @@ -81,7 +91,7 @@ class Persistence_image { * \ingroup Persistence_image * */ - std::vector > vectorize() { + std::vector > vectorize() const { std::vector > im; for(int i = 0; i < res_y; i++) im.emplace_back(); double step_x = (max_x - min_x)/res_x; double step_y = (max_y - min_y)/res_y; @@ -109,9 +119,8 @@ class Persistence_image { -}; - -} // namespace Persistence_image +}; // class Persistence_image +} // namespace Persistence_representations } // namespace Gudhi #endif // PERSISTENCE_IMAGE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index 9a63fccd..76c43e65 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -26,6 +26,7 @@ // gudhi include #include #include +#include // standard include #include @@ -39,19 +40,16 @@ #include #include -using PD = std::vector >; -using Weight = std::function) >; - namespace Gudhi { namespace Persistence_representations { /** * \class Persistence_weighted_gaussian gudhi/Persistence_weighted_gaussian.h - * \brief A class implementing the Persistence Weighted Gaussian Kernel and a specific case of it called the Persistence Scale Space Kernel. + * \brief A class implementing the Persistence Weighted Gaussian kernel and a specific case thereof called the Persistence Scale Space kernel. 
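+ *
+ * A minimal usage sketch (illustrative values; diag1 and diag2 stand for two Persistence_diagram objects, and both
+ * representations must be built with the same sigma, approx and weight parameters, see below):
+ * \code{.cpp}
+ * Persistence_weighted_gaussian pwg1(diag1, 1., 1000, arctan_weight(1, 1));  // 1000 Fourier features
+ * Persistence_weighted_gaussian pwg2(diag2, 1., 1000, arctan_weight(1, 1));
+ * double k = pwg1.compute_scalar_product(pwg2);  // approximate kernel value
+ * // passing approx = -1 instead requests the exact computation
+ * \endcode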
* * \ingroup Persistence_representations * * \details - * The Persistence Weighted Gaussian Kernel is built with Gaussian Kernel Mean Embedding, meaning that each persistence diagram is first + * The Persistence Weighted Gaussian kernel is built with Gaussian Kernel Mean Embedding, meaning that each persistence diagram is first * sent to the Hilbert space of a Gaussian kernel with bandwidth parameter \f$\sigma >0\f$ using a weighted mean embedding \f$\Phi\f$: * * \f$ \Phi\,:\,D\,\rightarrow\,\sum_{p\in D}\,w(p)\,{\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right) \f$, @@ -69,59 +67,41 @@ namespace Persistence_representations { * in the diagrams. This time can be improved by computing approximations of the kernel * with \f$m\f$ Fourier features \cite Rahimi07randomfeatures. In that case, the computation time becomes \f$O(mn)\f$. * - * The Persistence Scale Space Kernel is a Persistence Weighted Gaussian Kernel between modified diagrams: + * The Persistence Scale Space kernel is a Persistence Weighted Gaussian kernel between modified diagrams: * the symmetric of each point with respect to the diagonal is first added in each diagram, and then the weight function * is set to be +1 if the point is above the diagonal and -1 otherwise. * - * For more details, please consult Persistence Weighted Kernel for Topological Data Analysis\cite Kusano_Fukumizu_Hiraoka_PWGK - * and A Stable Multi-Scale Kernel for Topological Machine Learning\cite Reininghaus_Huber_ALL_PSSK . - * It implements the following concepts: Topological_data_with_distances, Topological_data_with_scalar_product. + * For more details, please see \cite Kusano_Fukumizu_Hiraoka_PWGK + * and \cite Reininghaus_Huber_ALL_PSSK . * **/ class Persistence_weighted_gaussian{ protected: - PD diagram; + Persistence_diagram diagram; Weight weight; double sigma; int approx; public: - /** \brief Persistence Weighted Gaussian Kernel constructor. + /** \brief Persistence Weighted Gaussian kernel constructor. * \ingroup Persistence_weighted_gaussian * * @param[in] _diagram persistence diagram. - * @param[in] _sigma bandwidth parameter of the Gaussian Kernel used for the Kernel Mean Embedding of the diagrams. + * @param[in] _sigma bandwidth parameter of the Gaussian kernel used for the Kernel Mean Embedding of the diagrams. * @param[in] _approx number of random Fourier features in case of approximate computation, set to -1 for exact computation. * @param[in] _weight weight function for the points in the diagrams. * */ - Persistence_weighted_gaussian(PD _diagram, double _sigma = 1.0, int _approx = 1000, Weight _weight = arctan_weight(1,1)){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} - - PD get_diagram() const {return this->diagram;} - double get_sigma() const {return this->sigma;} - int get_approx() const {return this->approx;} - Weight get_weight() const {return this->weight;} + Persistence_weighted_gaussian(const Persistence_diagram & _diagram, double _sigma = 1.0, int _approx = 1000, const Weight & _weight = arctan_weight(1,1)){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} // ********************************** // Utils. // ********************************** - /** \brief Specific weight of Persistence Scale Space Kernel. - * \ingroup Persistence_weighted_gaussian - * - * @param[in] p point in 2D. 
- * - */ - static double pss_weight(std::pair p) {if(p.second > p.first) return 1; else return -1;} - static double linear_weight(std::pair p) {return std::abs(p.second - p.first);} - static double const_weight(std::pair p) {return 1;} - static std::function) > arctan_weight(double C, double power) {return [=](std::pair p){return C * atan(std::pow(std::abs(p.second - p.first), power));};} - - - std::vector > Fourier_feat(PD diag, std::vector > z, Weight weight = arctan_weight(1,1)){ + std::vector > Fourier_feat(const Persistence_diagram & diag, const std::vector > & z, const Weight & weight = arctan_weight(1,1)) const { int md = diag.size(); std::vector > b; int mz = z.size(); for(int i = 0; i < mz; i++){ double d1 = 0; double d2 = 0; double zx = z[i].first; double zy = z[i].second; @@ -135,7 +115,7 @@ class Persistence_weighted_gaussian{ return b; } - std::vector > random_Fourier(double sigma, int m = 1000){ + std::vector > random_Fourier(double sigma, int m = 1000) const { std::normal_distribution distrib(0,1); std::vector > z; std::random_device rd; for(int i = 0; i < m; i++){ std::mt19937 e1(rd()); std::mt19937 e2(rd()); @@ -154,12 +134,14 @@ class Persistence_weighted_gaussian{ /** \brief Evaluation of the kernel on a pair of diagrams. * \ingroup Persistence_weighted_gaussian * - * @param[in] second other instance of class Persistence_weighted_gaussian. Warning: sigma, approx and weight parameters need to be the same for both instances!!! + * @pre sigma, approx and weight attributes need to be the same for both instances. + * @param[in] second other instance of class Persistence_weighted_gaussian. * */ - double compute_scalar_product(Persistence_weighted_gaussian second){ + double compute_scalar_product(const Persistence_weighted_gaussian & second) const { - PD diagram1 = this->diagram; PD diagram2 = second.diagram; + GUDHI_CHECK(this->sigma != second.sigma || this->approx != second.approx || this->weight != second.weight, std::invalid_argument("Error: different values for representations")); + Persistence_diagram diagram1 = this->diagram; Persistence_diagram diagram2 = second.diagram; if(this->approx == -1){ int num_pts1 = diagram1.size(); int num_pts2 = diagram2.size(); double k = 0; @@ -171,7 +153,7 @@ class Persistence_weighted_gaussian{ return k; } else{ - std::vector > z = random_Fourier(this->sigma, this->approx); + std::vector > z = random_Fourier(this->sigma, this->approx); std::vector > b1 = Fourier_feat(diagram1,z,this->weight); std::vector > b2 = Fourier_feat(diagram2,z,this->weight); double d = 0; for(int i = 0; i < this->approx; i++) d += b1[i].first*b2[i].first + b1[i].second*b2[i].second; @@ -182,20 +164,18 @@ class Persistence_weighted_gaussian{ /** \brief Evaluation of the distance between images of diagrams in the Hilbert space of the kernel. * \ingroup Persistence_weighted_gaussian * - * @param[in] second other instance of class Persistence_weighted_gaussian. Warning: sigma, approx and weight parameters need to be the same for both instances!!! + * @pre sigma, approx and weight attributes need to be the same for both instances. + * @param[in] second other instance of class Persistence_weighted_gaussian. * */ - double distance(Persistence_weighted_gaussian second) { - if(this->sigma != second.get_sigma() || this->approx != second.get_approx()){ - std::cout << "Error: different representations!" 
<< std::endl; return 0; - } - else return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); + double distance(const Persistence_weighted_gaussian & second) const { + GUDHI_CHECK(this->sigma != second.sigma || this->approx != second.approx || this->weight != second.weight, std::invalid_argument("Error: different values for representations")); + return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); } -}; - -} // namespace Persistence_weighted_gaussian +}; // class Persistence_weighted_gaussian +} // namespace Persistence_representations } // namespace Gudhi #endif // PERSISTENCE_WEIGHTED_GAUSSIAN_H_ diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index 6a9a607e..235918fe 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -40,19 +40,17 @@ #include #include -using PD = std::vector >; - namespace Gudhi { namespace Persistence_representations { /** * \class Sliced_Wasserstein gudhi/Sliced_Wasserstein.h - * \brief A class implementing the Sliced Wasserstein Kernel. + * \brief A class implementing the Sliced Wasserstein kernel. * * \ingroup Persistence_representations * * \details - * The Sliced Wasserstein Kernel is defined as a Gaussian-like Kernel between persistence diagrams, where the distance used for + * The Sliced Wasserstein kernel is defined as a Gaussian-like kernel between persistence diagrams, where the distance used for * comparison is the Sliced Wasserstein distance \f$SW\f$ between persistence diagrams, defined as the integral of the 1-norm * between the sorted projections of the diagrams onto all lines passing through the origin: * @@ -65,15 +63,14 @@ namespace Persistence_representations { * * \f$ k(D_1,D_2) = {\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right).\f$ * - * For more details, please consult Sliced Wasserstein Kernel for Persistence Diagrams\cite pmlr-v70-carriere17a . - * It implements the following concepts: Topological_data_with_distances, Topological_data_with_scalar_product. + * For more details, please see \cite pmlr-v70-carriere17a . * **/ class Sliced_Wasserstein { protected: - PD diagram; + Persistence_diagram diagram; int approx; double sigma; std::vector > projections, projections_diagonal; @@ -107,7 +104,7 @@ class Sliced_Wasserstein { } - /** \brief Sliced Wasserstein Kernel constructor. + /** \brief Sliced Wasserstein kernel constructor. * \ingroup Sliced_Wasserstein * * @param[in] _diagram persistence diagram. @@ -115,21 +112,14 @@ class Sliced_Wasserstein { * @param[in] _approx number of directions used to approximate the integral in the Sliced Wasserstein distance, set to -1 for exact computation. * */ - Sliced_Wasserstein(PD _diagram, double _sigma = 1.0, int _approx = 100){diagram = _diagram; approx = _approx; sigma = _sigma; build_rep();} - - PD get_diagram() const {return this->diagram;} - int get_approx() const {return this->approx;} - double get_sigma() const {return this->sigma;} - - - + Sliced_Wasserstein(const Persistence_diagram & _diagram, double _sigma = 1.0, int _approx = 100){diagram = _diagram; approx = _approx; sigma = _sigma; build_rep();} // ********************************** // Utils. 
// ********************************** // Compute the angle formed by two points of a PD - double compute_angle(PD diag, int i, int j){ + double compute_angle(const Persistence_diagram & diag, int i, int j) const { std::pair vect; double x1,y1, x2,y2; x1 = diag[i].first; y1 = diag[i].second; x2 = diag[j].first; y2 = diag[j].second; @@ -150,7 +140,7 @@ class Sliced_Wasserstein { } // Compute the integral of |cos()| between alpha and beta, valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] - double compute_int_cos(const double & alpha, const double & beta){ + double compute_int_cos(double alpha, double beta) const { double res = 0; if (alpha >= 0 && alpha <= pi){ if (cos(alpha) >= 0){ @@ -175,13 +165,13 @@ class Sliced_Wasserstein { return res; } - double compute_int(const double & theta1, const double & theta2, const int & p, const int & q, const PD & PD1, const PD & PD2){ - double norm = std::sqrt( (PD1[p].first-PD2[q].first)*(PD1[p].first-PD2[q].first) + (PD1[p].second-PD2[q].second)*(PD1[p].second-PD2[q].second) ); + double compute_int(double theta1, double theta2, int p, int q, const Persistence_diagram & diag1, const Persistence_diagram & diag2) const { + double norm = std::sqrt( (diag1[p].first-diag2[q].first)*(diag1[p].first-diag2[q].first) + (diag1[p].second-diag2[q].second)*(diag1[p].second-diag2[q].second) ); double angle1; - if (PD1[p].first > PD2[q].first) - angle1 = theta1 - asin( (PD1[p].second-PD2[q].second)/norm ); + if (diag1[p].first > diag2[q].first) + angle1 = theta1 - asin( (diag1[p].second-diag2[q].second)/norm ); else - angle1 = theta1 - asin( (PD2[q].second-PD1[p].second)/norm ); + angle1 = theta1 - asin( (diag2[q].second-diag1[p].second)/norm ); double angle2 = angle1 + theta2 - theta1; double integral = compute_int_cos(angle1,angle2); return norm*integral; @@ -197,134 +187,133 @@ class Sliced_Wasserstein { /** \brief Evaluation of the Sliced Wasserstein Distance between a pair of diagrams. * \ingroup Sliced_Wasserstein * + * @pre approx attribute needs to be the same for both instances. * @param[in] second other instance of class Sliced_Wasserstein. - * For warning in red: - * @warning approx parameter needs to be the same for both instances. + * * */ - double compute_sliced_wasserstein_distance(Sliced_Wasserstein second) { + double compute_sliced_wasserstein_distance(const Sliced_Wasserstein & second) const { - GUDHI_CHECK(this->approx != second.approx, std::invalid_argument("Error: different approx values for representations")); + GUDHI_CHECK(this->approx != second.approx, std::invalid_argument("Error: different approx values for representations")); - PD diagram1 = this->diagram; PD diagram2 = second.diagram; double sw = 0; + Persistence_diagram diagram1 = this->diagram; Persistence_diagram diagram2 = second.diagram; double sw = 0; - if(this->approx == -1){ + if(this->approx == -1){ - // Add projections onto diagonal. - int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); double max_ordinate = std::numeric_limits::lowest(); - for (int i = 0; i < n2; i++){ - max_ordinate = std::max(max_ordinate, diagram2[i].second); - diagram1.emplace_back( (diagram2[i].first+diagram2[i].second)/2, (diagram2[i].first+diagram2[i].second)/2 ); - } - for (int i = 0; i < n1; i++){ - max_ordinate = std::max(max_ordinate, diagram1[i].second); - diagram2.emplace_back( (diagram1[i].first+diagram1[i].second)/2, (diagram1[i].first+diagram1[i].second)/2 ); - } - int num_pts_dgm = diagram1.size(); - - // Slightly perturb the points so that the PDs are in generic positions. 
- int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} - double thresh = pow(10,-5+mag); - srand(time(NULL)); - for (int i = 0; i < num_pts_dgm; i++){ - diagram1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); diagram1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - diagram2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); diagram2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - } + // Add projections onto diagonal. + int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); double max_ordinate = std::numeric_limits::lowest(); + for (int i = 0; i < n2; i++){ + max_ordinate = std::max(max_ordinate, diagram2[i].second); + diagram1.emplace_back( (diagram2[i].first+diagram2[i].second)/2, (diagram2[i].first+diagram2[i].second)/2 ); + } + for (int i = 0; i < n1; i++){ + max_ordinate = std::max(max_ordinate, diagram1[i].second); + diagram2.emplace_back( (diagram1[i].first+diagram1[i].second)/2, (diagram1[i].first+diagram1[i].second)/2 ); + } + int num_pts_dgm = diagram1.size(); + + // Slightly perturb the points so that the PDs are in generic positions. + int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} + double thresh = pow(10,-5+mag); + srand(time(NULL)); + for (int i = 0; i < num_pts_dgm; i++){ + diagram1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); diagram1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + diagram2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); diagram2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + } - // Compute all angles in both PDs. - std::vector > > angles1, angles2; - for (int i = 0; i < num_pts_dgm; i++){ - for (int j = i+1; j < num_pts_dgm; j++){ - double theta1 = compute_angle(diagram1,i,j); double theta2 = compute_angle(diagram2,i,j); - angles1.emplace_back(theta1, std::pair(i,j)); - angles2.emplace_back(theta2, std::pair(i,j)); - } + // Compute all angles in both PDs. + std::vector > > angles1, angles2; + for (int i = 0; i < num_pts_dgm; i++){ + for (int j = i+1; j < num_pts_dgm; j++){ + double theta1 = compute_angle(diagram1,i,j); double theta2 = compute_angle(diagram2,i,j); + angles1.emplace_back(theta1, std::pair(i,j)); + angles2.emplace_back(theta2, std::pair(i,j)); } + } - // Sort angles. - std::sort(angles1.begin(), angles1.end(), [=](std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); - std::sort(angles2.begin(), angles2.end(), [=](std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); - - // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). - std::vector orderp1, orderp2; - for (int i = 0; i < num_pts_dgm; i++){ orderp1.push_back(i); orderp2.push_back(i); } - std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(diagram1[i].second != diagram1[j].second) return (diagram1[i].second < diagram1[j].second); else return (diagram1[i].first > diagram1[j].first); } ); - std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(diagram2[i].second != diagram2[j].second) return (diagram2[i].second < diagram2[j].second); else return (diagram2[i].first > diagram2[j].first); } ); - - // Find the inverses of the orders. - std::vector order1(num_pts_dgm); std::vector order2(num_pts_dgm); - for(int i = 0; i < num_pts_dgm; i++) for (int j = 0; j < num_pts_dgm; j++) if(orderp1[j] == i){ order1[i] = j; break; } - for(int i = 0; i < num_pts_dgm; i++) for (int j = 0; j < num_pts_dgm; j++) if(orderp2[j] == i){ order2[i] = j; break; } - - // Record all inversions of points in the orders as theta varies along the positive half-disk. 
- std::vector > > anglePerm1(num_pts_dgm); - std::vector > > anglePerm2(num_pts_dgm); - - int m1 = angles1.size(); - for (int i = 0; i < m1; i++){ - double theta = angles1[i].first; int p = angles1[i].second.first; int q = angles1[i].second.second; - anglePerm1[order1[p]].emplace_back(p,theta); - anglePerm1[order1[q]].emplace_back(q,theta); - int a = order1[p]; int b = order1[q]; order1[p] = b; order1[q] = a; - } + // Sort angles. + std::sort(angles1.begin(), angles1.end(), [=](std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); + std::sort(angles2.begin(), angles2.end(), [=](std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); + + // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). + std::vector orderp1, orderp2; + for (int i = 0; i < num_pts_dgm; i++){ orderp1.push_back(i); orderp2.push_back(i); } + std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(diagram1[i].second != diagram1[j].second) return (diagram1[i].second < diagram1[j].second); else return (diagram1[i].first > diagram1[j].first); } ); + std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(diagram2[i].second != diagram2[j].second) return (diagram2[i].second < diagram2[j].second); else return (diagram2[i].first > diagram2[j].first); } ); + + // Find the inverses of the orders. + std::vector order1(num_pts_dgm); std::vector order2(num_pts_dgm); + for(int i = 0; i < num_pts_dgm; i++) for (int j = 0; j < num_pts_dgm; j++) if(orderp1[j] == i){ order1[i] = j; break; } + for(int i = 0; i < num_pts_dgm; i++) for (int j = 0; j < num_pts_dgm; j++) if(orderp2[j] == i){ order2[i] = j; break; } + + // Record all inversions of points in the orders as theta varies along the positive half-disk. + std::vector > > anglePerm1(num_pts_dgm); + std::vector > > anglePerm2(num_pts_dgm); + + int m1 = angles1.size(); + for (int i = 0; i < m1; i++){ + double theta = angles1[i].first; int p = angles1[i].second.first; int q = angles1[i].second.second; + anglePerm1[order1[p]].emplace_back(p,theta); + anglePerm1[order1[q]].emplace_back(q,theta); + int a = order1[p]; int b = order1[q]; order1[p] = b; order1[q] = a; + } - int m2 = angles2.size(); - for (int i = 0; i < m2; i++){ - double theta = angles2[i].first; int p = angles2[i].second.first; int q = angles2[i].second.second; - anglePerm2[order2[p]].emplace_back(p,theta); - anglePerm2[order2[q]].emplace_back(q,theta); - int a = order2[p]; int b = order2[q]; order2[p] = b; order2[q] = a; - } + int m2 = angles2.size(); + for (int i = 0; i < m2; i++){ + double theta = angles2[i].first; int p = angles2[i].second.first; int q = angles2[i].second.second; + anglePerm2[order2[p]].emplace_back(p,theta); + anglePerm2[order2[q]].emplace_back(q,theta); + int a = order2[p]; int b = order2[q]; order2[p] = b; order2[q] = a; + } - for (int i = 0; i < num_pts_dgm; i++){ - anglePerm1[order1[i]].emplace_back(i,pi/2); - anglePerm2[order2[i]].emplace_back(i,pi/2); - } + for (int i = 0; i < num_pts_dgm; i++){ + anglePerm1[order1[i]].emplace_back(i,pi/2); + anglePerm2[order2[i]].emplace_back(i,pi/2); + } - // Compute the SW distance with the list of inversions. 
- for (int i = 0; i < num_pts_dgm; i++){ - std::vector > u,v; u = anglePerm1[i]; v = anglePerm2[i]; - double theta1, theta2; theta1 = -pi/2; - unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(u[ku].second,v[kv].second); - while(theta1 != pi/2){ - if(diagram1[u[ku].first].first != diagram2[v[kv].first].first || diagram1[u[ku].first].second != diagram2[v[kv].first].second) - if(theta1 != theta2) - sw += compute_int(theta1, theta2, u[ku].first, v[kv].first, diagram1, diagram2); - theta1 = theta2; - if ( (theta2 == u[ku].second) && ku < u.size()-1 ) ku++; - if ( (theta2 == v[kv].second) && kv < v.size()-1 ) kv++; - theta2 = std::min(u[ku].second, v[kv].second); - } + // Compute the SW distance with the list of inversions. + for (int i = 0; i < num_pts_dgm; i++){ + std::vector > u,v; u = anglePerm1[i]; v = anglePerm2[i]; + double theta1, theta2; theta1 = -pi/2; + unsigned int ku, kv; ku = 0; kv = 0; theta2 = std::min(u[ku].second,v[kv].second); + while(theta1 != pi/2){ + if(diagram1[u[ku].first].first != diagram2[v[kv].first].first || diagram1[u[ku].first].second != diagram2[v[kv].first].second) + if(theta1 != theta2) + sw += compute_int(theta1, theta2, u[ku].first, v[kv].first, diagram1, diagram2); + theta1 = theta2; + if ( (theta2 == u[ku].second) && ku < u.size()-1 ) ku++; + if ( (theta2 == v[kv].second) && kv < v.size()-1 ) kv++; + theta2 = std::min(u[ku].second, v[kv].second); } } + } - else{ - - double step = pi/this->approx; - - for (int i = 0; i < this->approx; i++){ + else{ - std::vector v1; std::vector l1 = this->projections[i]; std::vector l1bis = second.projections_diagonal[i]; std::merge(l1.begin(), l1.end(), l1bis.begin(), l1bis.end(), std::back_inserter(v1)); - std::vector v2; std::vector l2 = second.projections[i]; std::vector l2bis = this->projections_diagonal[i]; std::merge(l2.begin(), l2.end(), l2bis.begin(), l2bis.end(), std::back_inserter(v2)); - int n = v1.size(); double f = 0; - for (int j = 0; j < n; j++) f += std::abs(v1[j] - v2[j]); - sw += f*step; + double step = pi/this->approx; + for (int i = 0; i < this->approx; i++){ - } + std::vector v1; std::vector l1 = this->projections[i]; std::vector l1bis = second.projections_diagonal[i]; std::merge(l1.begin(), l1.end(), l1bis.begin(), l1bis.end(), std::back_inserter(v1)); + std::vector v2; std::vector l2 = second.projections[i]; std::vector l2bis = this->projections_diagonal[i]; std::merge(l2.begin(), l2.end(), l2bis.begin(), l2bis.end(), std::back_inserter(v2)); + int n = v1.size(); double f = 0; + for (int j = 0; j < n; j++) f += std::abs(v1[j] - v2[j]); + sw += f*step; } + } - return sw/pi; + return sw/pi; } /** \brief Evaluation of the kernel on a pair of diagrams. * \ingroup Sliced_Wasserstein * - * @param[in] second other instance of class Sliced_Wasserstein. Warning: sigma and approx parameters need to be the same for both instances!!! + * @pre approx and sigma attributes need to be the same for both instances. + * @param[in] second other instance of class Sliced_Wasserstein. * */ - double compute_scalar_product(Sliced_Wasserstein second){ + double compute_scalar_product(const Sliced_Wasserstein & second) const { GUDHI_CHECK(this->sigma != second.sigma, std::invalid_argument("Error: different sigma values for representations")); return std::exp(-compute_sliced_wasserstein_distance(second)/(2*this->sigma*this->sigma)); } @@ -332,10 +321,11 @@ class Sliced_Wasserstein { /** \brief Evaluation of the distance between images of diagrams in the Hilbert space of the kernel. 
* \ingroup Sliced_Wasserstein * - * @param[in] second other instance of class Sliced_Wasserstein. Warning: sigma and approx parameters need to be the same for both instances!!! + * @pre approx and sigma attributes need to be the same for both instances. + * @param[in] second other instance of class Sliced_Wasserstein. * */ - double distance(Sliced_Wasserstein second) { + double distance(const Sliced_Wasserstein & second) const { GUDHI_CHECK(this->sigma != second.sigma, std::invalid_argument("Error: different sigma values for representations")); return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); } @@ -343,9 +333,8 @@ class Sliced_Wasserstein { -}; - -} // namespace Sliced_Wasserstein +}; // class Sliced_Wasserstein +} // namespace Persistence_representations } // namespace Gudhi #endif // SLICED_WASSERSTEIN_H_ diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 90f2626d..884fce58 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -28,17 +28,28 @@ #include #include +/** + * In this module, we use the name Persistence_diagram for the representation of a diagram in a vector of pairs of two double. + */ +using Persistence_diagram = std::vector >; + +/** + * In this module, we use the name Weight for the representation of a function taking a pair of two double and returning a double. + */ +using Weight = std::function) >; + namespace Gudhi { namespace Persistence_representations { // this file contain an implementation of some common procedures used in Persistence_representations. static constexpr double pi = boost::math::constants::pi(); + // double epsi = std::numeric_limits::epsilon(); double epsi = 0.000005; /** * A procedure used to compare doubles. Typically given two doubles A and B, comparing A == B is not good idea. In this - *case, we use the procedure almostEqual with the epsi defined at + * case, we use the procedure almostEqual with the epsi defined at * the top of the file. Setting up the epsi gives the user a tolerance on what should be consider equal. **/ inline bool almost_equal(double a, double b) { @@ -55,8 +66,7 @@ double birth_plus_deaths(std::pair a) { return a.first + a.secon // landscapes /** - * Given two points in R^2, the procedure compute the parameters A and B of the line y = Ax + B that crosses those two - *points. + * Given two points in R^2, the procedure compute the parameters A and B of the line y = Ax + B that crosses those two points. **/ std::pair compute_parameters_of_a_line(std::pair p1, std::pair p2) { double a = (p2.second - p1.second) / (p2.first - p1.first); @@ -66,8 +76,7 @@ std::pair compute_parameters_of_a_line(std::pair // landscapes /** - * This procedure given two points which lies on the opposite sides of x axis, compute x for which the line connecting - *those two points crosses x axis. + * This procedure given two points which lies on the opposite sides of x axis, compute x for which the line connecting those two points crosses x axis. 
**/ double find_zero_of_a_line_segment_between_those_two_points(std::pair p1, std::pair p2) { @@ -91,8 +100,7 @@ double find_zero_of_a_line_segment_between_those_two_points(std::pair f, std::pair s) { if (f.first < s.first) { -- cgit v1.2.3 From 380571047eac55b826fe8e0654f9ed9b64f22ffa Mon Sep 17 00:00:00 2001 From: mcarrier Date: Mon, 23 Apr 2018 15:22:37 +0000 Subject: added weight_functions.h git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3388 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 71f741e7ca2fcacc877d44e4d6b633f283526f2f --- .../include/gudhi/Weight_functions.h | 81 ++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 src/Persistence_representations/include/gudhi/Weight_functions.h diff --git a/src/Persistence_representations/include/gudhi/Weight_functions.h b/src/Persistence_representations/include/gudhi/Weight_functions.h new file mode 100644 index 00000000..78de406d --- /dev/null +++ b/src/Persistence_representations/include/gudhi/Weight_functions.h @@ -0,0 +1,81 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef WEIGHT_FUNCTIONS_H_ +#define WEIGHT_FUNCTIONS_H_ + +// gudhi include +#include +#include + +// standard include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace Gudhi { +namespace Persistence_representations { + +/** \fn static double pss_weight(std::pair p) + * \brief Persistence Scale Space kernel weight function. + * \ingroup Persistence_representations + * + * @param[in] p point in 2D. + */ +static double pss_weight(std::pair p) {if(p.second > p.first) return 1; else return -1;} + +/** \fn static double linear_weight(std::pair p) + * \brief Linear weight function. + * \ingroup Persistence_representations + * + * @param[in] p point in 2D. + */ +static double linear_weight(std::pair p) {return std::abs(p.second - p.first);} + +/** \fn static double const_weight(std::pair p) + * \brief Constant weight function. + * \ingroup Persistence_representations + * + * @param[in] p point in 2D. + */ +static double const_weight(std::pair p) {return 1;} + +/** \fn static std::function) > arctan_weight(double C, double alpha) + * \brief Returns the arctan weight function with parameters C and alpha. + * \ingroup Persistence_representations + * + * @param[in] C positive constant. + * @param[in] alpha positive power. 
+ */ +static std::function) > arctan_weight(double C, double alpha) {return [=](std::pair p){return C * atan(std::pow(std::abs(p.second - p.first), alpha));};} + +} // namespace Persistence_representations +} // namespace Gudhi + +#endif // WEIGHT_FUNCTIONS_H_ -- cgit v1.2.3 From ea11100d803c86a0fd5cfec1b9431110b48d87c9 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Mon, 23 Apr 2018 15:44:55 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3389 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 1569e7e9d47af54fe85e736583e53c99f1178439 --- src/cython/include/Kernels_interface.h | 12 ++++++------ src/cython/include/Vectors_interface.h | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index 0da28245..03050408 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -46,8 +46,8 @@ namespace persistence_diagram { } double pwg(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N, double C, double p) { - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(C,p)); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(C,p)); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::arctan_weight(C,p)); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::arctan_weight(C,p)); return pwg1.compute_scalar_product(pwg2); } @@ -55,15 +55,15 @@ namespace persistence_diagram { std::vector> pd1 = diag1; int numpts = diag1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(diag1[i].second,diag1[i].first); std::vector> pd2 = diag2; numpts = diag2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(diag2[i].second,diag2[i].first); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); } double pss_sym(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::Persistence_weighted_gaussian::pss_weight); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); + 
Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); } diff --git a/src/cython/include/Vectors_interface.h b/src/cython/include/Vectors_interface.h index 49d28e7c..7e191f2a 100644 --- a/src/cython/include/Vectors_interface.h +++ b/src/cython/include/Vectors_interface.h @@ -44,9 +44,9 @@ namespace persistence_diagram { std::vector > compute_pim(const std::vector >& diag, double min_x, double max_x, int res_x, double min_y, double max_y, int res_y, std::string weight, double sigma, double C, double p) { Weight weight_fn; - if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::Persistence_weighted_gaussian::linear_weight; - if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::Persistence_weighted_gaussian::arctan_weight(C,p); - if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::Persistence_weighted_gaussian::const_weight; + if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::linear_weight; + if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::arctan_weight(C,p); + if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::const_weight; Gudhi::Persistence_representations::Persistence_image P(diag, min_x, max_x, res_x, min_y, max_y, res_y, weight_fn, sigma); return P.vectorize(); } -- cgit v1.2.3 From 5207236bc6cfacf1789b3eac99360a272a008678 Mon Sep 17 00:00:00 2001 From: fgodi Date: Mon, 23 Apr 2018 16:17:49 +0000 Subject: Lazy ré-ajouté MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3390 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: f4b2cbaf62e0c43d29f8d52b7df45e62b52f067f --- src/Toplex_map/include/gudhi/Lazy_Toplex_map.h | 236 +++++++++++++++++++++++++ 1 file changed, 236 insertions(+) create mode 100644 src/Toplex_map/include/gudhi/Lazy_Toplex_map.h diff --git a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h new file mode 100644 index 00000000..481d33a1 --- /dev/null +++ b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h @@ -0,0 +1,236 @@ +#ifndef LAZY_TOPLEX_MAP_H +#define LAZY_TOPLEX_MAP_H + +#include +#include + +namespace Gudhi { + +class Lazy_Toplex_map { + +public: + + /** Vertex is the type of vertices. + * \ingroup toplex_map */ + typedef Toplex_map::Vertex Vertex; + + /** Simplex is the type of simplices. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex Simplex; + + /** The type of the pointers to maximal simplices. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex_ptr Simplex_ptr; + + /** The type of the sets of Simplex_ptr. 
+ * \ingroup toplex_map */ + typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + + template + void insert_max_simplex(const Input_vertex_range &vertex_range); + template + bool insert_simplex(const Input_vertex_range &vertex_range); + template + void remove_simplex(const Input_vertex_range &vertex_range); + + template + bool membership(const Input_vertex_range &vertex_range); + template + bool all_facets_inside(const Input_vertex_range &vertex_range); + + Vertex contraction(const Vertex x, const Vertex y); + + std::size_t num_simplices() const; + + std::unordered_map gamma0_lbounds; + +private: + template + void erase_max(const Input_vertex_range &vertex_range); + template + Vertex best_index(const Input_vertex_range &vertex_range); + void clean(const Vertex v); + + std::unordered_map t0; + bool empty_toplex; // Is the empty simplex a toplex ? + + typedef boost::heap::fibonacci_heap> PriorityQueue; + PriorityQueue cleaning_priority; + std::unordered_map cp_handles; + + std::size_t get_gamma0_lbound(const Vertex v) const; + + std::size_t size_lbound = 0; + std::size_t size = 0; + + const double alpha = 2; //time + const double betta = 4; //memory +}; + +template +void Lazy_Toplex_map::insert_max_simplex(const Input_vertex_range &vertex_range){ + for(const Vertex& v : vertex_range) + if(!gamma0_lbounds.count(v)) gamma0_lbounds.emplace(v,1); + else gamma0_lbounds[v]++; + size_lbound++; + insert_simplex(vertex_range); +} + +template +bool Lazy_Toplex_map::insert_simplex(const Input_vertex_range &vertex_range){ + Simplex sigma(vertex_range.begin(),vertex_range.end()); + empty_toplex = (sigma.size()==0); //vérifier la gestion de empty face + Simplex_ptr sptr = std::make_shared(sigma); + bool inserted = false; + for(const Vertex& v : sigma){ + if(!t0.count(v)){ + t0.emplace(v, Simplex_ptr_set()); + auto v_handle = cleaning_priority.push(std::make_pair(0, v)); + cp_handles.emplace(v, v_handle); + } + inserted = t0.at(v).emplace(sptr).second; + cleaning_priority.update(cp_handles.at(v), std::make_pair(t0.at(v).size() - get_gamma0_lbound(v),v)); + } + if(inserted) + size++; + if(size > (size_lbound+1) * betta) + clean(cleaning_priority.top().second); + return inserted; +} + +template +void Lazy_Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ + if(vertex_range.begin()==vertex_range.end()){ + t0.clear(); + gamma0_lbounds.clear(); + cleaning_priority.clear(); + size_lbound = 0; + size = 0; + empty_toplex = false; + } + else { + const Vertex& v = best_index(vertex_range); + //Copy constructor needed because the set is modified + if(t0.count(v)) for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))) + if(included(vertex_range, *sptr)){ + erase_max(*sptr); + for(const Simplex& f : facets(vertex_range)) + insert_max_simplex(f); + } + } +} + +template +bool Lazy_Toplex_map::membership(const Input_vertex_range &vertex_range){ + if(t0.size()==0 && !empty_toplex) return false; //empty complex + if(vertex_range.begin()==vertex_range.end()) return true; //empty query simplex + Vertex v = best_index(vertex_range); + if(!t0.count(v)) return false; + for(const Simplex_ptr& sptr : t0.at(v)) + if(included(vertex_range, *sptr)) return true; + return false; +} + +template +bool Lazy_Toplex_map::all_facets_inside(const Input_vertex_range &vertex_range){ + Simplex sigma(vertex_range.begin(),vertex_range.end()); + Vertex v = best_index(sigma); + if(!t0.count(v)) return false; + Simplex f = sigma; f.erase(v); + if(!membership(f)) return false; + std::unordered_set facets_inside; + for(const 
Simplex_ptr& sptr : t0.at(v)) + for(const Vertex& w : sigma){ + f = sigma; f.erase(w); + if(included(f, *sptr)) facets_inside.insert(w); + } + return facets_inside.size() == sigma.size() - 1; +} + +/* Returns the remaining vertex */ +Toplex_map::Vertex Lazy_Toplex_map::contraction(const Vertex x, const Vertex y){ + if(!t0.count(x)) return y; + if(!t0.count(y)) return x; + Vertex k, d; + if(t0.at(x).size() > t0.at(y).size()) + k=x, d=y; + else + k=y, d=x; + //Copy constructor needed because the set is modified + for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ + Simplex sigma(*sptr); + erase_max(sigma); + sigma.erase(d); + sigma.insert(k); + insert_simplex(sigma); + } + t0.erase(d); + return k; +} + +/* No facets insert_simplexed */ +template +inline void Lazy_Toplex_map::erase_max(const Input_vertex_range &vertex_range){ + Simplex sigma(vertex_range.begin(),vertex_range.end()); + empty_toplex = false; + Simplex_ptr sptr = std::make_shared(sigma); + bool erased; + for(const Vertex& v : sigma){ + erased = t0.at(v).erase(sptr) > 0; + if(t0.at(v).size()==0) + t0.erase(v); + } + if (erased) + size--; +} + +template +Toplex_map::Vertex Lazy_Toplex_map::best_index(const Input_vertex_range &vertex_range){ + Simplex tau(vertex_range.begin(),vertex_range.end()); + std::size_t min = std::numeric_limits::max(); Vertex arg_min = -1; + for(const Vertex& v : tau) + if(!t0.count(v)) return v; + else if(t0.at(v).size() < min) + min = t0.at(v).size(), arg_min = v; + if(min > alpha * get_gamma0_lbound(arg_min)) + clean(arg_min); + return arg_min; +} + +std::size_t Lazy_Toplex_map::get_gamma0_lbound(const Vertex v) const{ + return gamma0_lbounds.count(v) ? gamma0_lbounds.at(v) : 0; +} + + +void Lazy_Toplex_map::clean(const Vertex v){ + Toplex_map toplices; + std::unordered_map> dsorted_simplices; + int max_dim = 0; + for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))){ + if(sptr->size() > max_dim){ + for(int d = max_dim+1; d<=sptr->size(); d++) + dsorted_simplices.emplace(d, std::vector()); + max_dim = sptr->size(); + } + dsorted_simplices[sptr->size()].emplace_back(*sptr); + erase_max(*sptr); + } + for(int d = max_dim; d>=1; d--) + for(const Simplex &s : dsorted_simplices.at(d)) + if(!toplices.membership(s)) + toplices.insert_independent_simplex(s); + Simplex sv; sv.insert(v); + auto clean_cofaces = toplices.maximal_cofaces(sv); + size_lbound = size_lbound - get_gamma0_lbound(v) + clean_cofaces.size(); + gamma0_lbounds[v] = clean_cofaces.size(); + for(const Simplex_ptr& sptr : clean_cofaces) + insert_simplex(*sptr); +} + +std::size_t Lazy_Toplex_map::num_simplices() const{ + return size; +} + +} //namespace Gudhi + +#endif /* LAZY_TOPLEX_MAP_H */ -- cgit v1.2.3 From df5a7d97392469239e1f00ea50da5eb2b378b7e7 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Wed, 25 Apr 2018 16:47:55 +0000 Subject: renamed landscape and image code to make them fit in Pawel's denomination git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3396 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5269a65810a1164cb61bcb34d23046e6a48ec355 --- .../doc/Persistence_representations_doc.h | 85 ++++++++++++---------- .../example/CMakeLists.txt | 18 ++--- src/cython/include/Kernels_interface.h | 1 + src/cython/include/Vectors_interface.h | 10 +-- 4 files changed, 61 insertions(+), 53 deletions(-) diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index ca283017..a7691324 100644 --- 
a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -128,33 +128,35 @@ namespace Persistence_representations { function \f$L : \mathbb{N} \times \mathbb{R} \to [0,\infty)\f$ of two variables, if we define \f$L(k,t) = \lambda_k(t)\f$. - The detailed description of algorithms used to compute persistence landscapes can be found in - \cite bubenik_dlotko_landscapes_2016. - Note that this implementation provides exact representation of landscapes. That have many advantages, but also a few - drawbacks. For instance, as discussed - in \cite bubenik_dlotko_landscapes_2016, the exact representation of landscape may be of quadratic size with respect - to the input persistence diagram. It may therefore happen - that, for very large diagrams, using this representation may be memory--prohibitive. In such a case, there are two - possible ways to proceed: - - \li Use non exact representation on a grid described in the Section \ref sec_landscapes_on_grid. + The detailed description of algorithms used to compute persistence landscapes can be found in \cite bubenik_dlotko_landscapes_2016. + Note that this implementation provides exact representation of landscapes. That have many advantages, but also a few drawbacks. + For instance, as discussed in \cite bubenik_dlotko_landscapes_2016, the exact representation of landscape may be of quadratic size with respect + to the input persistence diagram. It may therefore happen that, for very large diagrams, using this representation may be memory--prohibitive. + In such a case, there are two possible ways to proceed: + + \li Use representation on a grid---see section \ref sec_landscapes_on_grid. \li Compute just a number of initial nonzero landscapes. This option is available from C++ level as a last parameter of the constructor of persistence landscape (set by default to std::numeric_limits::max()). \section sec_landscapes_on_grid Persistence Landscapes on a grid + Reference manual: \ref Gudhi::Persistence_representations::Persistence_landscape_on_grid
- This is an alternative, not--exact, representation of persistence landscapes defined in the Section \ref - sec_persistence_landscapes. Unlike in the Section \ref sec_persistence_landscapes we build a - representation of persistence landscape by sampling its values on a finite, equally distributed grid of points. - Since, the persistence landscapes that originate from persistence diagrams have slope \f$1\f$ or \f$-1\f$, we have an - estimate of a region between the grid points where the landscape cab be located. - That allows to estimate an error make when performing various operations on landscape. Note that for average - landscapes the slope is in range \f$[-1,1]\f$ and similar estimate can be used. + Reference manual: \ref Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact
+ + Here, we provide alternative, not exact, representations of persistence landscapes defined in Section \ref sec_persistence_landscapes. + Unlike Section \ref sec_persistence_landscapes, we build representations of persistence landscapes by evaluating the landscape functions on a finite, equally distributed grid of points. + We propose two different representations depending on whether the persistence intervals are also mapped on the grid (Persistence_landscape_on_grid) or not (Persistence_landscape_on_grid_exact). + This makes a big difference since mapping the intervals on the grid makes the computation time smaller but only provides an approximation of the landscape values. - Due to a lack of rigorous description of the algorithms to deal with this non--rigorous representation of persistence - landscapes in the literature, we are providing a short discussion of them in below. + Since persistence landscapes originating from persistence diagrams have slope \f$1\f$ or \f$-1\f$, we have an + estimate of a region between the grid points where the landscapes can be located. + That allows to estimate an error made when performing various operations on landscapes. Note that for average + landscapes the slope is in range \f$[-1,1]\f$ and similar estimates can be used. + + Due to the lack of rigorous description of the algorithms for these non rigorous representations of persistence + landscapes in the literature, we provide a short discussion below. Let us assume that we want to compute persistence landscape on a interval \f$[x,y]\f$. Let us assume that we want to use \f$N\f$ grid points for that purpose. @@ -166,11 +168,11 @@ namespace Persistence_representations { functions) on the i-th point of a grid, i.e. \f$x + i \frac{y-x}{N}\f$. When averaging two persistence landscapes represented by a grid we need to make sure that they are defined in a - compatible grids. I.e. the intervals \f$[x,y]\f$ on which they are defined are + compatible grids, i.e. the intervals \f$[x,y]\f$ on which they are defined are the same, and the numbers of grid points \f$N\f$ are the same in both cases. If this is the case, we simply compute - point-wise averages of the entries of corresponding - vectors (In this whole section we assume that if one vector of numbers is shorter than another, we extend the shorter - one with zeros so that they have the same length.) + point-wise averages of the entries of the corresponding + vectors (in this whole section we assume that if one vector of numbers is shorter than the other, we extend the shortest + one with zeros so that they have the same length). Computations of distances between two persistence landscapes on a grid is not much different than in the rigorous case. In this case, we sum up the distances between the same levels of @@ -179,11 +181,11 @@ namespace Persistence_representations { Similarly as in case of distance, when computing the scalar product of two persistence landscapes on a grid, we sum up the scalar products of corresponding levels of landscapes. For each level, - we assume that the persistence landscape on a grid between two grid points is approximated by linear function. - Therefore to compute scalar product of two corresponding levels of landscapes, + we assume that the persistence landscape on a grid between two grid points is approximated by a linear function. + Therefore to compute the scalar product of two corresponding levels of landscapes, we sum up the integrals of products of line segments for every pair of constitutive grid points. 
- Note that for this representation we need to specify a few parameters: + Note that for these representations we need to specify a few parameters: \li Begin and end point of a grid -- the interval \f$[x,y]\f$ (real numbers). \li Number of points in a grid (positive integer \f$N\f$). @@ -192,29 +194,33 @@ namespace Persistence_representations { Note that the same representation is used in TDA R-package \cite Fasy_Kim_Lecci_Maria_tda. \section sec_persistence_heat_maps Persistence heat maps + Reference manual: \ref Gudhi::Persistence_representations::Persistence_heat_maps
- This is a general class of discrete structures which are based on idea of placing a kernel in the points of - persistence diagrams. + Reference manual: \ref Gudhi::Persistence_representations::Persistence_heat_maps_exact
+ + This is a general class of discrete structures which are based on idea of placing a kernel in the points of persistence diagrams. This idea appeared in work by many authors over the last 15 years. As far as we know this idea was firstly described in the work of Bologna group in \cite Ferri_Frosini_comparision_sheme_1 and \cite Ferri_Frosini_comparision_sheme_2. Later it has been described by Colorado State University group in \cite Persistence_Images_2017. The presented paper - in the first time provide a discussion of stability of the representation. - Also, the same ideas are used in construction of two recent kernels used for machine learning: - \cite Kusano_Fukumizu_Hiraoka_PWGK and \cite Reininghaus_Huber_ALL_PSSK. Both the kernel's construction uses - interesting ideas to ensure stability of the representation with respect to Wasserstein metric. In the kernel + in the first time provided a discussion of stability of this representation. + Also, the same ideas are used in the construction of two recent kernels used for machine learning: + \cite Kusano_Fukumizu_Hiraoka_PWGK and \cite Reininghaus_Huber_ALL_PSSK. Both the kernels use + interesting ideas to ensure stability of the representations with respect to the 1-Wasserstein metric. In the kernel presented in \cite Kusano_Fukumizu_Hiraoka_PWGK, a scaling function is used to multiply the Gaussian kernel in the - way that the points close to diagonal got low weight and consequently do not have a big influence on the resulting + way that the points close to diagonal have low weights and consequently do not have a big influence on the resulting distribution. In \cite Reininghaus_Huber_ALL_PSSK for every point \f$(b,d)\f$ two Gaussian kernels are added: first, with a weight 1 in a point \f$(b,d)\f$, and the second, with the weight -1 for a point \f$(b,d)\f$. In both cases, the representations are stable with respect to 1-Wasserstein distance. - In Persistence\_representations package we currently implement a discretization of the distributions described above. - The base of this implementation is 2-dimensional array of pixels. Each pixel have assigned a real value which - is a sum of values of distributions induced by each point of the persistence diagram. At the moment we compute the - sum of values on a center of a pixels. It can be easily extended to any other function - (like for instance sum of integrals of the intermediate distribution on a pixel). + In Persistence_representations package, we currently implement a discretization of the distributions described above. + The base of this implementation is a 2-dimensional array of pixels. To each pixel is assigned a real value which + is the sum of the distribution values induced by each point of the persistence diagram. + As for Persistence_landscapes, we propose two different representations depending on whether the persistence intervals are also mapped on the pixels + (Persistence_heat_maps) or not (Persistence_heat_maps_exact). + At the moment we compute the sum over the evaluations of the distributions on the pixel centers. It can be easily extended to any other function + (like for instance the sum of the integrals of the distributions over the pixels). - The parameters that determine the structure are the following: + Concerning Persistence_heat_maps, the parameters that determine the structure are the following: \li A positive integer k determining the size of the kernel we used (we always assume that the kernels are square). 
\li A filter: in practice a square matrix of a size \f$2k+1 \times 2k+1\f$. By default, this is a discretization of @@ -226,6 +232,7 @@ namespace Persistence_representations { to diagonal are given then sometimes the kernel have support that reaches the region below the diagonal. If the value of this parameter is true, then the values below diagonal can be erased. + Concerning Persistence_heat_maps_exact, only Gaussian kernels are implemented, so the parameters are the array of pixels, the weight functions for the Gaussians and the bandwidth of the Gaussians. \section sec_persistence_vectors Persistence vectors Reference manual: \ref Gudhi::Persistence_representations::Vector_distances_in_diagram
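For reference, a minimal sketch of how the kernel classes and weight functions patched above are meant to be combined. The header names below follow the file paths in this branch, and the diagram points and parameter values are made up for illustration; this is not part of the committed sources.

#include <gudhi/Persistence_weighted_gaussian.h>
#include <gudhi/Sliced_Wasserstein.h>
#include <gudhi/Weight_functions.h>

#include <iostream>
#include <utility>
#include <vector>

int main() {
  // Two small persistence diagrams, stored as vectors of (birth, death) pairs,
  // i.e. the Persistence_diagram type introduced in common_persistence_representations.h.
  Persistence_diagram diag1 = {{0.0, 4.0}, {1.0, 2.0}, {3.0, 8.0}};
  Persistence_diagram diag2 = {{0.5, 3.5}, {6.0, 8.0}};

  // Persistence Weighted Gaussian kernel: both instances must share the same
  // sigma, approx and weight, as required by the @pre clauses added above.
  double sigma = 1.0;
  int approx = 1000;  // number of random Fourier features, -1 for exact computation
  Weight w = Gudhi::Persistence_representations::arctan_weight(1.0, 1.0);
  Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, approx, w);
  Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, approx, w);
  std::cout << "PWG kernel value: " << pwg1.compute_scalar_product(pwg2) << std::endl;

  // Sliced Wasserstein kernel: 100 directions to approximate the integral, -1 for the exact value.
  Gudhi::Persistence_representations::Sliced_Wasserstein sw1(diag1, sigma, 100);
  Gudhi::Persistence_representations::Sliced_Wasserstein sw2(diag2, sigma, 100);
  std::cout << "SW distance:     " << sw1.compute_sliced_wasserstein_distance(sw2) << std::endl;
  std::cout << "SW kernel value: " << sw1.compute_scalar_product(sw2) << std::endl;

  return 0;
}

Both objects of a pair must be built with the same sigma (and, for the weighted Gaussian kernel, the same approx and weight), which is exactly what the @pre clauses and GUDHI_CHECK calls introduced in the patches above are meant to enforce.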
diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 89284e38..3142f19b 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -37,12 +37,12 @@ add_test(NAME Persistence_weighted_gaussian COMMAND $) install(TARGETS Persistence_weighted_gaussian DESTINATION bin) -add_executable ( Persistence_image persistence_image.cpp ) -add_test(NAME Persistence_image - COMMAND $) -install(TARGETS Persistence_image DESTINATION bin) - -add_executable ( Landscape landscape.cpp ) -add_test(NAME Landscape - COMMAND $) -install(TARGETS Landscape DESTINATION bin) +add_executable ( Persistence_heat_maps_exact persistence_heat_maps_exact.cpp ) +add_test(NAME Persistence_heat_maps_exact + COMMAND $) +install(TARGETS Persistence_heat_maps_exact DESTINATION bin) + +add_executable ( Persistence_landscape_on_grid_exact persistence_landscape_on_grid_exact.cpp ) +add_test(NAME Persistence_landscape_on_grid_exact + COMMAND $) +install(TARGETS Persistence_landscape_on_grid_exact DESTINATION bin) diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index 03050408..dd46656f 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -25,6 +25,7 @@ #include #include +#include #include #include diff --git a/src/cython/include/Vectors_interface.h b/src/cython/include/Vectors_interface.h index 7e191f2a..902ccc10 100644 --- a/src/cython/include/Vectors_interface.h +++ b/src/cython/include/Vectors_interface.h @@ -23,9 +23,9 @@ #ifndef INCLUDE_VECTORS_INTERFACE_H_ #define INCLUDE_VECTORS_INTERFACE_H_ -#include -#include -#include +#include +#include +#include #include #include @@ -38,7 +38,7 @@ namespace Gudhi { namespace persistence_diagram { std::vector > compute_ls(const std::vector >& diag, int nb_ls, double min_x, double max_x, int res_x) { - Gudhi::Persistence_representations::Landscape L(diag, nb_ls, min_x, max_x, res_x); + Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact L(diag, nb_ls, min_x, max_x, res_x); return L.vectorize(); } @@ -47,7 +47,7 @@ namespace persistence_diagram { if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::linear_weight; if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::arctan_weight(C,p); if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::const_weight; - Gudhi::Persistence_representations::Persistence_image P(diag, min_x, max_x, res_x, min_y, max_y, res_y, weight_fn, sigma); + Gudhi::Persistence_representations::Persistence_heat_maps_exact P(diag, min_x, max_x, res_x, min_y, max_y, res_y, weight_fn, sigma); return P.vectorize(); } -- cgit v1.2.3 From 8fb3396a3e1aef4e4ed9752b28a19a0eccaac84e Mon Sep 17 00:00:00 2001 From: mcarrier Date: Wed, 25 Apr 2018 16:49:23 +0000 Subject: renamed landscape and image code to make them fit in Pawel's denomination git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3397 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ddbd1dc31e12654bf3a9d9aaf67c4dfc482faa03 --- .../example/persistence_heat_maps_exact.cpp | 54 +++++++++ .../persistence_landscape_on_grid_exact.cpp | 51 +++++++++ .../include/gudhi/Persistence_heat_maps_exact.h | 125 +++++++++++++++++++++ .../gudhi/Persistence_landscape_on_grid_exact.h | 107 ++++++++++++++++++ 4 files changed, 337 insertions(+) create mode 100644 
src/Persistence_representations/example/persistence_heat_maps_exact.cpp create mode 100644 src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp create mode 100644 src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h create mode 100644 src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h diff --git a/src/Persistence_representations/example/persistence_heat_maps_exact.cpp b/src/Persistence_representations/example/persistence_heat_maps_exact.cpp new file mode 100644 index 00000000..30346d78 --- /dev/null +++ b/src/Persistence_representations/example/persistence_heat_maps_exact.cpp @@ -0,0 +1,54 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include + +#include +#include +#include +#include + +using PI = Gudhi::Persistence_representations::Persistence_heat_maps_exact; +using Weight = std::function) >; + +int main(int argc, char** argv) { + + std::vector > persistence; + + persistence.push_back(std::make_pair(1, 2)); + persistence.push_back(std::make_pair(6, 8)); + persistence.push_back(std::make_pair(0, 4)); + persistence.push_back(std::make_pair(3, 8)); + + double min_x = 0.0; double max_x = 10.0; int res_x = 100; double min_y = 0.0; double max_y = 10.0; int res_y = 100; double sigma = 1.0; Weight weight = Gudhi::Persistence_representations::linear_weight; + + PI pim(persistence, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma); + std::vector > P = pim.vectorize(); + + for(int i = 0; i < res_y; i++){ + for(int j = 0; j < res_x; j++) std::cout << P[i][j] << " "; + std::cout << std::endl; + } + + return 0; +} diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp new file mode 100644 index 00000000..29416693 --- /dev/null +++ b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp @@ -0,0 +1,51 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include + +#include +#include +#include + +using LS = Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact; + +int main(int argc, char** argv) { + + std::vector > persistence; + + persistence.push_back(std::make_pair(1, 2)); + persistence.push_back(std::make_pair(6, 8)); + persistence.push_back(std::make_pair(0, 4)); + persistence.push_back(std::make_pair(3, 8)); + + int nb_ls = 3; double min_x = 0.0; double max_x = 10.0; int res_x = 100; + + LS ls(persistence, nb_ls, min_x, max_x, res_x); + std::vector > L = ls.vectorize(); + + for(int i = 0; i < nb_ls; i++){ + for(int j = 0; j < res_x; j++) std::cout << L[i][j] << " "; + std::cout << std::endl; + } + + return 0; +} diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h new file mode 100644 index 00000000..25f8cb47 --- /dev/null +++ b/src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h @@ -0,0 +1,125 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef PERSISTENCE_HEAT_MAPS_EXACT_H_ +#define PERSISTENCE_HEAT_MAPS_EXACT_H_ + +// gudhi include +#include +#include +#include +#include + +// standard include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace Gudhi { +namespace Persistence_representations { + +/** + * \class Persistence_heat_maps_exact gudhi/Persistence_heat_maps_exact.h + * \brief A class implementing exact persistence heat maps. + * + * \ingroup Persistence_representations + * + * \details + * + * In this class, we propose a way to approximate persistence heat maps, or persistence surfaces, by centering weighted Gaussians on each point of the persistence diagram, and evaluating these (exact) weighted Gaussian functions + * on the pixels of a 2D grid. Note that this scheme is different from the one proposed in Persistence_heat_maps, which first maps the points of the diagram to a 2D grid, and then evaluates the (approximate) weighted Gaussian functions. + * Hence, the difference is that we do not modify the diagram in this implementation, but the code can be slower to run. +**/ + +class Persistence_heat_maps_exact { + + protected: + Persistence_diagram diagram; + int res_x, res_y; + double min_x, max_x, min_y, max_y; + Weight weight; + double sigma; + + public: + + /** \brief Persistence_heat_maps_exact constructor. + * \ingroup Persistence_heat_maps_exact + * + * @param[in] _diagram persistence diagram. 
+ * @param[in] _min_x minimum value of pixel abscissa. + * @param[in] _max_x maximum value of pixel abscissa. + * @param[in] _res_x number of pixels for the x-direction. + * @param[in] _min_y minimum value of pixel ordinate. + * @param[in] _max_y maximum value of pixel ordinate. + * @param[in] _res_y number of pixels for the y-direction. + * @param[in] _weight weight function for the Gaussians. + * @param[in] _sigma bandwidth parameter for the Gaussians. + * + */ + Persistence_heat_maps_exact(const Persistence_diagram & _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10, double _min_y = 0.0, double _max_y = 1.0, int _res_y = 10, const Weight & _weight = arctan_weight(1,1), double _sigma = 1.0){ + diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x; min_y = _min_y; max_y = _max_y; res_y = _res_y, weight = _weight; sigma = _sigma; + } + + /** \brief Computes the persistence image of a diagram. + * \ingroup Persistence_heat_maps_exact + * + */ + std::vector > vectorize() const { + std::vector > im; for(int i = 0; i < res_y; i++) im.emplace_back(); + double step_x = (max_x - min_x)/res_x; double step_y = (max_y - min_y)/res_y; + + int num_pts = diagram.size(); + + for(int i = 0; i < res_y; i++){ + double y = min_y + i*step_y; + for(int j = 0; j < res_x; j++){ + double x = min_x + j*step_x; + + double pixel_value = 0; + for(int k = 0; k < num_pts; k++){ + double px = diagram[k].first; double py = diagram[k].second; + pixel_value += weight(std::pair(px,py)) * std::exp( -((x-px)*(x-px) + (y-(py-px))*(y-(py-px))) / (2*sigma*sigma) ) / (sigma*std::sqrt(2*pi)); + } + im[i].push_back(pixel_value); + + } + } + + return im; + + } + + + + +}; // class Persistence_heat_maps_exact +} // namespace Persistence_representations +} // namespace Gudhi + +#endif // PERSISTENCE_HEAT_MAPS_EXACT_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h new file mode 100644 index 00000000..25f71e27 --- /dev/null +++ b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h @@ -0,0 +1,107 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
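// Illustration only (not part of the patch above): a minimal, self-contained sketch of the
// per-pixel quantity accumulated by Persistence_heat_maps_exact::vectorize(). The function
// name heat_map_pixel and the raw std::pair diagram type are hypothetical conveniences; only
// the arithmetic mirrors the loop in the class: each diagram point (px, py) is mapped to
// persistence coordinates (px, py - px) and contributes a weighted, normalized Gaussian.
#include <cmath>
#include <functional>
#include <utility>
#include <vector>

double heat_map_pixel(const std::vector<std::pair<double, double>>& diagram,
                      double x, double y, double sigma,
                      const std::function<double(std::pair<double, double>)>& weight) {
  const double pi = 3.14159265358979323846;
  double value = 0.0;
  for (const auto& pt : diagram) {
    double px = pt.first, py = pt.second;
    // Ordinate shifted to persistence (py - px), matching the class above.
    double dx = x - px, dy = y - (py - px);
    value += weight(pt) * std::exp(-(dx * dx + dy * dy) / (2 * sigma * sigma))
             / (sigma * std::sqrt(2 * pi));
  }
  return value;
}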
+ */ + +#ifndef LANDSCAPE_H_ +#define LANDSCAPE_H_ + +// gudhi include +#include +#include +#include + +// standard include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace Gudhi { +namespace Persistence_representations { + +/** + * \class Persistence_landscape_on_grid_exact gudhi/Persistence_landscape_on_grid_exact.h + * \brief A class implementing exact persistence landscapes by approximating them on a collection of grid points + * + * \ingroup Persistence_representations + * + * \details + * In this class, we propose a way to approximate landscapes by sampling the x-axis of the persistence diagram and evaluating the (exact) landscape functions on the sample projections onto the diagonal. Note that this is a different approximation scheme + * from the one proposed in Persistence_landscape_on_grid, which puts a grid on the diagonal, maps the persistence intervals on this grid and computes the (approximate) landscape functions on the samples. + * Hence, the difference is that we do not modify the diagram in this implementation, but the code can be slower to run. +**/ + +class Persistence_landscape_on_grid_exact { + + protected: + Persistence_diagram diagram; + int res_x, nb_ls; + double min_x, max_x; + + public: + + /** \brief Persistence_landscape_on_grid_exact constructor. + * \ingroup Persistence_landscape_on_grid_exact + * + * @param[in] _diagram persistence diagram. + * @param[in] _nb_ls number of landscape functions. + * @param[in] _min_x minimum value of samples. + * @param[in] _max_x maximum value of samples. + * @param[in] _res_x number of samples. + * + */ + Persistence_landscape_on_grid_exact(const Persistence_diagram & _diagram, int _nb_ls = 5, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; nb_ls = _nb_ls; min_x = _min_x; max_x = _max_x; res_x = _res_x;} + + /** \brief Computes the landscape approximation of a diagram. 
+ * \ingroup Persistence_landscape_on_grid_exact + * + */ + std::vector > vectorize() const { + std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); + int num_pts = diagram.size(); double step = (max_x - min_x)/res_x; + + for(int i = 0; i < res_x; i++){ + double x = min_x + i*step; double t = x / std::sqrt(2); std::vector events; + for(int j = 0; j < num_pts; j++){ + double px = diagram[j].first; double py = diagram[j].second; + if(t >= px && t <= py){ if(t >= (px+py)/2) events.push_back(std::sqrt(2)*(py-t)); else events.push_back(std::sqrt(2)*(t-px)); } + } + + std::sort(events.begin(), events.end(), [](const double & a, const double & b){return a > b;}); int nb_events = events.size(); + for (int j = 0; j < nb_ls; j++){ if(j < nb_events) ls[j].push_back(events[j]); else ls[j].push_back(0); } + } + return ls; + } + + + + +}; // class Persistence_landscape_on_grid_exact +} // namespace Persistence_representations +} // namespace Gudhi + +#endif // LANDSCAPE_H_ -- cgit v1.2.3 From 0213b55ef43f11c15a8e56117da822e3a2731f18 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Wed, 25 Apr 2018 16:50:30 +0000 Subject: renamed landscape and image code to make them fit in Pawel's denomination git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3398 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: c57a2eb71d848df7bd677a8f3fc803b0c87d2d8d --- .../example/landscape.cpp | 51 --------- .../example/persistence_image.cpp | 54 --------- .../include/gudhi/Landscape.h | 108 ------------------ .../include/gudhi/Persistence_image.h | 126 --------------------- 4 files changed, 339 deletions(-) delete mode 100644 src/Persistence_representations/example/landscape.cpp delete mode 100644 src/Persistence_representations/example/persistence_image.cpp delete mode 100644 src/Persistence_representations/include/gudhi/Landscape.h delete mode 100644 src/Persistence_representations/include/gudhi/Persistence_image.h diff --git a/src/Persistence_representations/example/landscape.cpp b/src/Persistence_representations/example/landscape.cpp deleted file mode 100644 index 5fa84a7c..00000000 --- a/src/Persistence_representations/example/landscape.cpp +++ /dev/null @@ -1,51 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
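// Illustration only (not part of the patch): a minimal sketch of the value computed for one
// sample by Persistence_landscape_on_grid_exact::vectorize() above. The function name
// landscape_value, the zero-based index k and the raw std::pair diagram type are hypothetical;
// the arithmetic follows the loop in the class: a sample x is mapped to t = x / sqrt(2), every
// interval [px, py] containing t contributes the tent value sqrt(2) * min(t - px, py - t), and
// the k-th landscape is the k-th largest contribution (0 if fewer than k+1 intervals contain t).
#include <algorithm>
#include <cmath>
#include <functional>
#include <utility>
#include <vector>

double landscape_value(const std::vector<std::pair<double, double>>& diagram,
                       double x, int k) {
  double t = x / std::sqrt(2.0);
  std::vector<double> events;
  for (const auto& pt : diagram) {
    double px = pt.first, py = pt.second;
    if (t >= px && t <= py)
      events.push_back(std::sqrt(2.0) * std::min(t - px, py - t));
  }
  // Not enough intervals alive at t for a k-th landscape value.
  if (static_cast<int>(events.size()) <= k) return 0.0;
  // Place the k-th largest contribution at position k.
  std::nth_element(events.begin(), events.begin() + k, events.end(), std::greater<double>());
  return events[k];
}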
- */ - -#include - -#include -#include -#include - -using LS = Gudhi::Persistence_representations::Landscape; - -int main(int argc, char** argv) { - - std::vector > persistence; - - persistence.push_back(std::make_pair(1, 2)); - persistence.push_back(std::make_pair(6, 8)); - persistence.push_back(std::make_pair(0, 4)); - persistence.push_back(std::make_pair(3, 8)); - - int nb_ls = 3; double min_x = 0.0; double max_x = 10.0; int res_x = 100; - - LS ls(persistence, nb_ls, min_x, max_x, res_x); - std::vector > L = ls.vectorize(); - - for(int i = 0; i < nb_ls; i++){ - for(int j = 0; j < res_x; j++) std::cout << L[i][j] << " "; - std::cout << std::endl; - } - - return 0; -} diff --git a/src/Persistence_representations/example/persistence_image.cpp b/src/Persistence_representations/example/persistence_image.cpp deleted file mode 100644 index cdce3bbf..00000000 --- a/src/Persistence_representations/example/persistence_image.cpp +++ /dev/null @@ -1,54 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include -#include - -#include -#include -#include -#include - -using PI = Gudhi::Persistence_representations::Persistence_image; -using Weight = std::function) >; - -int main(int argc, char** argv) { - - std::vector > persistence; - - persistence.push_back(std::make_pair(1, 2)); - persistence.push_back(std::make_pair(6, 8)); - persistence.push_back(std::make_pair(0, 4)); - persistence.push_back(std::make_pair(3, 8)); - - double min_x = 0.0; double max_x = 10.0; int res_x = 100; double min_y = 0.0; double max_y = 10.0; int res_y = 100; double sigma = 1.0; Weight weight = Gudhi::Persistence_representations::linear_weight; - - PI pim(persistence, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma); - std::vector > P = pim.vectorize(); - - for(int i = 0; i < res_y; i++){ - for(int j = 0; j < res_x; j++) std::cout << P[i][j] << " "; - std::cout << std::endl; - } - - return 0; -} diff --git a/src/Persistence_representations/include/gudhi/Landscape.h b/src/Persistence_representations/include/gudhi/Landscape.h deleted file mode 100644 index bbbca36b..00000000 --- a/src/Persistence_representations/include/gudhi/Landscape.h +++ /dev/null @@ -1,108 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef LANDSCAPE_H_ -#define LANDSCAPE_H_ - -// gudhi include -#include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { - -/** - * \class Landscape gudhi/Landscape.h - * \brief A class implementing landscapes. - * - * \ingroup Persistence_representations - * - * \details - * - * The landscape is a way to turn a persistence diagram into \f$L^2\f$ functions. Roughly, the idea is to see the boundaries of the rank functions as scalar functions taking values on the diagonal. - * See \cite bubenik_landscapes_2015 for more details. Here we provide a way to approximate such functions by computing their values on a set of samples. - * -**/ - -class Landscape { - - protected: - Persistence_diagram diagram; - int res_x, nb_ls; - double min_x, max_x; - - public: - - /** \brief Landscape constructor. - * \ingroup Landscape - * - * @param[in] _diagram persistence diagram. - * @param[in] _nb_ls number of landscape functions. - * @param[in] _min_x minimum value of samples. - * @param[in] _max_x maximum value of samples. - * @param[in] _res_x number of samples. - * - */ - Landscape(const Persistence_diagram & _diagram, int _nb_ls = 5, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; nb_ls = _nb_ls; min_x = _min_x; max_x = _max_x; res_x = _res_x;} - - /** \brief Computes the landscape of a diagram. - * \ingroup Landscape - * - */ - std::vector > vectorize() const { - std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); - int num_pts = diagram.size(); double step = (max_x - min_x)/res_x; - - for(int i = 0; i < res_x; i++){ - double x = min_x + i*step; double t = x / std::sqrt(2); std::vector events; - for(int j = 0; j < num_pts; j++){ - double px = diagram[j].first; double py = diagram[j].second; - if(t >= px && t <= py){ if(t >= (px+py)/2) events.push_back(std::sqrt(2)*(py-t)); else events.push_back(std::sqrt(2)*(t-px)); } - } - - std::sort(events.begin(), events.end(), [](const double & a, const double & b){return a > b;}); int nb_events = events.size(); - for (int j = 0; j < nb_ls; j++){ if(j < nb_events) ls[j].push_back(events[j]); else ls[j].push_back(0); } - } - return ls; - } - - - - -}; // class Landscape -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // LANDSCAPE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_image.h b/src/Persistence_representations/include/gudhi/Persistence_image.h deleted file mode 100644 index 76b34d8d..00000000 --- a/src/Persistence_representations/include/gudhi/Persistence_image.h +++ /dev/null @@ -1,126 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef PERSISTENCE_IMAGE_H_ -#define PERSISTENCE_IMAGE_H_ - -// gudhi include -#include -#include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { - -/** - * \class Persistence_image gudhi/Persistence_image.h - * \brief A class implementing the persistence images. - * - * \ingroup Persistence_representations - * - * \details - * - * Persistence images are a way to build images from persistence diagrams. Roughly, the idea is to center Gaussians on each diagram point, with a weight that usually depends on - * the distance to the diagonal, so that the diagram is turned into a function, and then to discretize the plane into pixels, and integrate this function on each pixel. - * See \cite Persistence_Images_2017 for more details. - * -**/ - -class Persistence_image { - - protected: - Persistence_diagram diagram; - int res_x, res_y; - double min_x, max_x, min_y, max_y; - Weight weight; - double sigma; - - public: - - /** \brief Persistence Image constructor. - * \ingroup Persistence_image - * - * @param[in] _diagram persistence diagram. - * @param[in] _min_x minimum value of pixel abscissa. - * @param[in] _max_x maximum value of pixel abscissa. - * @param[in] _res_x number of pixels for the x-direction. - * @param[in] _min_y minimum value of pixel ordinate. - * @param[in] _max_y maximum value of pixel ordinate. - * @param[in] _res_y number of pixels for the y-direction. - * @param[in] _weight weight function for the Gaussians. - * @param[in] _sigma bandwidth parameter for the Gaussians. - * - */ - Persistence_image(const Persistence_diagram & _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10, double _min_y = 0.0, double _max_y = 1.0, int _res_y = 10, const Weight & _weight = arctan_weight(1,1), double _sigma = 1.0){ - diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x; min_y = _min_y; max_y = _max_y; res_y = _res_y, weight = _weight; sigma = _sigma; - } - - /** \brief Computes the persistence image of a diagram. 
- * \ingroup Persistence_image - * - */ - std::vector > vectorize() const { - std::vector > im; for(int i = 0; i < res_y; i++) im.emplace_back(); - double step_x = (max_x - min_x)/res_x; double step_y = (max_y - min_y)/res_y; - - int num_pts = diagram.size(); - - for(int i = 0; i < res_y; i++){ - double y = min_y + i*step_y; - for(int j = 0; j < res_x; j++){ - double x = min_x + j*step_x; - - double pixel_value = 0; - for(int k = 0; k < num_pts; k++){ - double px = diagram[k].first; double py = diagram[k].second; - pixel_value += weight(std::pair(px,py)) * std::exp( -((x-px)*(x-px) + (y-(py-px))*(y-(py-px))) / (2*sigma*sigma) ) / (sigma*std::sqrt(2*pi)); - } - im[i].push_back(pixel_value); - - } - } - - return im; - - } - - - - -}; // class Persistence_image -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // PERSISTENCE_IMAGE_H_ -- cgit v1.2.3 From 4ed2c5e0d99a48d242deb3318d01731cd21fd1d0 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Mon, 30 Apr 2018 12:20:54 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3408 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 6bfd16f3520497204e4bb67e0705ce120af23ec9 --- src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index 235918fe..d8ed0d98 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -232,8 +232,8 @@ class Sliced_Wasserstein { } // Sort angles. - std::sort(angles1.begin(), angles1.end(), [=](std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); - std::sort(angles2.begin(), angles2.end(), [=](std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); + std::sort(angles1.begin(), angles1.end(), [=](const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); + std::sort(angles2.begin(), angles2.end(), [=](const std::pair >& p1, const std::pair >& p2){return (p1.first < p2.first);}); // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). 
std::vector orderp1, orderp2; -- cgit v1.2.3 From 2aa8114f1b8e55ef3433461c72f102868ce55866 Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 3 May 2018 13:02:44 +0000 Subject: compile on linux git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3410 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 01b43f553d2095f852bb40d15abe57aa91c8f3ff --- src/Toplex_map/benchmark/CMakeLists.txt | 4 + src/Toplex_map/benchmark/chrono.cpp | 137 +++++++++++++++++++++ src/Toplex_map/benchmarks/CMakeLists.txt | 4 - src/Toplex_map/benchmarks/chrono.cpp | 137 --------------------- src/Toplex_map/include/gudhi/Filtered_toplex_map.h | 4 +- src/Toplex_map/include/gudhi/Lazy_Toplex_map.h | 1 + 6 files changed, 144 insertions(+), 143 deletions(-) create mode 100644 src/Toplex_map/benchmark/CMakeLists.txt create mode 100644 src/Toplex_map/benchmark/chrono.cpp delete mode 100644 src/Toplex_map/benchmarks/CMakeLists.txt delete mode 100644 src/Toplex_map/benchmarks/chrono.cpp diff --git a/src/Toplex_map/benchmark/CMakeLists.txt b/src/Toplex_map/benchmark/CMakeLists.txt new file mode 100644 index 00000000..2341fe06 --- /dev/null +++ b/src/Toplex_map/benchmark/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 2.6) +project(Toplex_map_examples) + +add_executable(chrono chrono.cpp) diff --git a/src/Toplex_map/benchmark/chrono.cpp b/src/Toplex_map/benchmark/chrono.cpp new file mode 100644 index 00000000..10449b48 --- /dev/null +++ b/src/Toplex_map/benchmark/chrono.cpp @@ -0,0 +1,137 @@ +#include +#include +#include + +#include +#include + +using namespace Gudhi; + +typedef Simplex typeVectorVertex; +typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; + +class ST_wrapper { + +public: + void insert_simplex(const Simplex& tau); + bool membership(const Simplex& tau); + Vertex contraction(const Vertex x, const Vertex y); + std::size_t num_simplices(); + +private: + Simplex_tree<> simplexTree; + void erase_max(const Simplex& sigma); +}; + +void ST_wrapper::insert_simplex(const Simplex& tau){ + simplexTree.insert_simplex_and_subfaces(tau); +} + +bool ST_wrapper::membership(const Simplex& tau) { + return simplexTree.find(tau) != simplexTree.null_simplex(); +} + +void ST_wrapper::erase_max(const Simplex& sigma){ + if(membership(sigma)) + simplexTree.remove_maximal_simplex(simplexTree.find(sigma)); +} + +Vertex ST_wrapper::contraction(const Vertex x, const Vertex y){ + Simplex sx; sx.insert(x); + auto hx = simplexTree.find(sx); + if(hx != simplexTree.null_simplex()) + for(auto h : simplexTree.cofaces_simplex_range(hx,0)){ + auto sr = simplexTree.simplex_vertex_range(h); + Simplex sigma(sr.begin(),sr.end()); + erase_max(sigma); + sigma.erase(x); + sigma.insert(y); + insert_simplex(sigma); + } + return y; +} + +std::size_t ST_wrapper::num_simplices(){ + return simplexTree.num_simplices(); +} + + + +int n = 300; + +int nb_insert_simplex1 = 3000; +int nb_membership1 = 4000; +int nb_contraction = 300; +int nb_insert_simplex2 = 3000; +int nb_membership2 = 400000; + +Simplex random_simplex(int n, int d){ + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(1, n); + Simplex s; + while(s.size()!=d) + s.insert(dis(gen)); + return s; +} + +std::vector r_vector_simplices(int n, int max_d, int m){ + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(1, max_d); + std::vector v; + for(int i=0; i +void chrono(int n, int d){ + complex_type K; + std::vector simplices_insert_simplex1 = 
r_vector_simplices(n,d,nb_insert_simplex1); + std::vector simplices_membership1 = r_vector_simplices(n,d,nb_membership1); + std::vector simplices_insert_simplex2 = r_vector_simplices(n - 2*nb_contraction,d,nb_insert_simplex2); + std::vector simplices_membership2 = r_vector_simplices(n - 2*nb_contraction,d,nb_membership2); + std::chrono::time_point start, end; + + for(const Simplex& s : simplices_insert_simplex1) + K.insert_simplex(s); + + for(const Simplex& s : simplices_membership1) + K.membership(s); + + start = std::chrono::system_clock::now(); + for(int i = 0; i<=nb_contraction; i++) + K.contraction(n-2*i,n-2*i-1); + end = std::chrono::system_clock::now(); + auto c3 = std::chrono::duration_cast(end-start).count(); + + start = std::chrono::system_clock::now(); + for(const Simplex& s : simplices_insert_simplex2) + K.insert_simplex(s); + end = std::chrono::system_clock::now(); + auto c1 = std::chrono::duration_cast(end-start).count(); + + start = std::chrono::system_clock::now(); + for(const Simplex& s : simplices_membership2) + K.membership(s); + end = std::chrono::system_clock::now(); + auto c2 = std::chrono::duration_cast(end-start).count(); + + std::cout << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_simplices() << std::endl; +} + +int main(){ + for(int d=5;d<=40;d+=5){ + std::cout << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl; + std::cout << "T Map \t \t"; + chrono(n,d); + std::cout << "Lazy \t \t"; + chrono(n,d); + if(d<=15){ + std::cout << "ST \t \t"; + chrono(n,d); + } + std::cout << std::endl; + } +} diff --git a/src/Toplex_map/benchmarks/CMakeLists.txt b/src/Toplex_map/benchmarks/CMakeLists.txt deleted file mode 100644 index 2341fe06..00000000 --- a/src/Toplex_map/benchmarks/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(Toplex_map_examples) - -add_executable(chrono chrono.cpp) diff --git a/src/Toplex_map/benchmarks/chrono.cpp b/src/Toplex_map/benchmarks/chrono.cpp deleted file mode 100644 index d93d1e1f..00000000 --- a/src/Toplex_map/benchmarks/chrono.cpp +++ /dev/null @@ -1,137 +0,0 @@ -#include -#include -#include - -#include -#include - -using namespace Gudhi; - -typedef Simplex typeVectorVertex; -typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; - -class ST_wrapper { - -public: - void insert_simplex(const Simplex& tau); - bool membership(const Simplex& tau); - Vertex contraction(const Vertex x, const Vertex y); - std::size_t num_simplices(); - -private: - Simplex_tree<> simplexTree; - void erase_max(const Simplex& sigma); -}; - -void ST_wrapper::insert_simplex(const Simplex& tau){ - simplexTree.insert_simplex_and_subfaces(tau); -} - -bool ST_wrapper::membership(const Simplex& tau) { - return simplexTree.find(tau) != simplexTree.null_simplex(); -} - -void ST_wrapper::erase_max(const Simplex& sigma){ - if(membership(sigma)) - simplexTree.remove_maximal_simplex(simplexTree.find(sigma)); -} - -Vertex ST_wrapper::contraction(const Vertex x, const Vertex y){ - Simplex sx; sx.insert(x); - auto hx = simplexTree.find(sx); - if(hx != simplexTree.null_simplex()) - for(auto h : simplexTree.cofaces_simplex_range(hx,0)){ - auto sr = simplexTree.simplex_vertex_range(h); - Simplex sigma(sr.begin(),sr.end()); - erase_max(sigma); - sigma.erase(x); - sigma.insert(y); - insert_simplex(sigma); - } - return y; -} - -std::size_t ST_wrapper::num_simplices(){ - return simplexTree.num_simplices(); -} - - - -int n = 300; - -int nb_insert_simplex1 = 3000; -int nb_membership1 = 
4000; -int nb_contraction = 300; -int nb_insert_simplex2 = 3000; -int nb_membership2 = 400000; - -Simplex random_simplex(int n, int d){ - std::random_device rd; - std::mt19937 gen(rd()); - std::uniform_int_distribution<> dis(1, n); - Simplex s; - while(s.size()!=d) - s.insert(dis(gen)); - return s; -} - -std::vector r_vector_simplices(int n, int max_d, int m){ - std::random_device rd; - std::mt19937 gen(rd()); - std::uniform_int_distribution<> dis(1, max_d); - std::vector v; - for(int i=0; i -void chrono(int n, int d){ - complex_type K; - std::vector simplices_insert_simplex1 = r_vector_simplices(n,d,nb_insert_simplex1); - std::vector simplices_membership1 = r_vector_simplices(n,d,nb_membership1); - std::vector simplices_insert_simplex2 = r_vector_simplices(n - 2*nb_contraction,d,nb_insert_simplex2); - std::vector simplices_membership2 = r_vector_simplices(n - 2*nb_contraction,d,nb_membership2); - std::chrono::time_point start, end; - - for(const Simplex& s : simplices_insert_simplex1) - K.insert_simplex(s); - - for(const Simplex& s : simplices_membership1) - K.membership(s); - - start = std::chrono::system_clock::now(); - for(int i = 0; i<=nb_contraction; i++) - K.contraction(n-2*i,n-2*i-1); - end = std::chrono::system_clock::now(); - auto c3 = std::chrono::duration_cast(end-start).count(); - - start = std::chrono::system_clock::now(); - for(const Simplex& s : simplices_insert_simplex2) - K.insert_simplex(s); - end = std::chrono::system_clock::now(); - auto c1 = std::chrono::duration_cast(end-start).count(); - - start = std::chrono::system_clock::now(); - for(const Simplex& s : simplices_membership2) - K.membership(s); - end = std::chrono::system_clock::now(); - auto c2 = std::chrono::duration_cast(end-start).count(); - - std::cout << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_simplices() << std::endl; -} - -int main(){ - for(int d=5;d<=40;d+=5){ - std::cout << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl; - std::cout << "T Map \t \t"; - chrono(n,d); - std::cout << "Lazy \t \t"; - chrono(n,d); - if(d<=15){ - std::cout << "ST \t \t"; - chrono(n,d); - } - std::cout << std::endl; - } -} diff --git a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h index a3653acd..ed65e36f 100644 --- a/src/Toplex_map/include/gudhi/Filtered_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Filtered_toplex_map.h @@ -37,7 +37,7 @@ public: * in the Filtered_toplex_map. * \ingroup toplex_map */ template - std::pair insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = nan("")); + std::pair insert_simplex_and_subfaces(const Input_vertex_range &vertex_range, Filtration_value f = std::numeric_limits::quiet_NaN()); /** Gives the filtration of the input simplex. 
* \ingroup toplex_map */ @@ -68,7 +68,7 @@ Filtered_toplex_map::Filtration_value Filtered_toplex_map::filtration(const Inpu for(auto kv : toplex_maps) if(kv.second->membership(vertex_range)) return kv.first; //min only because a map is ordered - return nan(""); + return std::numeric_limits::quiet_NaN() ; } template diff --git a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h index 481d33a1..50785b5a 100644 --- a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h @@ -3,6 +3,7 @@ #include #include +#include namespace Gudhi { -- cgit v1.2.3 From b088d634d6d5496548d7feb93cbd1bce1ac94d6a Mon Sep 17 00:00:00 2001 From: fgodi Date: Thu, 3 May 2018 13:22:25 +0000 Subject: strange git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3411 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 68a8c63789279eab7c0bb08df74d3ed52d2a73dd --- src/Toplex_map/benchmark/chrono.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Toplex_map/benchmark/chrono.cpp b/src/Toplex_map/benchmark/chrono.cpp index 10449b48..bbe28da0 100644 --- a/src/Toplex_map/benchmark/chrono.cpp +++ b/src/Toplex_map/benchmark/chrono.cpp @@ -7,8 +7,8 @@ using namespace Gudhi; -typedef Simplex typeVectorVertex; -typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; +typedef Toplex_map::Vertex Vertex; +typedef Toplex_map::Simplex Simplex; class ST_wrapper { -- cgit v1.2.3 From b2f3d32845b9e3dac752311fbd3b750d8b6ba030 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 3 May 2018 15:54:01 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3412 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 612ea7ce6d4d5d743e606f9d7b443e21b25d8034 --- .../example/persistence_heat_maps_exact.cpp | 3 ++- .../example/persistence_landscape_on_grid_exact.cpp | 3 ++- .../example/persistence_weighted_gaussian.cpp | 1 + .../example/sliced_wasserstein.cpp | 1 + .../include/gudhi/common_persistence_representations.h | 16 +++++++++------- 5 files changed, 15 insertions(+), 9 deletions(-) diff --git a/src/Persistence_representations/example/persistence_heat_maps_exact.cpp b/src/Persistence_representations/example/persistence_heat_maps_exact.cpp index 30346d78..f15b710d 100644 --- a/src/Persistence_representations/example/persistence_heat_maps_exact.cpp +++ b/src/Persistence_representations/example/persistence_heat_maps_exact.cpp @@ -28,12 +28,13 @@ #include #include +using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; using PI = Gudhi::Persistence_representations::Persistence_heat_maps_exact; using Weight = std::function) >; int main(int argc, char** argv) { - std::vector > persistence; + Persistence_diagram persistence; persistence.push_back(std::make_pair(1, 2)); persistence.push_back(std::make_pair(6, 8)); diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp index 29416693..da27bc5a 100644 --- a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp +++ b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp @@ -26,11 +26,12 @@ #include #include +using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; using LS = Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact; int main(int argc, char** argv) { - 
std::vector > persistence; + Persistence_diagram persistence; persistence.push_back(std::make_pair(1, 2)); persistence.push_back(std::make_pair(6, 8)); diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp index db60755f..7945e4f1 100644 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp @@ -26,6 +26,7 @@ #include #include +using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; using PWG = Gudhi::Persistence_representations::Persistence_weighted_gaussian; int main(int argc, char** argv) { diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index d37cb23c..2104e2b2 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -26,6 +26,7 @@ #include #include +using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; using SW = Gudhi::Persistence_representations::Sliced_Wasserstein; int main(int argc, char** argv) { diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 884fce58..1960e370 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -28,6 +28,15 @@ #include #include + + +namespace Gudhi { +namespace Persistence_representations { +// this file contain an implementation of some common procedures used in Persistence_representations. + +static constexpr double pi = boost::math::constants::pi(); + + /** * In this module, we use the name Persistence_diagram for the representation of a diagram in a vector of pairs of two double. */ @@ -37,13 +46,6 @@ using Persistence_diagram = std::vector >; * In this module, we use the name Weight for the representation of a function taking a pair of two double and returning a double. */ using Weight = std::function) >; - -namespace Gudhi { -namespace Persistence_representations { -// this file contain an implementation of some common procedures used in Persistence_representations. 
- -static constexpr double pi = boost::math::constants::pi(); - // double epsi = std::numeric_limits::epsilon(); double epsi = 0.000005; -- cgit v1.2.3 From e778857da09c25ed351b5e77dcba319ce44fdb7f Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 10 May 2018 05:52:21 +0000 Subject: added betti sequences git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3431 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 4680471ce781468c247c40fb5e6eb699dce890c8 --- .../example/CMakeLists.txt | 5 ++ .../example/betti_sequence.cpp | 49 +++++++++++ .../persistence_landscape_on_grid_exact.cpp | 4 +- .../include/gudhi/Betti_sequence.h | 95 ++++++++++++++++++++++ .../gudhi/Persistence_landscape_on_grid_exact.h | 25 +++--- 5 files changed, 164 insertions(+), 14 deletions(-) create mode 100644 src/Persistence_representations/example/betti_sequence.cpp create mode 100644 src/Persistence_representations/include/gudhi/Betti_sequence.h diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 3142f19b..9be22085 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -46,3 +46,8 @@ add_executable ( Persistence_landscape_on_grid_exact persistence_landscape_on_gr add_test(NAME Persistence_landscape_on_grid_exact COMMAND $) install(TARGETS Persistence_landscape_on_grid_exact DESTINATION bin) + +add_executable ( Betti_sequence betti_sequence.cpp ) +add_test(NAME Betti_sequence + COMMAND $) +install(TARGETS Betti_sequence DESTINATION bin) diff --git a/src/Persistence_representations/example/betti_sequence.cpp b/src/Persistence_representations/example/betti_sequence.cpp new file mode 100644 index 00000000..a422a822 --- /dev/null +++ b/src/Persistence_representations/example/betti_sequence.cpp @@ -0,0 +1,49 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include + +#include +#include +#include + +using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; +using BS = Gudhi::Persistence_representations::Betti_sequence; + +int main(int argc, char** argv) { + + Persistence_diagram persistence; + + persistence.push_back(std::make_pair(1, 2)); + persistence.push_back(std::make_pair(6, 8)); + persistence.push_back(std::make_pair(0, 4)); + persistence.push_back(std::make_pair(3, 8)); + + double min_x = 0; double max_x = 8; int res_x = 1000; + + BS bs(persistence, min_x, max_x, res_x); + std::vector B = bs.vectorize(); + + for(int i = 0; i < res_x; i++) std::cout << B[i] << ", "; + + return 0; +} diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp index da27bc5a..9ce42649 100644 --- a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp +++ b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp @@ -38,13 +38,13 @@ int main(int argc, char** argv) { persistence.push_back(std::make_pair(0, 4)); persistence.push_back(std::make_pair(3, 8)); - int nb_ls = 3; double min_x = 0.0; double max_x = 10.0; int res_x = 100; + int nb_ls = 2; double min_x = 0; double max_x = 8; int res_x = 1000; LS ls(persistence, nb_ls, min_x, max_x, res_x); std::vector > L = ls.vectorize(); for(int i = 0; i < nb_ls; i++){ - for(int j = 0; j < res_x; j++) std::cout << L[i][j] << " "; + for(int j = 0; j < res_x; j++) std::cout << L[i][j] << ", "; std::cout << std::endl; } diff --git a/src/Persistence_representations/include/gudhi/Betti_sequence.h b/src/Persistence_representations/include/gudhi/Betti_sequence.h new file mode 100644 index 00000000..57c52ad2 --- /dev/null +++ b/src/Persistence_representations/include/gudhi/Betti_sequence.h @@ -0,0 +1,95 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carriere + * + * Copyright (C) 2018 INRIA (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef BETTI_SEQUENCE_H_ +#define BETTI_SEQUENCE_H_ + +// gudhi include +#include +#include +#include + +// standard include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace Gudhi { +namespace Persistence_representations { + +/** + * \class Betti_sequence gudhi/Betti_sequence.h + * \brief A class implementing Betti sequences + * + * \ingroup Persistence_representations + * + * \details +**/ + +class Betti_sequence { + + protected: + Persistence_diagram diagram; + int res_x, nb_cv; + double min_x, max_x; + + public: + + /** \brief Betti_sequence constructor. + * \ingroup Betti_sequence + * + * @param[in] _diagram persistence diagram. + * @param[in] _min_x minimum value of samples. 
+ * @param[in] _max_x maximum value of samples. + * @param[in] _res_x number of samples. + * + */ + Betti_sequence(const Persistence_diagram & _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x;} + + /** \brief Computes the Betti sequences of a diagram. + * \ingroup Betti_sequence + * + */ + std::vector vectorize() const { + int num_pts = diagram.size(); double step = (max_x - min_x)/(res_x - 1); + std::vector bs(res_x); for(int i = 0; i < res_x; i++) bs[i] = 0; + for(int j = 0; j < num_pts; j++){ + double px = diagram[j].first; double py = diagram[j].second; + int first = std::ceil((px-min_x)/step); int last = std::ceil((py-min_x)/step); + for(int i = first; i < last; i++) bs[i] += 1; + } + + return bs; + } + +}; // class Betti_sequence +} // namespace Persistence_representations +} // namespace Gudhi + +#endif // BETTI_SEQUENCE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h index 25f71e27..52f24195 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h +++ b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h @@ -82,24 +82,25 @@ class Persistence_landscape_on_grid_exact { */ std::vector > vectorize() const { std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); - int num_pts = diagram.size(); double step = (max_x - min_x)/res_x; + int num_pts = diagram.size(); double step = (max_x - min_x)/(res_x - 1); + + std::vector > ls_t; for(int i = 0; i < res_x; i++) ls_t.emplace_back(); + for(int j = 0; j < num_pts; j++){ + double px = diagram[j].first; double py = diagram[j].second; double mid = (px+py)/2; + int first = std::ceil((px-min_x)/step); int middle = std::ceil((mid-min_x)/step); int last = std::ceil((py-min_x)/step); double x = min_x + first*step; + for(int i = first; i < middle; i++){ double value = std::sqrt(2)*(x-px); ls_t[i].push_back(value); x += step; } + for(int i = middle; i < last; i++){ double value = std::sqrt(2)*(py-x); ls_t[i].push_back(value); x += step; } + } for(int i = 0; i < res_x; i++){ - double x = min_x + i*step; double t = x / std::sqrt(2); std::vector events; - for(int j = 0; j < num_pts; j++){ - double px = diagram[j].first; double py = diagram[j].second; - if(t >= px && t <= py){ if(t >= (px+py)/2) events.push_back(std::sqrt(2)*(py-t)); else events.push_back(std::sqrt(2)*(t-px)); } - } - - std::sort(events.begin(), events.end(), [](const double & a, const double & b){return a > b;}); int nb_events = events.size(); - for (int j = 0; j < nb_ls; j++){ if(j < nb_events) ls[j].push_back(events[j]); else ls[j].push_back(0); } + std::sort(ls_t[i].begin(), ls_t[i].end(), [](const double & a, const double & b){return a > b;}); + int nb_events_i = ls_t[i].size(); + for (int j = 0; j < nb_ls; j++){ if(j < nb_events_i) ls[j].push_back(ls_t[i][j]); else ls[j].push_back(0); } } + return ls; } - - - }; // class Persistence_landscape_on_grid_exact } // namespace Persistence_representations } // namespace Gudhi -- cgit v1.2.3 From 4ce8d6e4f0eb5d738dba039e0e66a3c7777a9d49 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 15 May 2018 11:56:06 +0000 Subject: toplex_map_chrono git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3438 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 196ab30aa05e434634f2c5c3149c13eb3b82bdca --- 
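// Illustration only (not part of the patch): a minimal sketch of what one entry of the vector
// returned by Betti_sequence::vectorize() above represents. The function name betti_at and the
// raw std::pair diagram type are hypothetical; up to the ceil-based binning used in the class,
// entry i approximates the number of persistence intervals alive at the sample
// t_i = min_x + i * (max_x - min_x) / (res_x - 1).
#include <utility>
#include <vector>

int betti_at(const std::vector<std::pair<double, double>>& diagram, double t) {
  int count = 0;
  for (const auto& pt : diagram)
    if (pt.first <= t && t < pt.second) ++count;  // interval [birth, death) contains t
  return count;
}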
src/Toplex_map/benchmark/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Toplex_map/benchmark/CMakeLists.txt b/src/Toplex_map/benchmark/CMakeLists.txt index 2341fe06..e4cfab6f 100644 --- a/src/Toplex_map/benchmark/CMakeLists.txt +++ b/src/Toplex_map/benchmark/CMakeLists.txt @@ -1,4 +1,4 @@ cmake_minimum_required(VERSION 2.6) project(Toplex_map_examples) -add_executable(chrono chrono.cpp) +add_executable(toplex_map_chrono chrono.cpp) -- cgit v1.2.3 From ebdfe5e949b684e88b725a755c3c80c6e9083ef6 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 15 May 2018 12:17:21 +0000 Subject: compile with gcc git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3439 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 9faf621edc02f2758cec654326a18ebfda07f546 --- src/Toplex_map/benchmark/chrono.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Toplex_map/benchmark/chrono.cpp b/src/Toplex_map/benchmark/chrono.cpp index bbe28da0..e65dcba2 100644 --- a/src/Toplex_map/benchmark/chrono.cpp +++ b/src/Toplex_map/benchmark/chrono.cpp @@ -68,7 +68,7 @@ int nb_membership2 = 400000; Simplex random_simplex(int n, int d){ std::random_device rd; std::mt19937 gen(rd()); - std::uniform_int_distribution<> dis(1, n); + std::uniform_int_distribution dis(1, n); Simplex s; while(s.size()!=d) s.insert(dis(gen)); @@ -78,7 +78,7 @@ Simplex random_simplex(int n, int d){ std::vector r_vector_simplices(int n, int max_d, int m){ std::random_device rd; std::mt19937 gen(rd()); - std::uniform_int_distribution<> dis(1, max_d); + std::uniform_int_distribution dis(1, max_d); std::vector v; for(int i=0; i Date: Tue, 15 May 2018 12:23:56 +0000 Subject: fake simplex tree in example git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3440 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 6703d83ee05c895fb08f2d8a46db758654f9eb8c --- src/Toplex_map/example/Simple_toplex_map.cpp | 2 +- src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Toplex_map/example/Simple_toplex_map.cpp b/src/Toplex_map/example/Simple_toplex_map.cpp index b165af8a..4fa735a6 100644 --- a/src/Toplex_map/example/Simple_toplex_map.cpp +++ b/src/Toplex_map/example/Simple_toplex_map.cpp @@ -21,7 +21,7 @@ */ #include -#include +#include "gudhi/Fake_simplex_tree.h" #include #include // for pair diff --git a/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp b/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp index aad31554..3df0cbd9 100644 --- a/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp +++ b/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp @@ -21,7 +21,7 @@ */ #include -#include +#include "gudhi/Fake_simplex_tree.h" #include #include -- cgit v1.2.3 From 6a450d80816647cbd5a26fbe62b7573c5f7b7ec7 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 15 May 2018 12:24:18 +0000 Subject: fake simplex tree in example git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3441 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 7240acaf10216c579d0c395559e21287bbf96837 --- src/Toplex_map/example/Fake_simplex_tree.h | 194 +++++++++++++++++++++++++++++ 1 file changed, 194 insertions(+) create mode 100644 src/Toplex_map/example/Fake_simplex_tree.h diff --git a/src/Toplex_map/example/Fake_simplex_tree.h b/src/Toplex_map/example/Fake_simplex_tree.h new file mode 100644 index 00000000..8187e24e --- /dev/null +++ 
b/src/Toplex_map/example/Fake_simplex_tree.h @@ -0,0 +1,194 @@ +#ifndef FAKE_SIMPLEX_TREE_H +#define FAKE_SIMPLEX_TREE_H + +#include + +#include +#include + +#include +#include + +namespace Gudhi { + +struct Visitor { + Lazy_Toplex_map* tm; + + Visitor(Lazy_Toplex_map* tm) + :tm(tm) + {} + + template + void clique(const Clique& c, const Graph& g) + { + tm->insert_simplex(c); + } +}; + +/** Fake_simplex_tree is a wrapper for Filtered_toplex_map which has the interface of the Simplex_tree. + * Mostly for retro-compatibility purpose. If you use a function that output non maximal simplices, it will be non efficient. + * \ingroup toplex_map */ +class Fake_simplex_tree : public Filtered_toplex_map { + +public: + + /** Handle type to a vertex contained in the simplicial complex. + * \ingroup toplex_map */ + typedef Toplex_map::Vertex Vertex_handle; + + /** Handle type to a simplex contained in the simplicial complex. + * \ingroup toplex_map */ + typedef Toplex_map::Simplex Simplex_handle; + + typedef void Insertion_result_type; + + /** Inserts the flag complex of a given range `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` + * in the simplicial complex. + * \ingroup toplex_map */ + template + void insert_graph(const OneSkeletonGraph& skel_graph); + + /** Do actually nothing. + * \ingroup toplex_map */ + void expansion(int max_dim); + + /** Returns the number of vertices stored i.e. the number of max simplices + * \ingroup toplex_map */ + std::size_t num_vertices() const; + + /** Returns the dimension of the complex. + * \ingroup toplex_map */ + std::size_t dimension() const; + + /** Returns the dimension of a given simplex in the complex. + * \ingroup toplex_map */ + std::size_t dimension(Simplex_ptr& sptr) const; + + /** Returns the number of simplices stored i.e. the number of maximal simplices. + * \ingroup toplex_map */ + std::size_t num_simplices() const; + + /** Returns a range over the vertices of a simplex. + * \ingroup toplex_map */ + Toplex_map::Simplex simplex_vertex_range(const Simplex& s) const; + + /** Returns a set of all maximal (critical if there is filtration values) simplices. + * \ingroup toplex_map */ + std::vector max_simplices() const; + + /** Returns all the simplices, of max dimension d if a parameter d is given. + * \ingroup toplex_map */ + std::vector filtration_simplex_range(int d=std::numeric_limits::max()) const; + + /** Returns all the simplices of max dimension d + * \ingroup toplex_map */ + std::vector skeleton_simplex_range(int d) const; + + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); + + +protected: + + /** \internal Does all the facets of the given simplex belong to the complex ? 
+ * \ingroup toplex_map */ + template + bool all_facets_inside(const Input_vertex_range &vertex_range) const; + +}; + +template +void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ + toplex_maps.emplace(nan(""), new Lazy_Toplex_map()); + using vertex_iterator = typename boost::graph_traits::vertex_iterator; + vertex_iterator vi, vi_end; + for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { + Simplex s; s.insert(*vi); + insert_simplex_and_subfaces(s); + } + bron_kerbosch_all_cliques(skel_graph, Visitor(this->toplex_maps.at(nan("")))); +} + +void Fake_simplex_tree::expansion(int max_dim){} + +template +bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range) const{ + Simplex sigma(vertex_range); + for(const Simplex& s : facets(sigma)) + if(!membership(s)) return false; + return true; +} +/* +std::size_t Fake_simplex_tree::dimension() const { + std::size_t max = 0; + for(const Simplex& s : max_simplices()) + max = std::max(max, s.size()); + return max-1; +} +*/ +std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ + return sptr->size(); +} +/* +std::size_t Fake_simplex_tree::num_simplices() const { + //return filtration_simplex_range().size(); + return max_simplices().size(); +} +*/ +std::size_t Fake_simplex_tree::num_vertices() const { + /* + std::unordered_set vertices; + for(const Toplex_map::Simplex& s : max_simplices()) + for (Toplex_map::Vertex v : s) + vertices.emplace(v); + return vertices.size(); + */ + return 0; +} + +Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { + return s; +} + +/* +std::vector Fake_simplex_tree::max_simplices() const{ + std::vector max_s; + for(auto kv : toplex_maps) + for(const Toplex_map::Simplex_ptr& sptr : kv.second->maximal_cofaces(Simplex())) + max_s.emplace_back(*sptr); + return max_s; +} +*/ +/* +std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ + std::vector m = max_simplices(); + std::vector range; + Toplex_map::Simplex_ptr_set seen; + while(m.begin()!=m.end()){ + Toplex_map::Simplex s(m.back()); + m.pop_back(); + if(seen.find(get_key(s))==seen.end()){ + if((int) s.size()-1 <=d) + range.emplace_back(s); + seen.emplace(get_key(s)); + if(s.size()>0) + for(Simplex& sigma : facets(s)) + m.emplace_back(sigma); + } + } + return range; +} + +std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ + return filtration_simplex_range(d); +}*/ + +Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ + for(auto kv : toplex_maps) + kv.second->contraction(x,y); + return y; +} + +} //namespace Gudhi + +#endif /* FAKE_SIMPLEX_TREE_H */ + -- cgit v1.2.3 From e94d787c89a7c9a71c86118bc3e048241e9c5ca1 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 15 May 2018 14:16:45 +0000 Subject: Add examples with Fake_simplex_tree git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3442 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ccdd8f43c0e86a98efd3f9d7e9c7b8728ebd27a7 --- src/Toplex_map/benchmark/CMakeLists.txt | 2 +- src/Toplex_map/example/CMakeLists.txt | 5 +++++ src/Toplex_map/example/Fake_simplex_tree.h | 19 ++++++---------- src/Toplex_map/example/Simple_toplex_map.cpp | 26 +++++++++++++--------- .../example/Toplex_map_from_cliques_of_graph.cpp | 5 ++--- 5 files changed, 30 insertions(+), 27 deletions(-) create mode 100644 src/Toplex_map/example/CMakeLists.txt diff --git a/src/Toplex_map/benchmark/CMakeLists.txt 
b/src/Toplex_map/benchmark/CMakeLists.txt index e4cfab6f..2c67892c 100644 --- a/src/Toplex_map/benchmark/CMakeLists.txt +++ b/src/Toplex_map/benchmark/CMakeLists.txt @@ -1,4 +1,4 @@ cmake_minimum_required(VERSION 2.6) -project(Toplex_map_examples) +project(Toplex_map_benchmark) add_executable(toplex_map_chrono chrono.cpp) diff --git a/src/Toplex_map/example/CMakeLists.txt b/src/Toplex_map/example/CMakeLists.txt new file mode 100644 index 00000000..051d7bcd --- /dev/null +++ b/src/Toplex_map/example/CMakeLists.txt @@ -0,0 +1,5 @@ +cmake_minimum_required(VERSION 2.6) +project(Toplex_map_examples) + +add_executable(Toplex_map_example_simple Simple_toplex_map.cpp) +add_executable(Toplex_map_example_from_cliques_of_graph Toplex_map_from_cliques_of_graph.cpp) diff --git a/src/Toplex_map/example/Fake_simplex_tree.h b/src/Toplex_map/example/Fake_simplex_tree.h index 8187e24e..c3d87e47 100644 --- a/src/Toplex_map/example/Fake_simplex_tree.h +++ b/src/Toplex_map/example/Fake_simplex_tree.h @@ -5,6 +5,7 @@ #include #include +#include #include #include @@ -117,39 +118,34 @@ bool Fake_simplex_tree::all_facets_inside(const Input_vertex_range &vertex_range if(!membership(s)) return false; return true; } -/* + std::size_t Fake_simplex_tree::dimension() const { std::size_t max = 0; for(const Simplex& s : max_simplices()) max = std::max(max, s.size()); return max-1; } -*/ + std::size_t Fake_simplex_tree::dimension(Simplex_ptr& sptr) const{ return sptr->size(); } -/* + std::size_t Fake_simplex_tree::num_simplices() const { - //return filtration_simplex_range().size(); return max_simplices().size(); } -*/ + std::size_t Fake_simplex_tree::num_vertices() const { - /* std::unordered_set vertices; for(const Toplex_map::Simplex& s : max_simplices()) for (Toplex_map::Vertex v : s) vertices.emplace(v); return vertices.size(); - */ - return 0; } Toplex_map::Simplex Fake_simplex_tree::simplex_vertex_range(const Simplex& s) const { return s; } -/* std::vector Fake_simplex_tree::max_simplices() const{ std::vector max_s; for(auto kv : toplex_maps) @@ -157,8 +153,7 @@ std::vector Fake_simplex_tree::max_simplices() const{ max_s.emplace_back(*sptr); return max_s; } -*/ -/* + std::vector Fake_simplex_tree::filtration_simplex_range(int d) const{ std::vector m = max_simplices(); std::vector range; @@ -180,7 +175,7 @@ std::vector Fake_simplex_tree::filtration_simplex_range(int std::vector Fake_simplex_tree::skeleton_simplex_range(int d) const{ return filtration_simplex_range(d); -}*/ +} Toplex_map::Vertex Fake_simplex_tree::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ for(auto kv : toplex_maps) diff --git a/src/Toplex_map/example/Simple_toplex_map.cpp b/src/Toplex_map/example/Simple_toplex_map.cpp index 4fa735a6..d383e84b 100644 --- a/src/Toplex_map/example/Simple_toplex_map.cpp +++ b/src/Toplex_map/example/Simple_toplex_map.cpp @@ -21,7 +21,7 @@ */ #include -#include "gudhi/Fake_simplex_tree.h" +#include "Fake_simplex_tree.h" #include #include // for pair @@ -184,10 +184,12 @@ int main(int argc, char * const argv[]) { << " simplices - dimension is " << t_map.dimension() << "\n"; std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n"; for (auto f_simplex : t_map.filtration_simplex_range()) { - std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; - for (auto vertex : t_map.simplex_vertex_range(f_simplex)) - std::cout << "(" << vertex << ")"; - std::cout << std::endl; + if (f_simplex.size() > 0) { + std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; + 
for (auto vertex : t_map.simplex_vertex_range(f_simplex)) + std::cout << "(" << vertex << ")"; + std::cout << std::endl; + } } // [0.1] 0 // [0.1] 1 @@ -201,13 +203,15 @@ int main(int argc, char * const argv[]) { std::cout << std::endl << std::endl; - std::cout << "Iterator on skeleton:" << std::endl; - for (auto f_simplex : t_map.skeleton_simplex_range()) { - std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; - for (auto vertex : t_map.simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; + std::cout << "Iterator on skeleton[1]:" << std::endl; + for (auto f_simplex : t_map.skeleton_simplex_range(1)) { + if (f_simplex.size() > 0) { + std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; + for (auto vertex : t_map.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; } - std::cout << std::endl; } return 0; diff --git a/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp b/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp index 3df0cbd9..c43f1b69 100644 --- a/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp +++ b/src/Toplex_map/example/Toplex_map_from_cliques_of_graph.cpp @@ -21,7 +21,7 @@ */ #include -#include "gudhi/Fake_simplex_tree.h" +#include "Fake_simplex_tree.h" #include #include @@ -82,7 +82,7 @@ int main(int argc, char * const argv[]) { std::cout << std::endl << std::endl; std::cout << "Iterator on skeleton:" << std::endl; - for (auto f_simplex : t_map.skeleton_simplex_range()) { + for (auto f_simplex : t_map.skeleton_simplex_range(max_dim)) { std::cout << " " << "[" << t_map.filtration(f_simplex) << "] "; for (auto vertex : t_map.simplex_vertex_range(f_simplex)) { std::cout << vertex << " "; @@ -91,4 +91,3 @@ int main(int argc, char * const argv[]) { } return 0; } -} -- cgit v1.2.3 From 04da883af94d96ea54faf09a1fb87e3b8e0bf847 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 15 May 2018 14:21:13 +0000 Subject: fake simplex tree for toplex map git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3443 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 7bdafb844ee99d834cfd47b37ae32cb52863b4b1 --- src/Toplex_map/example/Fake_simplex_tree.h | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Toplex_map/example/Fake_simplex_tree.h b/src/Toplex_map/example/Fake_simplex_tree.h index c3d87e47..6a7e7bdc 100644 --- a/src/Toplex_map/example/Fake_simplex_tree.h +++ b/src/Toplex_map/example/Fake_simplex_tree.h @@ -5,7 +5,6 @@ #include #include -#include #include #include @@ -13,9 +12,9 @@ namespace Gudhi { struct Visitor { - Lazy_Toplex_map* tm; + Toplex_map* tm; - Visitor(Lazy_Toplex_map* tm) + Visitor(Toplex_map* tm) :tm(tm) {} @@ -99,7 +98,7 @@ protected: template void Fake_simplex_tree::insert_graph(const OneSkeletonGraph& skel_graph){ - toplex_maps.emplace(nan(""), new Lazy_Toplex_map()); + toplex_maps.emplace(nan(""), new Toplex_map()); using vertex_iterator = typename boost::graph_traits::vertex_iterator; vertex_iterator vi, vi_end; for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) { -- cgit v1.2.3 From db4d235b9f7995351f3d4559a132dfe15f30b655 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 15 May 2018 15:39:51 +0000 Subject: better unit test git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3444 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 0dd6c254d59c71e4bafe7413e87ce0d27c87a4ab --- src/Toplex_map/include/gudhi/Lazy_Toplex_map.h | 4 ++-- src/Toplex_map/test/CMakeLists.txt 
| 1 + src/Toplex_map/test/toplex_map_unit_test.cpp | 11 +++++++++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h index 50785b5a..396961fe 100644 --- a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h @@ -64,8 +64,8 @@ private: std::size_t size_lbound = 0; std::size_t size = 0; - const double alpha = 2; //time - const double betta = 4; //memory + const double alpha = 4; //time + const double betta = 8; //memory }; template diff --git a/src/Toplex_map/test/CMakeLists.txt b/src/Toplex_map/test/CMakeLists.txt index 5ed55e97..8bf5cf14 100644 --- a/src/Toplex_map/test/CMakeLists.txt +++ b/src/Toplex_map/test/CMakeLists.txt @@ -1,6 +1,7 @@ cmake_minimum_required(VERSION 2.6) project(Toplex_map_tests) +add_executable(chrono chrono.cpp) add_executable ( ToplexMapUT toplex_map_unit_test.cpp ) target_link_libraries(ToplexMapUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp index 95ee7a02..c12ad094 100644 --- a/src/Toplex_map/test/toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -28,10 +28,17 @@ BOOST_AUTO_TEST_CASE(toplexmap) { K.insert_simplex(sigma6); K.insert_simplex(sigma7); BOOST_CHECK(K.membership(sigma4)); - BOOST_CHECK(!K.maximality(sigma5)); + BOOST_CHECK(!K.maximality(sigma3)); BOOST_CHECK(!K.membership(sigma5)); - K.contraction(4,5); + K.insert_simplex(sigma5); + std::vector sigma9 = {1, 2, 3}; + std::vector sigma10 = {2, 7}; + auto r = K.contraction(4,5); + sigma9.emplace_back(r); + sigma10.emplace_back(r); BOOST_CHECK(!K.membership(sigma6)); + BOOST_CHECK(K.membership(sigma9)); + BOOST_CHECK(K.membership(sigma10)); } -- cgit v1.2.3 From 68443280388d3a83adc3f927b3252b2debafb11c Mon Sep 17 00:00:00 2001 From: fgodi Date: Wed, 23 May 2018 12:49:36 +0000 Subject: strange git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3454 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 9860c0af5c014faeb5d16b81e1fafa8a4eb27786 --- src/Toplex_map/include/gudhi/Lazy_Toplex_map.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h index 396961fe..31f3da4b 100644 --- a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h @@ -175,7 +175,7 @@ inline void Lazy_Toplex_map::erase_max(const Input_vertex_range &vertex_range){ Simplex sigma(vertex_range.begin(),vertex_range.end()); empty_toplex = false; Simplex_ptr sptr = std::make_shared(sigma); - bool erased; + bool erased=false; for(const Vertex& v : sigma){ erased = t0.at(v).erase(sptr) > 0; if(t0.at(v).size()==0) @@ -206,17 +206,17 @@ std::size_t Lazy_Toplex_map::get_gamma0_lbound(const Vertex v) const{ void Lazy_Toplex_map::clean(const Vertex v){ Toplex_map toplices; std::unordered_map> dsorted_simplices; - int max_dim = 0; + std::size_t max_dim = 0; for(const Simplex_ptr& sptr : Simplex_ptr_set(t0.at(v))){ if(sptr->size() > max_dim){ - for(int d = max_dim+1; d<=sptr->size(); d++) + for(std::size_t d = max_dim+1; d<=sptr->size(); d++) dsorted_simplices.emplace(d, std::vector()); max_dim = sptr->size(); } dsorted_simplices[sptr->size()].emplace_back(*sptr); erase_max(*sptr); } - for(int d = max_dim; d>=1; d--) + for(std::size_t d = max_dim; d>=1; d--) 
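// Re-insert the simplices collected from t0.at(v), from the highest dimension down to 1:
// a simplex is kept as an independent toplex only if it is not already contained in one inserted before it.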
for(const Simplex &s : dsorted_simplices.at(d)) if(!toplices.membership(s)) toplices.insert_independent_simplex(s); -- cgit v1.2.3 From 0522bc89cf47440088c284c640d9800504f7548e Mon Sep 17 00:00:00 2001 From: fgodi Date: Wed, 23 May 2018 12:49:44 +0000 Subject: strange git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3455 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 4e66029b1f96b6843de7c5249b5e5206a964e619 --- src/Toplex_map/benchmark/chrono.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/Toplex_map/benchmark/chrono.cpp b/src/Toplex_map/benchmark/chrono.cpp index e65dcba2..a745f099 100644 --- a/src/Toplex_map/benchmark/chrono.cpp +++ b/src/Toplex_map/benchmark/chrono.cpp @@ -3,12 +3,14 @@ #include #include +#include #include using namespace Gudhi; -typedef Toplex_map::Vertex Vertex; typedef Toplex_map::Simplex Simplex; +typedef Toplex_map::Vertex Vertex; +typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool; class ST_wrapper { @@ -65,7 +67,7 @@ int nb_contraction = 300; int nb_insert_simplex2 = 3000; int nb_membership2 = 400000; -Simplex random_simplex(int n, int d){ +Simplex random_simplex(int n, std::size_t d){ std::random_device rd; std::mt19937 gen(rd()); std::uniform_int_distribution dis(1, n); -- cgit v1.2.3 From caed57893851dddad58f63d68ea41be724582cc1 Mon Sep 17 00:00:00 2001 From: fgodi Date: Wed, 23 May 2018 17:12:50 +0000 Subject: bug marc git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3456 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 71d185c40486ba495bd113ec5abf683d6e3b1cd9 --- src/Toplex_map/include/gudhi/Toplex_map.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index ccea34d5..b6bb5381 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -220,7 +220,8 @@ template void Toplex_map::insert_independent_simplex(const Input_vertex_range &vertex_range){ for(const Toplex_map::Vertex& v : vertex_range){ if(!t0.count(v)) t0.emplace(v, Simplex_ptr_set()); - t0.at(v).emplace(get_key(vertex_range)); + auto k = get_key(vertex_range); + t0.at(v).emplace(k); } } -- cgit v1.2.3 From 9b3f3e610646b9a2d35369bdb7a6f272e816eb34 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Tue, 29 May 2018 08:30:12 +0000 Subject: minor change git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3480 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: f14f9a0909174ba3569560d00c02d92360b8c0e5 --- .../include/gudhi/Persistence_weighted_gaussian.h | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h index 76c43e65..9ef47bf1 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h @@ -39,6 +39,7 @@ #include #include #include +#include namespace Gudhi { namespace Persistence_representations { -- cgit v1.2.3 From 0ccb8a63c7aa857e99802a6bce2ff3aa8c4b3d65 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 30 May 2018 05:40:59 +0000 Subject: Add Toplex_map image path in Doxyfile Fix Toplex_map test CMakeLists.txt issue git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3490 636b058d-ea47-450e-bf9e-a15bfbe3eedb 
Former-commit-id: 988ba1cd8d198444f70ab648bfd34e10cc82f6eb --- src/Doxyfile | 3 ++- src/Toplex_map/test/CMakeLists.txt | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Doxyfile b/src/Doxyfile index 0ef81e5c..bc6dc279 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -851,7 +851,8 @@ IMAGE_PATH = doc/Skeleton_blocker/ \ doc/Subsampling/ \ doc/Spatial_searching/ \ doc/Tangential_complex/ \ - doc/Bottleneck_distance/ + doc/Bottleneck_distance/ \ + doc/Toplex_map/ # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program diff --git a/src/Toplex_map/test/CMakeLists.txt b/src/Toplex_map/test/CMakeLists.txt index 8bf5cf14..5ed55e97 100644 --- a/src/Toplex_map/test/CMakeLists.txt +++ b/src/Toplex_map/test/CMakeLists.txt @@ -1,7 +1,6 @@ cmake_minimum_required(VERSION 2.6) project(Toplex_map_tests) -add_executable(chrono chrono.cpp) add_executable ( ToplexMapUT toplex_map_unit_test.cpp ) target_link_libraries(ToplexMapUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) -- cgit v1.2.3 From 10c6f6be72a2631cd1a1d28ed61343d55bd2b759 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 14 Jun 2018 09:25:16 +0000 Subject: small modif on PWGK git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3612 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: c0b8c70acfbf1a7f4b7bddb69a161086fb249c76 --- src/cython/cython/kernels.pyx | 26 ++++++++++++++------------ src/cython/cython/vectors.pyx | 3 +++ src/cython/include/Kernels_interface.h | 21 +++++++++++++-------- 3 files changed, 30 insertions(+), 20 deletions(-) diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx index 0cb296ec..cb8fc0fd 100644 --- a/src/cython/cython/kernels.pyx +++ b/src/cython/cython/kernels.pyx @@ -34,8 +34,8 @@ cdef extern from "Kernels_interface.h" namespace "Gudhi::persistence_diagram": vector[vector[double]] sw_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) double pss (vector[pair[double, double]], vector[pair[double, double]], double, int) vector[vector[double]] pss_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) - double pwg (vector[pair[double, double]], vector[pair[double, double]], double, int, double, double) - vector[vector[double]] pwg_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int, double, double) + double pwg (vector[pair[double, double]], vector[pair[double, double]], int, string, double, double, double) + vector[vector[double]] pwg_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], int, string, double, double, double) def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): """ @@ -65,37 +65,39 @@ def sliced_wasserstein_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): """ return sw_matrix(diagrams_1, diagrams_2, sigma, N) -def persistence_weighted_gaussian(diagram_1, diagram_2, sigma = 1, N = 100, C = 1, p = 1): +def persistence_weighted_gaussian(diagram_1, diagram_2, N = 100, weight = "arctan", sigma = 1.0, C = 1.0, p = 1.0): """ :param diagram_1: The first diagram. :type diagram_1: vector[pair[double, double]] :param diagram_2: The second diagram. 
:type diagram_2: vector[pair[double, double]] - :param sigma: bandwidth of Gaussian :param N: number of Fourier features - :param C: cost of persistence weight - :param p: power of persistence weight + :param weight: weight to use for the diagram points + :param sigma: bandwidth of Gaussian + :param C: cost of arctan persistence weight + :param p: power of arctan persistence weight :returns: the persistence weighted gaussian kernel. """ - return pwg(diagram_1, diagram_2, sigma, N, C, p) + return pwg(diagram_1, diagram_2, N, weight, sigma, C, p) -def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100, C = 1, p = 1): +def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, N = 100, weight = "arctan", sigma = 1.0, C = 1.0, p = 1.0): """ :param diagram_1: The first set of diagrams. :type diagram_1: vector[vector[pair[double, double]]] :param diagram_2: The second set of diagrams. :type diagram_2: vector[vector[pair[double, double]]] - :param sigma: bandwidth of Gaussian :param N: number of Fourier features - :param C: cost of persistence weight - :param p: power of persistence weight + :param weight: weight to use for the diagram points + :param sigma: bandwidth of Gaussian + :param C: cost of arctan persistence weight + :param p: power of arctan persistence weight :returns: the persistence weighted gaussian kernel matrix. """ - return pwg_matrix(diagrams_1, diagrams_2, sigma, N, C, p) + return pwg_matrix(diagrams_1, diagrams_2, N, weight, sigma, C, p) def persistence_scale_space(diagram_1, diagram_2, sigma = 1, N = 100): """ diff --git a/src/cython/cython/vectors.pyx b/src/cython/cython/vectors.pyx index 42390ae6..af53f739 100644 --- a/src/cython/cython/vectors.pyx +++ b/src/cython/cython/vectors.pyx @@ -58,7 +58,10 @@ def persistence_image(diagram, min_x = 0.0, max_x = 1.0, res_x = 10, min_y = 0.0 :param min_x: Minimum ordinate :param max_x: Maximum ordinate :param res_x: Number of ordinate pixels + :param weight: Weight to use for the diagram points :param sigma: bandwidth of Gaussian + :param C: cost of arctan persistence weight + :param p: power of arctan persistence weight :returns: the persistence image """ diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h index dd46656f..a07d7820 100644 --- a/src/cython/include/Kernels_interface.h +++ b/src/cython/include/Kernels_interface.h @@ -23,6 +23,7 @@ #ifndef INCLUDE_KERNELS_INTERFACE_H_ #define INCLUDE_KERNELS_INTERFACE_H_ +#include #include #include #include @@ -46,9 +47,13 @@ namespace persistence_diagram { return sw1.compute_scalar_product(sw2); } - double pwg(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N, double C, double p) { - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, Gudhi::Persistence_representations::arctan_weight(C,p)); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, Gudhi::Persistence_representations::arctan_weight(C,p)); + double pwg(const std::vector>& diag1, const std::vector>& diag2, int N, std::string weight, double sigma, double C, double p) { + Gudhi::Persistence_representations::Weight weight_fn; + if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::linear_weight; + if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::arctan_weight(C,p); + if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::const_weight; + 
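// Embed each diagram with the selected weight function (N Fourier features are used for the approximation);
// the kernel value returned below is the scalar product of the two embeddings.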
Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, weight_fn); + Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, weight_fn); return pwg1.compute_scalar_product(pwg2); } @@ -87,11 +92,11 @@ namespace persistence_diagram { return matrix; } - std::vector > pwg_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N, double C, double p){ + std::vector > pwg_matrix(const std::vector > >& s1, const std::vector > >& s2, int N, std::string weight, double sigma, double C, double p){ std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); for(int i = 0; i < num_diag_1; i++){ std::cout << 100.0*i/num_diag_1 << " %" << std::endl; - std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pwg(s1[i], s2[j], sigma, N, C, p)); matrix.push_back(ps); + std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pwg(s1[i], s2[j], N, weight, sigma, C, p)); matrix.push_back(ps); } return matrix; } @@ -99,13 +104,13 @@ namespace persistence_diagram { std::vector > pss_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N){ std::vector > > ss1, ss2; std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); for(int i = 0; i < num_diag_1; i++){ - std::vector> pd1 = s1[i]; int numpts = s1[i].size(); + std::vector> pd1 = s1[i]; int numpts = s1[i].size(); for(int j = 0; j < numpts; j++) pd1.emplace_back(s1[i][j].second,s1[i][j].first); ss1.push_back(pd1); } - + for(int i = 0; i < num_diag_2; i++){ - std::vector> pd2 = s2[i]; int numpts = s2[i].size(); + std::vector> pd2 = s2[i]; int numpts = s2[i].size(); for(int j = 0; j < numpts; j++) pd2.emplace_back(s2[i][j].second,s2[i][j].first); ss2.push_back(pd2); } -- cgit v1.2.3 From 516e2942d8529e03311539ad0b001b33e45721f4 Mon Sep 17 00:00:00 2001 From: fgodi Date: Tue, 19 Jun 2018 14:39:12 +0000 Subject: unitary collapses function git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3623 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 26c1fe89a979d9e1953e7916b15faa34e41da97b --- src/Toplex_map/include/gudhi/Toplex_map.h | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index b6bb5381..7a2e5b09 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -2,6 +2,7 @@ #define TOPLEX_MAP_H #include +#include #include #include #include @@ -69,7 +70,7 @@ public: * The edge has to verify the link condition if you want to preserve topology. * Returns the remaining vertex. * \ingroup toplex_map */ - Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y, bool force=false); + Toplex_map::Vertex contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y); /** Adds the given simplex to the complex. * The simplex must not have neither maximal face nor coface in the complex. @@ -89,6 +90,8 @@ public: * \ingroup toplex_map */ std::size_t num_simplices() const; + std::set unitary_collapse(const Toplex_map::Vertex k, const Toplex_map::Vertex d); + protected: /** \internal Gives an index in order to look for a simplex quickly. 
* \ingroup toplex_map */ @@ -196,18 +199,17 @@ Toplex_map::Simplex_ptr_set Toplex_map::maximal_cofaces(const Input_vertex_range return cofaces; } -Toplex_map::Vertex Toplex_map::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y, bool force){ +Toplex_map::Vertex Toplex_map::contraction(const Toplex_map::Vertex x, const Toplex_map::Vertex y){ if(!t0.count(x)) return y; if(!t0.count(y)) return x; int k, d; - if(force || (t0.at(x).size() > t0.at(y).size())) + if(t0.at(x).size() > t0.at(y).size()) k=x, d=y; else k=y, d=x; for(const Toplex_map::Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ //Copy constructor needed because the set is modified Simplex sigma(*sptr); - Simplex s; s.insert(2); erase_maximal(sptr); sigma.erase(d); sigma.insert(k); @@ -216,12 +218,27 @@ Toplex_map::Vertex Toplex_map::contraction(const Toplex_map::Vertex x, const Top return k; } +std::set Toplex_map::unitary_collapse(const Toplex_map::Vertex k, const Toplex_map::Vertex d){ + std::set r; + for(const Toplex_map::Simplex_ptr& sptr : Simplex_ptr_set(t0.at(d))){ + //Copy constructor needed because the set is modified + Simplex sigma(*sptr); + erase_maximal(sptr); + sigma.erase(d); + for(const Toplex_map::Vertex v : sigma) + r.insert(v); + sigma.insert(k); + insert_simplex(sigma); + } + return r; +} + template void Toplex_map::insert_independent_simplex(const Input_vertex_range &vertex_range){ + auto key = get_key(vertex_range); for(const Toplex_map::Vertex& v : vertex_range){ if(!t0.count(v)) t0.emplace(v, Simplex_ptr_set()); - auto k = get_key(vertex_range); - t0.at(v).emplace(k); + t0.at(v).emplace(key); } } -- cgit v1.2.3 From 0741c3eabbfece1c73ac76aa44adbe2904b6124d Mon Sep 17 00:00:00 2001 From: mcarrier Date: Sat, 23 Jun 2018 04:59:39 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3628 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 12f32a1c8ca31e7e0a40e1c3502e2a3d810d5bc5 --- .../doc/Persistence_representations_doc.h | 131 ++++++--------- .../example/CMakeLists.txt | 20 --- .../example/betti_sequence.cpp | 49 ------ .../example/persistence_heat_maps.cpp | 12 ++ .../example/persistence_heat_maps_exact.cpp | 55 ------- .../persistence_landscape_on_grid_exact.cpp | 52 ------ .../example/persistence_weighted_gaussian.cpp | 99 ----------- .../include/gudhi/Betti_sequence.h | 95 ----------- .../include/gudhi/Persistence_heat_maps.h | 174 ++++++++++++++++---- .../include/gudhi/Persistence_heat_maps_exact.h | 125 -------------- .../include/gudhi/Persistence_landscape_on_grid.h | 6 +- .../gudhi/Persistence_landscape_on_grid_exact.h | 108 ------------ .../include/gudhi/Persistence_weighted_gaussian.h | 182 --------------------- .../include/gudhi/Sliced_Wasserstein.h | 6 +- .../include/gudhi/Weight_functions.h | 81 --------- .../gudhi/common_persistence_representations.h | 15 +- src/cython/cython/kernels.pyx | 128 --------------- src/cython/cython/vectors.pyx | 68 -------- src/cython/include/Kernels_interface.h | 130 --------------- src/cython/include/Vectors_interface.h | 59 ------- 20 files changed, 217 insertions(+), 1378 deletions(-) delete mode 100644 src/Persistence_representations/example/betti_sequence.cpp delete mode 100644 src/Persistence_representations/example/persistence_heat_maps_exact.cpp delete mode 100644 src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp delete mode 100644 src/Persistence_representations/example/persistence_weighted_gaussian.cpp delete mode 100644 
src/Persistence_representations/include/gudhi/Betti_sequence.h delete mode 100644 src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h delete mode 100644 src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h delete mode 100644 src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h delete mode 100644 src/Persistence_representations/include/gudhi/Weight_functions.h delete mode 100644 src/cython/cython/kernels.pyx delete mode 100644 src/cython/cython/vectors.pyx delete mode 100644 src/cython/include/Kernels_interface.h delete mode 100644 src/cython/include/Vectors_interface.h diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index d0b02739..73800d0d 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -24,6 +24,7 @@ #define DOC_GUDHI_STAT_H_ namespace Gudhi { + namespace Persistence_representations { /** \defgroup Persistence_representations Persistence representations @@ -128,35 +129,33 @@ namespace Persistence_representations { function \f$L : \mathbb{N} \times \mathbb{R} \to [0,\infty)\f$ of two variables, if we define \f$L(k,t) = \lambda_k(t)\f$. - The detailed description of algorithms used to compute persistence landscapes can be found in \cite bubenik_dlotko_landscapes_2016. - Note that this implementation provides exact representation of landscapes. That have many advantages, but also a few drawbacks. - For instance, as discussed in \cite bubenik_dlotko_landscapes_2016, the exact representation of landscape may be of quadratic size with respect - to the input persistence diagram. It may therefore happen that, for very large diagrams, using this representation may be memory--prohibitive. - In such a case, there are two possible ways to proceed: + The detailed description of algorithms used to compute persistence landscapes can be found in + \cite bubenik_dlotko_landscapes_2016. + Note that this implementation provides exact representation of landscapes. That have many advantages, but also a few + drawbacks. For instance, as discussed + in \cite bubenik_dlotko_landscapes_2016, the exact representation of landscape may be of quadratic size with respect + to the input persistence diagram. It may therefore happen + that, for very large diagrams, using this representation may be memory--prohibitive. In such a case, there are two + possible ways to proceed: - \li Use representation on a grid---see section \ref sec_landscapes_on_grid. + \li Use non exact representation on a grid described in the Section \ref sec_landscapes_on_grid. \li Compute just a number of initial nonzero landscapes. This option is available from C++ level as a last parameter of the constructor of persistence landscape (set by default to std::numeric_limits::max()). \section sec_landscapes_on_grid Persistence Landscapes on a grid - Reference manual: \ref Gudhi::Persistence_representations::Persistence_landscape_on_grid
- Reference manual: \ref Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact
- - Here, we provide alternative, not exact, representations of persistence landscapes defined in Section \ref sec_persistence_landscapes. - Unlike Section \ref sec_persistence_landscapes, we build representations of persistence landscapes by evaluating the landscape functions on a finite, equally distributed grid of points. - We propose two different representations depending on whether the persistence intervals are also mapped on the grid (Persistence_landscape_on_grid) or not (Persistence_landscape_on_grid_exact). - This makes a big difference since mapping the intervals on the grid makes the computation time smaller but only provides an approximation of the landscape values. + This is an alternative, not--exact, representation of persistence landscapes defined in the Section \ref + sec_persistence_landscapes. Unlike in the Section \ref sec_persistence_landscapes we build a + representation of persistence landscape by sampling its values on a finite, equally distributed grid of points. + Since, the persistence landscapes that originate from persistence diagrams have slope \f$1\f$ or \f$-1\f$, we have an + estimate of a region between the grid points where the landscape cab be located. + That allows to estimate an error make when performing various operations on landscape. Note that for average + landscapes the slope is in range \f$[-1,1]\f$ and similar estimate can be used. - Since persistence landscapes originating from persistence diagrams have slope \f$1\f$ or \f$-1\f$, we have an - estimate of a region between the grid points where the landscapes can be located. - That allows to estimate an error made when performing various operations on landscapes. Note that for average - landscapes the slope is in range \f$[-1,1]\f$ and similar estimates can be used. - - Due to the lack of rigorous description of the algorithms for these non rigorous representations of persistence - landscapes in the literature, we provide a short discussion below. + Due to a lack of rigorous description of the algorithms to deal with this non--rigorous representation of persistence + landscapes in the literature, we are providing a short discussion of them in below. Let us assume that we want to compute persistence landscape on a interval \f$[x,y]\f$. Let us assume that we want to use \f$N\f$ grid points for that purpose. @@ -168,11 +167,11 @@ namespace Persistence_representations { functions) on the i-th point of a grid, i.e. \f$x + i \frac{y-x}{N}\f$. When averaging two persistence landscapes represented by a grid we need to make sure that they are defined in a - compatible grids, i.e. the intervals \f$[x,y]\f$ on which they are defined are + compatible grids. I.e. the intervals \f$[x,y]\f$ on which they are defined are the same, and the numbers of grid points \f$N\f$ are the same in both cases. If this is the case, we simply compute - point-wise averages of the entries of the corresponding - vectors (in this whole section we assume that if one vector of numbers is shorter than the other, we extend the shortest - one with zeros so that they have the same length). + point-wise averages of the entries of corresponding + vectors (In this whole section we assume that if one vector of numbers is shorter than another, we extend the shorter + one with zeros so that they have the same length.) Computations of distances between two persistence landscapes on a grid is not much different than in the rigorous case. 
In this case, we sum up the distances between the same levels of @@ -181,11 +180,11 @@ namespace Persistence_representations { Similarly as in case of distance, when computing the scalar product of two persistence landscapes on a grid, we sum up the scalar products of corresponding levels of landscapes. For each level, - we assume that the persistence landscape on a grid between two grid points is approximated by a linear function. - Therefore to compute the scalar product of two corresponding levels of landscapes, + we assume that the persistence landscape on a grid between two grid points is approximated by linear function. + Therefore to compute scalar product of two corresponding levels of landscapes, we sum up the integrals of products of line segments for every pair of constitutive grid points. - Note that for these representations we need to specify a few parameters: + Note that for this representation we need to specify a few parameters: \li Begin and end point of a grid -- the interval \f$[x,y]\f$ (real numbers). \li Number of points in a grid (positive integer \f$N\f$). @@ -194,33 +193,29 @@ namespace Persistence_representations { Note that the same representation is used in TDA R-package \cite Fasy_Kim_Lecci_Maria_tda. \section sec_persistence_heat_maps Persistence heat maps - Reference manual: \ref Gudhi::Persistence_representations::Persistence_heat_maps
- Reference manual: \ref Gudhi::Persistence_representations::Persistence_heat_maps_exact
- - This is a general class of discrete structures which are based on idea of placing a kernel in the points of persistence diagrams. + This is a general class of discrete structures which are based on idea of placing a kernel in the points of + persistence diagrams. This idea appeared in work by many authors over the last 15 years. As far as we know this idea was firstly described in the work of Bologna group in \cite Ferri_Frosini_comparision_sheme_1 and \cite Ferri_Frosini_comparision_sheme_2. Later it has been described by Colorado State University group in \cite Persistence_Images_2017. The presented paper - in the first time provided a discussion of stability of this representation. - Also, the same ideas are used in the construction of two recent kernels used for machine learning: - \cite Kusano_Fukumizu_Hiraoka_PWGK and \cite Reininghaus_Huber_ALL_PSSK. Both the kernels use - interesting ideas to ensure stability of the representations with respect to the 1-Wasserstein metric. In the kernel + in the first time provide a discussion of stability of the representation. + Also, the same ideas are used in construction of two recent kernels used for machine learning: + \cite Kusano_Fukumizu_Hiraoka_PWGK and \cite Reininghaus_Huber_ALL_PSSK. Both the kernel's construction uses + interesting ideas to ensure stability of the representation with respect to Wasserstein metric. In the kernel presented in \cite Kusano_Fukumizu_Hiraoka_PWGK, a scaling function is used to multiply the Gaussian kernel in the - way that the points close to diagonal have low weights and consequently do not have a big influence on the resulting + way that the points close to diagonal got low weight and consequently do not have a big influence on the resulting distribution. In \cite Reininghaus_Huber_ALL_PSSK for every point \f$(b,d)\f$ two Gaussian kernels are added: first, with a weight 1 in a point \f$(b,d)\f$, and the second, with the weight -1 for a point \f$(b,d)\f$. In both cases, the representations are stable with respect to 1-Wasserstein distance. - In Persistence_representations package, we currently implement a discretization of the distributions described above. - The base of this implementation is a 2-dimensional array of pixels. To each pixel is assigned a real value which - is the sum of the distribution values induced by each point of the persistence diagram. - As for Persistence_landscapes, we propose two different representations depending on whether the persistence intervals are also mapped on the pixels - (Persistence_heat_maps) or not (Persistence_heat_maps_exact). - At the moment we compute the sum over the evaluations of the distributions on the pixel centers. It can be easily extended to any other function - (like for instance the sum of the integrals of the distributions over the pixels). + In Persistence\_representations package we currently implement a discretization of the distributions described above. + The base of this implementation is 2-dimensional array of pixels. Each pixel have assigned a real value which + is a sum of values of distributions induced by each point of the persistence diagram. At the moment we compute the + sum of values on a center of a pixels. It can be easily extended to any other function + (like for instance sum of integrals of the intermediate distribution on a pixel). 
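In symbols, the value stored in a pixel with center \f$c\f$ is \f$\sum_{p \in D} w(p)\, K(p, c)\f$, where \f$D\f$ is the persistence diagram, \f$K\f$ is the kernel placed at the diagram points and \f$w\f$ is the (possibly constant) weight given to each point.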
- Concerning Persistence_heat_maps, the parameters that determine the structure are the following: + The parameters that determine the structure are the following: \li A positive integer k determining the size of the kernel we used (we always assume that the kernels are square). \li A filter: in practice a square matrix of a size \f$2k+1 \times 2k+1\f$. By default, this is a discretization of @@ -232,7 +227,6 @@ namespace Persistence_representations { to diagonal are given then sometimes the kernel have support that reaches the region below the diagonal. If the value of this parameter is true, then the values below diagonal can be erased. - Concerning Persistence_heat_maps_exact, only Gaussian kernels are implemented, so the parameters are the array of pixels, the weight functions for the Gaussians and the bandwidth of the Gaussians. \section sec_persistence_vectors Persistence vectors Reference manual: \ref Gudhi::Persistence_representations::Vector_distances_in_diagram
@@ -256,11 +250,7 @@ namespace Persistence_representations { absolute value of differences between coordinates. A scalar product is a sum of products of values at the corresponding positions of two vectors. - - - - -\section sec_persistence_kernels Kernels on persistence diagrams + \section sec_persistence_kernels Kernels on persistence diagrams Reference manual: \ref Gudhi::Persistence_representations::Sliced_Wasserstein
Reference manual: \ref Gudhi::Persistence_representations::Persistence_weighted_gaussian
@@ -269,53 +259,26 @@ namespace Persistence_representations { between images of these pairs under a map \f$\Phi\f$ taking values in a specific (possibly non Euclidean) Hilbert space \f$k(D_i, D_j) = \langle \Phi(D_i),\Phi(D_j)\rangle\f$. Reciprocally, classical results of learning theory ensure that such a \f$\Phi\f$ exists for a given similarity function \f$k\f$ if and only if \f$k\f$ is positive semi-definite. Kernels are designed for algorithms that can be kernelized, i.e., algorithms that only require to know scalar products between instances in order to run. - Examples of such algorithms include Support Vector Machines, Principal Component Analysis and Ridge Regression. + Examples of such algorithms include Support Vector Machines, Principal Component Analysis and Ridge Regression. There have been several attempts at defining kernels, i.e., positive semi-definite functions, between persistence diagrams within the last few years. We provide implementation - for three of them: - - \li the Persistence Scale Space Kernel---see \cite Reininghaus_Huber_ALL_PSSK, which is the classical scalar product between \f$L^2\f$ functions, where persistence diagrams - are turned into functions by centering and summing Gaussian functions over the diagram points and their symmetric counterparts w.r.t. the diagonal: \f$k(D_1,D_2)=\int \Phi(D_1)\Phi(D_2)\f$, - where \f$\Phi(D)=\sum_{p\in D} {\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right)\f$. - - \li the Persistence Weighted Gaussian Kernel---see \cite Kusano_Fukumizu_Hiraoka_PWGK, which is a slight generalization of the previous kernel, is the scalar product between - weighted Kernel Mean Embeddings of persistence diagrams w.r.t. the Gaussian Kernel \f$k_G\f$ (with corresponding map \f$\Phi_G\f$) in \f$\mathbb{R}^2\f$: - \f$k(D_1,D_2)=\langle\sum_{p\in D_1} w(p)\Phi_G(p), \sum_{q\in D_2} w(q)\Phi_G(q)\rangle\f$ - - \li the Sliced Wasserstein Kernel---see \cite pmlr-v70-carriere17a, which takes the form of a Gaussian kernel with a specific distance between persistence diagrams - called the Sliced Wasserstein Distance: \f$k(D_1,D_2)={\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right)\f$ + for the Sliced Wasserstein Kernel---see \cite pmlr-v70-carriere17a, which takes the form of a Gaussian kernel with a specific distance between persistence diagrams + called the Sliced Wasserstein Distance: \f$k(D_1,D_2)={\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right)\f$. Other kernels such as the Persistence Weighted Gaussian Kernel or + the Persistence Scale Space Kernel are implemented in Persistence_heat_maps. 
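A minimal usage sketch is given below; it assumes a constructor taking the diagram, the bandwidth \f$\sigma\f$ and the number of directions used for the approximation, together with the compute_scalar_product() method used in the C++/Python interfaces, so header locations and exact parameter names may differ.

\code
#include <gudhi/Sliced_Wasserstein.h>
#include <gudhi/common_persistence_representations.h>

#include <iostream>
#include <utility>

using PD = Gudhi::Persistence_representations::Persistence_diagram;
using SW = Gudhi::Persistence_representations::Sliced_Wasserstein;

int main() {
  PD diagram_1, diagram_2;
  diagram_1.push_back(std::make_pair(1., 2.));
  diagram_1.push_back(std::make_pair(0., 4.));
  diagram_2.push_back(std::make_pair(2., 9.));
  diagram_2.push_back(std::make_pair(3., 5.));

  // bandwidth sigma = 1, 100 directions for the approximate Sliced Wasserstein distance
  SW sw1(diagram_1, 1., 100);
  SW sw2(diagram_2, 1., 100);

  // kernel value k(D_1, D_2) = exp(-SW(D_1, D_2) / (2 * sigma^2))
  std::cout << "Approx SW kernel: " << sw1.compute_scalar_product(sw2) << std::endl;
  return 0;
}
\endcode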
When launching: \code $> ./Sliced_Wasserstein \endcode - + the program output is: - + \code $> Approx SW distance: 5.33648 $> Exact SW distance: 5.33798 $> Approx SW kernel: 0.0693743 $> Exact SW kernel: 0.0693224 $> Distance induced by approx SW kernel: 1.36428 - $> Distance induced by exact SW kernel: 1.3643 - \endcode - - - and when launching: - - \code $> ./Persistence_weighted_gaussian - \endcode - - the program output is: - - \code $> Approx PWG kernel: 1.21509 - $> Exact PWG kernel: 1.13628 - $> Distance induced by approx PWG kernel: 3.23354 - $> Distance induced by exact PWG kernel: 3.25697 - $> Approx Gaussian PWG kernel: 0.0194222 - $> Exact Gaussian PWG kernel: 0.0192524 - $> Approx PSS kernel: 0.134413 - $> Exact PSS kernel: 0.133394 + $> Distance induced by exact SW kernel: 1.3643 \endcode */ diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 9be22085..d236c3a6 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -31,23 +31,3 @@ add_executable ( Sliced_Wasserstein sliced_wasserstein.cpp ) add_test(NAME Sliced_Wasserstein COMMAND $) install(TARGETS Sliced_Wasserstein DESTINATION bin) - -add_executable ( Persistence_weighted_gaussian persistence_weighted_gaussian.cpp ) -add_test(NAME Persistence_weighted_gaussian - COMMAND $) -install(TARGETS Persistence_weighted_gaussian DESTINATION bin) - -add_executable ( Persistence_heat_maps_exact persistence_heat_maps_exact.cpp ) -add_test(NAME Persistence_heat_maps_exact - COMMAND $) -install(TARGETS Persistence_heat_maps_exact DESTINATION bin) - -add_executable ( Persistence_landscape_on_grid_exact persistence_landscape_on_grid_exact.cpp ) -add_test(NAME Persistence_landscape_on_grid_exact - COMMAND $) -install(TARGETS Persistence_landscape_on_grid_exact DESTINATION bin) - -add_executable ( Betti_sequence betti_sequence.cpp ) -add_test(NAME Betti_sequence - COMMAND $) -install(TARGETS Betti_sequence DESTINATION bin) diff --git a/src/Persistence_representations/example/betti_sequence.cpp b/src/Persistence_representations/example/betti_sequence.cpp deleted file mode 100644 index a422a822..00000000 --- a/src/Persistence_representations/example/betti_sequence.cpp +++ /dev/null @@ -1,49 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#include - -#include -#include -#include - -using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; -using BS = Gudhi::Persistence_representations::Betti_sequence; - -int main(int argc, char** argv) { - - Persistence_diagram persistence; - - persistence.push_back(std::make_pair(1, 2)); - persistence.push_back(std::make_pair(6, 8)); - persistence.push_back(std::make_pair(0, 4)); - persistence.push_back(std::make_pair(3, 8)); - - double min_x = 0; double max_x = 8; int res_x = 1000; - - BS bs(persistence, min_x, max_x, res_x); - std::vector B = bs.vectorize(); - - for(int i = 0; i < res_x; i++) std::cout << B[i] << ", "; - - return 0; -} diff --git a/src/Persistence_representations/example/persistence_heat_maps.cpp b/src/Persistence_representations/example/persistence_heat_maps.cpp index 323b57e9..f1791e97 100644 --- a/src/Persistence_representations/example/persistence_heat_maps.cpp +++ b/src/Persistence_representations/example/persistence_heat_maps.cpp @@ -21,6 +21,7 @@ */ #include +#include #include #include @@ -76,5 +77,16 @@ int main(int argc, char** argv) { // to compute scalar product of hm1 and hm2: std::cout << "Scalar product is : " << hm1.compute_scalar_product(hm2) << std::endl; + Gudhi::Persistence_representations::Kernel k = Gudhi::Persistence_representations::Gaussian_kernel(1.0); + + Persistence_heat_maps hm1k(persistence1, k); + Persistence_heat_maps hm2k(persistence2, k); + + Persistence_heat_maps hm1i(persistence1, 20, 20, 0, 11, 0, 11, k); + Persistence_heat_maps hm2i(persistence2, 20, 20, 0, 11, 0, 11, k); + + std::cout << "Scalar product computed with exact kernel is : " << hm1i.compute_scalar_product(hm2i) << std::endl; + std::cout << "Kernel value between PDs seen as functions is : " << hm1k.compute_scalar_product(hm2k) << std::endl; + return 0; } diff --git a/src/Persistence_representations/example/persistence_heat_maps_exact.cpp b/src/Persistence_representations/example/persistence_heat_maps_exact.cpp deleted file mode 100644 index f15b710d..00000000 --- a/src/Persistence_representations/example/persistence_heat_maps_exact.cpp +++ /dev/null @@ -1,55 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#include -#include - -#include -#include -#include -#include - -using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; -using PI = Gudhi::Persistence_representations::Persistence_heat_maps_exact; -using Weight = std::function) >; - -int main(int argc, char** argv) { - - Persistence_diagram persistence; - - persistence.push_back(std::make_pair(1, 2)); - persistence.push_back(std::make_pair(6, 8)); - persistence.push_back(std::make_pair(0, 4)); - persistence.push_back(std::make_pair(3, 8)); - - double min_x = 0.0; double max_x = 10.0; int res_x = 100; double min_y = 0.0; double max_y = 10.0; int res_y = 100; double sigma = 1.0; Weight weight = Gudhi::Persistence_representations::linear_weight; - - PI pim(persistence, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma); - std::vector > P = pim.vectorize(); - - for(int i = 0; i < res_y; i++){ - for(int j = 0; j < res_x; j++) std::cout << P[i][j] << " "; - std::cout << std::endl; - } - - return 0; -} diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp deleted file mode 100644 index 9ce42649..00000000 --- a/src/Persistence_representations/example/persistence_landscape_on_grid_exact.cpp +++ /dev/null @@ -1,52 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include - -#include -#include -#include - -using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; -using LS = Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact; - -int main(int argc, char** argv) { - - Persistence_diagram persistence; - - persistence.push_back(std::make_pair(1, 2)); - persistence.push_back(std::make_pair(6, 8)); - persistence.push_back(std::make_pair(0, 4)); - persistence.push_back(std::make_pair(3, 8)); - - int nb_ls = 2; double min_x = 0; double max_x = 8; int res_x = 1000; - - LS ls(persistence, nb_ls, min_x, max_x, res_x); - std::vector > L = ls.vectorize(); - - for(int i = 0; i < nb_ls; i++){ - for(int j = 0; j < res_x; j++) std::cout << L[i][j] << ", "; - std::cout << std::endl; - } - - return 0; -} diff --git a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp b/src/Persistence_representations/example/persistence_weighted_gaussian.cpp deleted file mode 100644 index 7945e4f1..00000000 --- a/src/Persistence_representations/example/persistence_weighted_gaussian.cpp +++ /dev/null @@ -1,99 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include - -#include -#include -#include - -using Persistence_diagram = Gudhi::Persistence_representations::Persistence_diagram; -using PWG = Gudhi::Persistence_representations::Persistence_weighted_gaussian; - -int main(int argc, char** argv) { - - Persistence_diagram persistence1, persistence2; - - persistence1.push_back(std::make_pair(1, 2)); - persistence1.push_back(std::make_pair(6, 8)); - persistence1.push_back(std::make_pair(0, 4)); - persistence1.push_back(std::make_pair(3, 8)); - - persistence2.push_back(std::make_pair(2, 9)); - persistence2.push_back(std::make_pair(1, 6)); - persistence2.push_back(std::make_pair(3, 5)); - persistence2.push_back(std::make_pair(6, 10)); - - double sigma = 1; - double tau = 1; - int m = 10000; - - PWG PWG1(persistence1, sigma, m, Gudhi::Persistence_representations::arctan_weight(1,1)); - PWG PWG2(persistence2, sigma, m, Gudhi::Persistence_representations::arctan_weight(1,1)); - - PWG PWGex1(persistence1, sigma, -1, Gudhi::Persistence_representations::arctan_weight(1,1)); - PWG PWGex2(persistence2, sigma, -1, Gudhi::Persistence_representations::arctan_weight(1,1)); - - - // Linear PWG - - std::cout << "Approx PWG kernel: " << PWG1.compute_scalar_product (PWG2) << std::endl; - std::cout << "Exact PWG kernel: " << PWGex1.compute_scalar_product (PWGex2) << std::endl; - - std::cout << "Distance induced by approx PWG kernel: " << PWG1.distance (PWG2) << std::endl; - std::cout << "Distance induced by exact PWG kernel: " << PWGex1.distance (PWGex2) << std::endl; - - - - - - - - // Gaussian PWG - - std::cout << "Approx Gaussian PWG kernel: " << std::exp( -PWG1.distance (PWG2) ) / (2*tau*tau) << std::endl; - std::cout << "Exact Gaussian PWG kernel: " << std::exp( -PWGex1.distance (PWGex2) ) / (2*tau*tau) << std::endl; - - - - - - - - // PSS - - Persistence_diagram pd1 = persistence1; int numpts = persistence1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(persistence1[i].second,persistence1[i].first); - Persistence_diagram pd2 = persistence2; numpts = persistence2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(persistence2[i].second,persistence2[i].first); - - PWG pwg1(pd1, 2*std::sqrt(sigma), m, Gudhi::Persistence_representations::pss_weight); - PWG pwg2(pd2, 2*std::sqrt(sigma), m, Gudhi::Persistence_representations::pss_weight); - - PWG pwgex1(pd1, 2*std::sqrt(sigma), -1, Gudhi::Persistence_representations::pss_weight); - PWG pwgex2(pd2, 2*std::sqrt(sigma), -1, Gudhi::Persistence_representations::pss_weight); - - std::cout << "Approx PSS kernel: " << pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma) << std::endl; - std::cout << "Exact PSS kernel: " << pwgex1.compute_scalar_product (pwgex2) / (16*Gudhi::Persistence_representations::pi*sigma) << std::endl; - - - - return 0; -} diff --git 
a/src/Persistence_representations/include/gudhi/Betti_sequence.h b/src/Persistence_representations/include/gudhi/Betti_sequence.h deleted file mode 100644 index 57c52ad2..00000000 --- a/src/Persistence_representations/include/gudhi/Betti_sequence.h +++ /dev/null @@ -1,95 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef BETTI_SEQUENCE_H_ -#define BETTI_SEQUENCE_H_ - -// gudhi include -#include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { - -/** - * \class Betti_sequence gudhi/Betti_sequence.h - * \brief A class implementing Betti sequences - * - * \ingroup Persistence_representations - * - * \details -**/ - -class Betti_sequence { - - protected: - Persistence_diagram diagram; - int res_x, nb_cv; - double min_x, max_x; - - public: - - /** \brief Betti_sequence constructor. - * \ingroup Betti_sequence - * - * @param[in] _diagram persistence diagram. - * @param[in] _min_x minimum value of samples. - * @param[in] _max_x maximum value of samples. - * @param[in] _res_x number of samples. - * - */ - Betti_sequence(const Persistence_diagram & _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x;} - - /** \brief Computes the Betti sequences of a diagram. - * \ingroup Betti_sequence - * - */ - std::vector vectorize() const { - int num_pts = diagram.size(); double step = (max_x - min_x)/(res_x - 1); - std::vector bs(res_x); for(int i = 0; i < res_x; i++) bs[i] = 0; - for(int j = 0; j < num_pts; j++){ - double px = diagram[j].first; double py = diagram[j].second; - int first = std::ceil((px-min_x)/step); int last = std::ceil((py-min_x)/step); - for(int i = first; i < last; i++) bs[i] += 1; - } - - return bs; - } - -}; // class Betti_sequence -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // BETTI_SEQUENCE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h index 35e51e63..63c6e239 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h +++ b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h @@ -244,6 +244,20 @@ class Persistence_heat_maps { double max_ = std::numeric_limits::max(), unsigned dimension = std::numeric_limits::max()); + /** + * Construction that takes as inputs (1) the diagram, (2) grid parameters (min, max and number of samples for x and y axes), and (3) a universal kernel on the plane used + * to turn the diagram into a function. 
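 * Each pixel of the resulting image stores the sum, over all diagram points, of the kernel evaluated at the pair (diagram point, pixel center), weighted by the scaling functor of the class.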
+ **/ + Persistence_heat_maps(const Persistence_diagram & interval, size_t number_of_x_pixels, size_t number_of_y_pixels, + double min_x = 0, double max_x = 1, double min_y = 0, double max_y = 1, const Kernel & kernel = Gaussian_kernel(1.0)); + + /** + * Construction that takes as inputs (1) the diagram and (2) a universal kernel on the plane used + * to turn the diagram into a function. Note that this construction is infinite dimensional so + * only compute_scalar_product() method is valid after calling this constructor. + **/ + Persistence_heat_maps(const Persistence_diagram & interval, const Kernel & kernel = Gaussian_kernel(1.0)); + /** * Compute a mean value of a collection of heat maps and store it in the current object. Note that all the persistence *maps send in a vector to this procedure need to have the same parameters. @@ -512,15 +526,27 @@ class Persistence_heat_maps { size_t number_of_functions_for_projections_to_reals; void construct(const std::vector >& intervals_, std::vector > filter = create_Gaussian_filter(5, 1), - bool erase_below_diagonal = false, size_t number_of_pixels = 1000, double min_ = std::numeric_limits::max(), double max_ = std::numeric_limits::max()); + void construct_image_from_exact_universal_kernel(const Persistence_diagram & interval, + size_t number_of_x_pixels = 10, size_t number_of_y_pixels = 10, + double min_x = 0, double max_x = 1, double min_y = 0, double max_y = 1, const Kernel & kernel = Gaussian_kernel(1.0)); + void construct_kernel_from_exact_universal_kernel(const Persistence_diagram & interval, const Kernel & kernel = Gaussian_kernel(1.0)); + void set_up_parameters_for_basic_classes() { this->number_of_functions_for_vectorization = 1; this->number_of_functions_for_projections_to_reals = 1; } + // Boolean indicating if we are computing persistence image (true) or persistence weighted gaussian kernel (false) + bool discrete = true; + + // PWGK + Kernel k; + Persistence_diagram d; + std::vector weights; + // data Scalling_of_kernels f; bool erase_below_diagonal; @@ -529,6 +555,59 @@ class Persistence_heat_maps { std::vector > heat_map; }; +template +void Persistence_heat_maps::construct_image_from_exact_universal_kernel(const Persistence_diagram & diagram, + size_t number_of_x_pixels, size_t number_of_y_pixels, + double min_x, double max_x, + double min_y, double max_y, const Kernel & kernel) { + + this->discrete = true; Scalling_of_kernels f; this->f = f; this->min_ = min_x; this->max_ = max_x; + for(size_t i = 0; i < number_of_y_pixels; i++) this->heat_map.emplace_back(); + double step_x = (max_x - min_x)/(number_of_x_pixels - 1); double step_y = (max_y - min_y)/(number_of_y_pixels - 1); + + int num_pts = diagram.size(); + + for(size_t i = 0; i < number_of_y_pixels; i++){ + double y = min_y + i*step_y; + for(size_t j = 0; j < number_of_x_pixels; j++){ + double x = min_x + j*step_x; + + std::pair grid_point(x,y); double pixel_value = 0; + for(int k = 0; k < num_pts; k++){ + double px = diagram[k].first; double py = diagram[k].second; std::pair diagram_point(px,py); + pixel_value += this->f(diagram_point) * kernel(diagram_point, grid_point); + } + this->heat_map[i].push_back(pixel_value); + + } + } + +} + + +template +Persistence_heat_maps::Persistence_heat_maps(const Persistence_diagram & diagram, + size_t number_of_x_pixels, size_t number_of_y_pixels, + double min_x, double max_x, + double min_y, double max_y, const Kernel & kernel) { + this->construct_image_from_exact_universal_kernel(diagram, number_of_x_pixels, number_of_y_pixels, min_x, 
max_x, min_y, max_y, kernel); + this->set_up_parameters_for_basic_classes(); +} + +template +void Persistence_heat_maps::construct_kernel_from_exact_universal_kernel(const Persistence_diagram & diagram, const Kernel & kernel){ + this->discrete = false; Scalling_of_kernels f; this->f = f; this->k = kernel; this->d = diagram; + int num_pts = this->d.size(); + for (int i = 0; i < num_pts; i++) this->weights.push_back(this->f(this->d[i])); +} + + +template +Persistence_heat_maps::Persistence_heat_maps(const Persistence_diagram& diagram, const Kernel & kernel) { + this->construct_kernel_from_exact_universal_kernel(diagram, kernel); + this->set_up_parameters_for_basic_classes(); +} + // if min_ == max_, then the program is requested to set up the values itself based on persistence intervals template void Persistence_heat_maps::construct(const std::vector >& intervals_, @@ -826,13 +905,16 @@ void Persistence_heat_maps::load_from_file(const char* file // Concretizations of virtual methods: template std::vector Persistence_heat_maps::vectorize(int number_of_function) const { + + std::vector result; + if(!discrete){std::cout << "No vectorize method in case of infinite dimensional vectorization" << std::endl; return result;} + // convert this->heat_map into one large vector: size_t size_of_result = 0; for (size_t i = 0; i != this->heat_map.size(); ++i) { size_of_result += this->heat_map[i].size(); } - std::vector result; result.reserve(size_of_result); for (size_t i = 0; i != this->heat_map.size(); ++i) { @@ -846,34 +928,39 @@ std::vector Persistence_heat_maps::vectorize(int nu template double Persistence_heat_maps::distance(const Persistence_heat_maps& second, double power) const { - // first we need to check if (*this) and second are defined on the same domain and have the same dimensions: - if (!this->check_if_the_same(second)) { - std::cerr << "The persistence images are of non compatible sizes. We cannot therefore compute distance between " - "them. The program will now terminate"; - throw "The persistence images are of non compatible sizes. The program will now terminate"; - } + if(this->discrete){ + // first we need to check if (*this) and second are defined on the same domain and have the same dimensions: + if (!this->check_if_the_same(second)) { + std::cerr << "The persistence images are of non compatible sizes. We cannot therefore compute distance between " + "them. The program will now terminate"; + throw "The persistence images are of non compatible sizes. 
The program will now terminate"; + } - // if we are here, we know that the two persistence images are defined on the same domain, so we can start computing - // their distances: + // if we are here, we know that the two persistence images are defined on the same domain, so we can start computing their distances: - double distance = 0; - if (power < std::numeric_limits::max()) { - for (size_t i = 0; i != this->heat_map.size(); ++i) { - for (size_t j = 0; j != this->heat_map[i].size(); ++j) { - distance += pow(fabs(this->heat_map[i][j] - second.heat_map[i][j]), power); + double distance = 0; + if (power < std::numeric_limits::max()) { + for (size_t i = 0; i != this->heat_map.size(); ++i) { + for (size_t j = 0; j != this->heat_map[i].size(); ++j) { + distance += pow(fabs(this->heat_map[i][j] - second.heat_map[i][j]), power); + } } - } - } else { - // in this case, we compute max norm distance - for (size_t i = 0; i != this->heat_map.size(); ++i) { - for (size_t j = 0; j != this->heat_map[i].size(); ++j) { - if (distance < fabs(this->heat_map[i][j] - second.heat_map[i][j])) { - distance = fabs(this->heat_map[i][j] - second.heat_map[i][j]); + } else { + // in this case, we compute max norm distance + for (size_t i = 0; i != this->heat_map.size(); ++i) { + for (size_t j = 0; j != this->heat_map[i].size(); ++j) { + if (distance < fabs(this->heat_map[i][j] - second.heat_map[i][j])) { + distance = fabs(this->heat_map[i][j] - second.heat_map[i][j]); + } } } } + return distance; + } else { + + return std::sqrt(this->compute_scalar_product(*this) + second.compute_scalar_product(second) -2 * this->compute_scalar_product(second)); + } - return distance; } template @@ -895,22 +982,37 @@ void Persistence_heat_maps::compute_average( template double Persistence_heat_maps::compute_scalar_product(const Persistence_heat_maps& second) const { - // first we need to check if (*this) and second are defined on the same domain and have the same dimensions: - if (!this->check_if_the_same(second)) { - std::cerr << "The persistence images are of non compatible sizes. We cannot therefore compute distance between " - "them. The program will now terminate"; - throw "The persistence images are of non compatible sizes. The program will now terminate"; - } - // if we are here, we know that the two persistence images are defined on the same domain, so we can start computing - // their scalar product: - double scalar_prod = 0; - for (size_t i = 0; i != this->heat_map.size(); ++i) { - for (size_t j = 0; j != this->heat_map[i].size(); ++j) { - scalar_prod += this->heat_map[i][j] * second.heat_map[i][j]; + if(discrete){ + // first we need to check if (*this) and second are defined on the same domain and have the same dimensions: + if (!this->check_if_the_same(second)) { + std::cerr << "The persistence images are of non compatible sizes. We cannot therefore compute distance between " + "them. The program will now terminate"; + throw "The persistence images are of non compatible sizes. 
The program will now terminate"; } + + // if we are here, we know that the two persistence images are defined on the same domain, so we can start computing + // their scalar product: + double scalar_prod = 0; + for (size_t i = 0; i != this->heat_map.size(); ++i) { + for (size_t j = 0; j != this->heat_map[i].size(); ++j) { + scalar_prod += this->heat_map[i][j] * second.heat_map[i][j]; + } + } + return scalar_prod; } - return scalar_prod; + + else{ + GUDHI_CHECK(this->approx != second.approx || this->f != second.f, std::invalid_argument("Error: different values for representations")); + + int num_pts1 = this->d.size(); int num_pts2 = second.d.size(); double kernel_val = 0; + for(int i = 0; i < num_pts1; i++) + for(int j = 0; j < num_pts2; j++) + kernel_val += this->weights[i] * second.weights[j] * this->k(this->d[i], second.d[j]); + return kernel_val; + } + + } } // namespace Persistence_representations diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h deleted file mode 100644 index 7c5b2fdc..00000000 --- a/src/Persistence_representations/include/gudhi/Persistence_heat_maps_exact.h +++ /dev/null @@ -1,125 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef PERSISTENCE_HEAT_MAPS_EXACT_H_ -#define PERSISTENCE_HEAT_MAPS_EXACT_H_ - -// gudhi include -#include -#include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { - -/** - * \class Persistence_heat_maps_exact gudhi/Persistence_heat_maps_exact.h - * \brief A class implementing exact persistence heat maps. - * - * \ingroup Persistence_representations - * - * \details - * - * In this class, we propose a way to approximate persistence heat maps, or persistence surfaces, by centering weighted Gaussians on each point of the persistence diagram, and evaluating these (exact) weighted Gaussian functions - * on the pixels of a 2D grid. Note that this scheme is different from the one proposed in Persistence_heat_maps, which first maps the points of the diagram to a 2D grid, and then evaluates the (approximate) weighted Gaussian functions. - * Hence, the difference is that we do not modify the diagram in this implementation, but the code can be slower to run. -**/ - -class Persistence_heat_maps_exact { - - protected: - Persistence_diagram diagram; - int res_x, res_y; - double min_x, max_x, min_y, max_y; - Weight weight; - double sigma; - - public: - - /** \brief Persistence_heat_maps_exact constructor. 
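// A minimal usage sketch of the two kernel-based construction modes introduced above
// (which take over the role of this removed exact class), assuming the constructor
// signatures declared in Persistence_heat_maps.h and the Gaussian_kernel defined in
// common_persistence_representations.h; the diagram values are purely illustrative:
//
//   using Gudhi::Persistence_representations::Gaussian_kernel;
//   using PH = Gudhi::Persistence_representations::Persistence_heat_maps<
//                  Gudhi::Persistence_representations::constant_scaling_function>;
//
//   Persistence_diagram diag = {{0.0, 1.0}, {0.2, 0.7}};
//
//   // Discrete mode: the weighted Gaussian surface is sampled on a 10 x 10 grid,
//   // so vectorize() returns the flattened grid of pixel values.
//   PH image(diag, 10, 10, 0.0, 1.0, 0.0, 1.0, Gaussian_kernel(0.5));
//   std::vector<double> pixels = image.vectorize(0);
//
//   // Kernel mode: no grid is built (infinite dimensional representation), so only
//   // compute_scalar_product() is meaningful, as noted in the constructor comment.
//   PH pwgk(diag, Gaussian_kernel(0.5));
//   double k = pwgk.compute_scalar_product(pwgk);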
- * \ingroup Persistence_heat_maps_exact - * - * @param[in] _diagram persistence diagram. - * @param[in] _min_x minimum value of pixel abscissa. - * @param[in] _max_x maximum value of pixel abscissa. - * @param[in] _res_x number of pixels for the x-direction. - * @param[in] _min_y minimum value of pixel ordinate. - * @param[in] _max_y maximum value of pixel ordinate. - * @param[in] _res_y number of pixels for the y-direction. - * @param[in] _weight weight function for the Gaussians. - * @param[in] _sigma bandwidth parameter for the Gaussians. - * - */ - Persistence_heat_maps_exact(const Persistence_diagram & _diagram, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10, double _min_y = 0.0, double _max_y = 1.0, int _res_y = 10, const Weight & _weight = arctan_weight(1,1), double _sigma = 1.0){ - diagram = _diagram; min_x = _min_x; max_x = _max_x; res_x = _res_x; min_y = _min_y; max_y = _max_y; res_y = _res_y, weight = _weight; sigma = _sigma; - } - - /** \brief Computes the persistence image of a diagram. - * \ingroup Persistence_heat_maps_exact - * - */ - std::vector > vectorize() const { - std::vector > im; for(int i = 0; i < res_y; i++) im.emplace_back(); - double step_x = (max_x - min_x)/(res_x - 1); double step_y = (max_y - min_y)/(res_y - 1); - - int num_pts = diagram.size(); - - for(int i = 0; i < res_y; i++){ - double y = min_y + i*step_y; - for(int j = 0; j < res_x; j++){ - double x = min_x + j*step_x; - - double pixel_value = 0; - for(int k = 0; k < num_pts; k++){ - double px = diagram[k].first; double py = diagram[k].second; - pixel_value += weight(std::pair(px,py)) * std::exp( -((x-px)*(x-px) + (y-(py-px))*(y-(py-px))) / (2*sigma*sigma) ) / (sigma*std::sqrt(2*pi)); - } - im[i].push_back(pixel_value); - - } - } - - return im; - - } - - - - -}; // class Persistence_heat_maps_exact -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // PERSISTENCE_HEAT_MAPS_EXACT_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h index fd8a181c..db0e362a 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h +++ b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h @@ -986,7 +986,7 @@ void Persistence_landscape_on_grid::set_up_values_of_landscapes(const std::vecto for (size_t int_no = 0; int_no != p.size(); ++int_no) { size_t grid_interval_begin = (p[int_no].first - grid_min_) / dx; size_t grid_interval_end = (p[int_no].second - grid_min_) / dx; - size_t grid_interval_midpoint = (size_t)(0.5 * (grid_interval_begin + grid_interval_end)); + size_t grid_interval_midpoint = (size_t)(0.5 * (p[int_no].first + p[int_no].second) - grid_min + 1); if (dbg) { std::cerr << "Considering an interval : " << p[int_no].first << "," << p[int_no].second << std::endl; @@ -996,7 +996,7 @@ void Persistence_landscape_on_grid::set_up_values_of_landscapes(const std::vecto std::cerr << "grid_interval_midpoint : " << grid_interval_midpoint << std::endl; } - double landscape_value = dx; + double landscape_value = grid_min + dx * (grid_interval_begin + 1) - p[int_no].first; for (size_t i = grid_interval_begin + 1; i < grid_interval_midpoint; ++i) { if (dbg) { std::cerr << "Adding landscape value (going up) for a point : " << i << " equal : " << landscape_value @@ -1030,6 +1030,8 @@ void Persistence_landscape_on_grid::set_up_values_of_landscapes(const std::vecto } landscape_value += dx; } + + landscape_value = 
p[int_no].second - grid_min - dx * grid_interval_midpoint; for (size_t i = grid_interval_midpoint; i <= grid_interval_end; ++i) { if (landscape_value > 0) { if (number_of_levels != std::numeric_limits::max()) { diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h deleted file mode 100644 index 52f24195..00000000 --- a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid_exact.h +++ /dev/null @@ -1,108 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef LANDSCAPE_H_ -#define LANDSCAPE_H_ - -// gudhi include -#include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { - -/** - * \class Persistence_landscape_on_grid_exact gudhi/Persistence_landscape_on_grid_exact.h - * \brief A class implementing exact persistence landscapes by approximating them on a collection of grid points - * - * \ingroup Persistence_representations - * - * \details - * In this class, we propose a way to approximate landscapes by sampling the x-axis of the persistence diagram and evaluating the (exact) landscape functions on the sample projections onto the diagonal. Note that this is a different approximation scheme - * from the one proposed in Persistence_landscape_on_grid, which puts a grid on the diagonal, maps the persistence intervals on this grid and computes the (approximate) landscape functions on the samples. - * Hence, the difference is that we do not modify the diagram in this implementation, but the code can be slower to run. -**/ - -class Persistence_landscape_on_grid_exact { - - protected: - Persistence_diagram diagram; - int res_x, nb_ls; - double min_x, max_x; - - public: - - /** \brief Persistence_landscape_on_grid_exact constructor. - * \ingroup Persistence_landscape_on_grid_exact - * - * @param[in] _diagram persistence diagram. - * @param[in] _nb_ls number of landscape functions. - * @param[in] _min_x minimum value of samples. - * @param[in] _max_x maximum value of samples. - * @param[in] _res_x number of samples. - * - */ - Persistence_landscape_on_grid_exact(const Persistence_diagram & _diagram, int _nb_ls = 5, double _min_x = 0.0, double _max_x = 1.0, int _res_x = 10){diagram = _diagram; nb_ls = _nb_ls; min_x = _min_x; max_x = _max_x; res_x = _res_x;} - - /** \brief Computes the landscape approximation of a diagram. 
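// A minimal sketch of the computation performed by the vectorize() method below, for a
// single sample x and a single landscape index k (0-based); the sqrt(2) factor matches
// the convention used in this implementation:
//
//   double landscape_value_at(const Persistence_diagram & diag, double x, int k) {
//     std::vector<double> tent_values;
//     for (const auto & pt : diag) {
//       double b = pt.first, d = pt.second;
//       if (b <= x && x <= d)  // x lies under the "tent" of the interval (b, d)
//         tent_values.push_back(std::sqrt(2) * std::min(x - b, d - x));
//     }
//     std::sort(tent_values.begin(), tent_values.end(),
//               [](const double & a, const double & b){ return a > b; });
//     return (k < (int)tent_values.size()) ? tent_values[k] : 0.0;  // k-th largest tent value
//   }
//
// vectorize() performs this evaluation simultaneously for all samples and all landscape indices.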
- * \ingroup Persistence_landscape_on_grid_exact - * - */ - std::vector > vectorize() const { - std::vector > ls; for(int i = 0; i < nb_ls; i++) ls.emplace_back(); - int num_pts = diagram.size(); double step = (max_x - min_x)/(res_x - 1); - - std::vector > ls_t; for(int i = 0; i < res_x; i++) ls_t.emplace_back(); - for(int j = 0; j < num_pts; j++){ - double px = diagram[j].first; double py = diagram[j].second; double mid = (px+py)/2; - int first = std::ceil((px-min_x)/step); int middle = std::ceil((mid-min_x)/step); int last = std::ceil((py-min_x)/step); double x = min_x + first*step; - for(int i = first; i < middle; i++){ double value = std::sqrt(2)*(x-px); ls_t[i].push_back(value); x += step; } - for(int i = middle; i < last; i++){ double value = std::sqrt(2)*(py-x); ls_t[i].push_back(value); x += step; } - } - - for(int i = 0; i < res_x; i++){ - std::sort(ls_t[i].begin(), ls_t[i].end(), [](const double & a, const double & b){return a > b;}); - int nb_events_i = ls_t[i].size(); - for (int j = 0; j < nb_ls; j++){ if(j < nb_events_i) ls[j].push_back(ls_t[i][j]); else ls[j].push_back(0); } - } - - return ls; - } - -}; // class Persistence_landscape_on_grid_exact -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // LANDSCAPE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h b/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h deleted file mode 100644 index 9ef47bf1..00000000 --- a/src/Persistence_representations/include/gudhi/Persistence_weighted_gaussian.h +++ /dev/null @@ -1,182 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef PERSISTENCE_WEIGHTED_GAUSSIAN_H_ -#define PERSISTENCE_WEIGHTED_GAUSSIAN_H_ - -// gudhi include -#include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { -/** - * \class Persistence_weighted_gaussian gudhi/Persistence_weighted_gaussian.h - * \brief A class implementing the Persistence Weighted Gaussian kernel and a specific case thereof called the Persistence Scale Space kernel. 
- * - * \ingroup Persistence_representations - * - * \details - * The Persistence Weighted Gaussian kernel is built with Gaussian Kernel Mean Embedding, meaning that each persistence diagram is first - * sent to the Hilbert space of a Gaussian kernel with bandwidth parameter \f$\sigma >0\f$ using a weighted mean embedding \f$\Phi\f$: - * - * \f$ \Phi\,:\,D\,\rightarrow\,\sum_{p\in D}\,w(p)\,{\rm exp}\left(-\frac{\|p-\cdot\|_2^2}{2\sigma^2}\right) \f$, - * - * Usually, the weight function is chosen to be an arctan function of the distance of the point to the diagonal: - * \f$w(p) = {\rm arctan}(C\,|y-x|^\alpha)\f$, for some parameters \f$C,\alpha >0\f$. - * Then, their scalar product in this space is computed: - * - * \f$ k(D_1,D_2)=\langle\Phi(D_1),\Phi(D_2)\rangle - * \,=\,\sum_{p\in D_1}\,\sum_{q\in D_2}\,w(p)\,w(q)\,{\rm exp}\left(-\frac{\|p-q\|_2^2}{2\sigma^2}\right).\f$ - * - * Note that one may apply a second Gaussian kernel to their distance in this space and still get a kernel. - * - * It follows that the computation time is \f$O(n^2)\f$ where \f$n\f$ is the number of points - * in the diagrams. This time can be improved by computing approximations of the kernel - * with \f$m\f$ Fourier features \cite Rahimi07randomfeatures. In that case, the computation time becomes \f$O(mn)\f$. - * - * The Persistence Scale Space kernel is a Persistence Weighted Gaussian kernel between modified diagrams: - * the symmetric of each point with respect to the diagonal is first added in each diagram, and then the weight function - * is set to be +1 if the point is above the diagonal and -1 otherwise. - * - * For more details, please see \cite Kusano_Fukumizu_Hiraoka_PWGK - * and \cite Reininghaus_Huber_ALL_PSSK . - * -**/ -class Persistence_weighted_gaussian{ - - protected: - Persistence_diagram diagram; - Weight weight; - double sigma; - int approx; - - public: - - /** \brief Persistence Weighted Gaussian kernel constructor. - * \ingroup Persistence_weighted_gaussian - * - * @param[in] _diagram persistence diagram. - * @param[in] _sigma bandwidth parameter of the Gaussian kernel used for the Kernel Mean Embedding of the diagrams. - * @param[in] _approx number of random Fourier features in case of approximate computation, set to -1 for exact computation. - * @param[in] _weight weight function for the points in the diagrams. - * - */ - Persistence_weighted_gaussian(const Persistence_diagram & _diagram, double _sigma = 1.0, int _approx = 1000, const Weight & _weight = arctan_weight(1,1)){diagram = _diagram; sigma = _sigma; approx = _approx; weight = _weight;} - - - // ********************************** - // Utils. 
- // ********************************** - - std::vector > Fourier_feat(const Persistence_diagram & diag, const std::vector > & z, const Weight & weight = arctan_weight(1,1)) const { - int md = diag.size(); std::vector > b; int mz = z.size(); - for(int i = 0; i < mz; i++){ - double d1 = 0; double d2 = 0; double zx = z[i].first; double zy = z[i].second; - for(int j = 0; j < md; j++){ - double x = diag[j].first; double y = diag[j].second; - d1 += weight(diag[j])*cos(x*zx + y*zy); - d2 += weight(diag[j])*sin(x*zx + y*zy); - } - b.emplace_back(d1,d2); - } - return b; - } - - std::vector > random_Fourier(double sigma, int m = 1000) const { - std::normal_distribution distrib(0,1); std::vector > z; std::random_device rd; - for(int i = 0; i < m; i++){ - std::mt19937 e1(rd()); std::mt19937 e2(rd()); - double zx = distrib(e1); double zy = distrib(e2); - z.emplace_back(zx/sigma,zy/sigma); - } - return z; - } - - - - // ********************************** - // Scalar product + distance. - // ********************************** - - /** \brief Evaluation of the kernel on a pair of diagrams. - * \ingroup Persistence_weighted_gaussian - * - * @pre sigma, approx and weight attributes need to be the same for both instances. - * @param[in] second other instance of class Persistence_weighted_gaussian. - * - */ - double compute_scalar_product(const Persistence_weighted_gaussian & second) const { - - GUDHI_CHECK(this->sigma != second.sigma || this->approx != second.approx || this->weight != second.weight, std::invalid_argument("Error: different values for representations")); - Persistence_diagram diagram1 = this->diagram; Persistence_diagram diagram2 = second.diagram; - - if(this->approx == -1){ - int num_pts1 = diagram1.size(); int num_pts2 = diagram2.size(); double k = 0; - for(int i = 0; i < num_pts1; i++) - for(int j = 0; j < num_pts2; j++) - k += this->weight(diagram1[i])*this->weight(diagram2[j])*exp(-((diagram1[i].first - diagram2[j].first) * (diagram1[i].first - diagram2[j].first) + - (diagram1[i].second - diagram2[j].second) * (diagram1[i].second - diagram2[j].second)) - /(2*this->sigma*this->sigma)); - return k; - } - else{ - std::vector > z = random_Fourier(this->sigma, this->approx); - std::vector > b1 = Fourier_feat(diagram1,z,this->weight); - std::vector > b2 = Fourier_feat(diagram2,z,this->weight); - double d = 0; for(int i = 0; i < this->approx; i++) d += b1[i].first*b2[i].first + b1[i].second*b2[i].second; - return d/this->approx; - } - } - - /** \brief Evaluation of the distance between images of diagrams in the Hilbert space of the kernel. - * \ingroup Persistence_weighted_gaussian - * - * @pre sigma, approx and weight attributes need to be the same for both instances. - * @param[in] second other instance of class Persistence_weighted_gaussian. 
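// A short note on why the Fourier-feature branch of compute_scalar_product() below
// approximates the exact double sum (this is the standard random-Fourier-feature
// argument; z_1, ..., z_m denote the vectors produced by random_Fourier() above):
// for z drawn from a centered Gaussian with covariance (1/sigma^2) * Id,
//   E_z[ cos(<z, p - q>) ] = exp( -||p - q||^2 / (2 sigma^2) ),
// and cos(<z,p> - <z,q>) = cos<z,p> cos<z,q> + sin<z,p> sin<z,q>. Summing against the
// weights w(p), w(q) and averaging over the m sampled directions gives the estimator
//
//   double approx_k = 0;
//   for (int i = 0; i < m; i++)
//     approx_k += b1[i].first * b2[i].first + b1[i].second * b2[i].second;
//   approx_k /= m;
//
// where b1 = Fourier_feat(diagram1, z, w) and b2 = Fourier_feat(diagram2, z, w), which
// is exactly what the approximate branch below computes.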
- * - */ - double distance(const Persistence_weighted_gaussian & second) const { - GUDHI_CHECK(this->sigma != second.sigma || this->approx != second.approx || this->weight != second.weight, std::invalid_argument("Error: different values for representations")); - return std::pow(this->compute_scalar_product(*this) + second.compute_scalar_product(second)-2*this->compute_scalar_product(second), 0.5); - } - - -}; // class Persistence_weighted_gaussian -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // PERSISTENCE_WEIGHTED_GAUSSIAN_H_ diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index d8ed0d98..8c92ab54 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -62,7 +62,7 @@ namespace Persistence_representations { * in the diagrams, or approximated by sampling \f$N\f$ lines in the circle in \f$O(Nn{\rm log}(n))\f$ time. The Sliced Wasserstein Kernel is then computed as: * * \f$ k(D_1,D_2) = {\rm exp}\left(-\frac{SW(D_1,D_2)}{2\sigma^2}\right).\f$ - * + * * For more details, please see \cite pmlr-v70-carriere17a . * **/ @@ -80,7 +80,7 @@ class Sliced_Wasserstein { void build_rep(){ if(approx > 0){ - + double step = pi/this->approx; int n = diagram.size(); @@ -188,7 +188,7 @@ class Sliced_Wasserstein { * \ingroup Sliced_Wasserstein * * @pre approx attribute needs to be the same for both instances. - * @param[in] second other instance of class Sliced_Wasserstein. + * @param[in] second other instance of class Sliced_Wasserstein. * * */ diff --git a/src/Persistence_representations/include/gudhi/Weight_functions.h b/src/Persistence_representations/include/gudhi/Weight_functions.h deleted file mode 100644 index 78de406d..00000000 --- a/src/Persistence_representations/include/gudhi/Weight_functions.h +++ /dev/null @@ -1,81 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef WEIGHT_FUNCTIONS_H_ -#define WEIGHT_FUNCTIONS_H_ - -// gudhi include -#include -#include - -// standard include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace Gudhi { -namespace Persistence_representations { - -/** \fn static double pss_weight(std::pair p) - * \brief Persistence Scale Space kernel weight function. - * \ingroup Persistence_representations - * - * @param[in] p point in 2D. - */ -static double pss_weight(std::pair p) {if(p.second > p.first) return 1; else return -1;} - -/** \fn static double linear_weight(std::pair p) - * \brief Linear weight function. 
- * \ingroup Persistence_representations - * - * @param[in] p point in 2D. - */ -static double linear_weight(std::pair p) {return std::abs(p.second - p.first);} - -/** \fn static double const_weight(std::pair p) - * \brief Constant weight function. - * \ingroup Persistence_representations - * - * @param[in] p point in 2D. - */ -static double const_weight(std::pair p) {return 1;} - -/** \fn static std::function) > arctan_weight(double C, double alpha) - * \brief Returns the arctan weight function with parameters C and alpha. - * \ingroup Persistence_representations - * - * @param[in] C positive constant. - * @param[in] alpha positive power. - */ -static std::function) > arctan_weight(double C, double alpha) {return [=](std::pair p){return C * atan(std::pow(std::abs(p.second - p.first), alpha));};} - -} // namespace Persistence_representations -} // namespace Gudhi - -#endif // WEIGHT_FUNCTIONS_H_ diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 539eee60..024c99ec 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -40,12 +40,23 @@ static constexpr double pi = boost::math::constants::pi(); /** * In this module, we use the name Persistence_diagram for the representation of a diagram in a vector of pairs of two double. */ -using Persistence_diagram = std::vector >; +using Persistence_diagram = std::vector >; /** * In this module, we use the name Weight for the representation of a function taking a pair of two double and returning a double. */ -using Weight = std::function) >; +using Weight = std::function) >; +using Kernel = std::function, std::pair )>; + +Kernel Gaussian_kernel(double sigma){ + return [=](std::pair p, std::pair q){return std::exp( -((p.first-q.first)*(p.first-q.first) + (p.second-q.second)*(p.second-q.second)) / (sigma*sigma) );}; +} + +Kernel polynomial_kernel(double c, double d){ + return [=](std::pair p, std::pair q){return std::pow( p.first*q.first + p.second*q.second + c, d);}; +} + + // double epsi = std::numeric_limits::epsilon(); double epsi = 0.000005; diff --git a/src/cython/cython/kernels.pyx b/src/cython/cython/kernels.pyx deleted file mode 100644 index cb8fc0fd..00000000 --- a/src/cython/cython/kernels.pyx +++ /dev/null @@ -1,128 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -import os - -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Mathieu Carriere - - Copyright (C) 2018 INRIA - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
-""" - -__author__ = "Mathieu Carriere" -__copyright__ = "Copyright (C) 2018 INRIA" -__license__ = "GPL v3" - -cdef extern from "Kernels_interface.h" namespace "Gudhi::persistence_diagram": - double sw (vector[pair[double, double]], vector[pair[double, double]], double, int) - vector[vector[double]] sw_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) - double pss (vector[pair[double, double]], vector[pair[double, double]], double, int) - vector[vector[double]] pss_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], double, int) - double pwg (vector[pair[double, double]], vector[pair[double, double]], int, string, double, double, double) - vector[vector[double]] pwg_matrix (vector[vector[pair[double, double]]], vector[vector[pair[double, double]]], int, string, double, double, double) - -def sliced_wasserstein(diagram_1, diagram_2, sigma = 1, N = 100): - """ - - :param diagram_1: The first diagram. - :type diagram_1: vector[pair[double, double]] - :param diagram_2: The second diagram. - :type diagram_2: vector[pair[double, double]] - :param sigma: bandwidth of Gaussian - :param N: number of directions - - :returns: the sliced wasserstein kernel. - """ - return sw(diagram_1, diagram_2, sigma, N) - -def sliced_wasserstein_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): - """ - - :param diagram_1: The first set of diagrams. - :type diagram_1: vector[vector[pair[double, double]]] - :param diagram_2: The second set of diagrams. - :type diagram_2: vector[vector[pair[double, double]]] - :param sigma: bandwidth of Gaussian - :param N: number of directions - - :returns: the sliced wasserstein kernel matrix. - """ - return sw_matrix(diagrams_1, diagrams_2, sigma, N) - -def persistence_weighted_gaussian(diagram_1, diagram_2, N = 100, weight = "arctan", sigma = 1.0, C = 1.0, p = 1.0): - """ - - :param diagram_1: The first diagram. - :type diagram_1: vector[pair[double, double]] - :param diagram_2: The second diagram. - :type diagram_2: vector[pair[double, double]] - :param N: number of Fourier features - :param weight: weight to use for the diagram points - :param sigma: bandwidth of Gaussian - :param C: cost of arctan persistence weight - :param p: power of arctan persistence weight - - :returns: the persistence weighted gaussian kernel. - """ - return pwg(diagram_1, diagram_2, N, weight, sigma, C, p) - -def persistence_weighted_gaussian_matrix(diagrams_1, diagrams_2, N = 100, weight = "arctan", sigma = 1.0, C = 1.0, p = 1.0): - """ - - :param diagram_1: The first set of diagrams. - :type diagram_1: vector[vector[pair[double, double]]] - :param diagram_2: The second set of diagrams. - :type diagram_2: vector[vector[pair[double, double]]] - :param N: number of Fourier features - :param weight: weight to use for the diagram points - :param sigma: bandwidth of Gaussian - :param C: cost of arctan persistence weight - :param p: power of arctan persistence weight - - :returns: the persistence weighted gaussian kernel matrix. - """ - return pwg_matrix(diagrams_1, diagrams_2, N, weight, sigma, C, p) - -def persistence_scale_space(diagram_1, diagram_2, sigma = 1, N = 100): - """ - - :param diagram_1: The first diagram. - :type diagram_1: vector[pair[double, double]] - :param diagram_2: The second diagram. - :type diagram_2: vector[pair[double, double]] - :param sigma: bandwidth of Gaussian - :param N: number of Fourier features - - :returns: the persistence scale space kernel. 
- """ - return pss(diagram_1, diagram_2, sigma, N) - -def persistence_scale_space_matrix(diagrams_1, diagrams_2, sigma = 1, N = 100): - """ - - :param diagram_1: The first set of diagrams. - :type diagram_1: vector[vector[pair[double, double]]] - :param diagram_2: The second set of diagrams. - :type diagram_2: vector[vector[pair[double, double]]] - :param sigma: bandwidth of Gaussian - :param N: number of Fourier features - - :returns: the persistence scale space kernel matrix. - """ - return pss_matrix(diagrams_1, diagrams_2, sigma, N) diff --git a/src/cython/cython/vectors.pyx b/src/cython/cython/vectors.pyx deleted file mode 100644 index af53f739..00000000 --- a/src/cython/cython/vectors.pyx +++ /dev/null @@ -1,68 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -import os - -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Mathieu Carriere - - Copyright (C) 2018 INRIA - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . -""" - -__author__ = "Mathieu Carriere" -__copyright__ = "Copyright (C) 2018 INRIA" -__license__ = "GPL v3" - -cdef extern from "Vectors_interface.h" namespace "Gudhi::persistence_diagram": - vector[vector[double]] compute_ls (vector[pair[double, double]], int, double, double, int) - vector[vector[double]] compute_pim (vector[pair[double, double]], double, double, int, double, double, int, string, double, double, double) - -def landscape(diagram, nb_ls = 10, min_x = 0.0, max_x = 1.0, res_x = 100): - """ - - :param diagram: The diagram - :type diagram: vector[pair[double, double]] - :param nb_ls: Number of landscapes - :param min_x: Minimum abscissa - :param max_x: Maximum abscissa - :param res_x: Number of samples - - :returns: the landscape - """ - return compute_ls(diagram, nb_ls, min_x, max_x, res_x) - -def persistence_image(diagram, min_x = 0.0, max_x = 1.0, res_x = 10, min_y = 0.0, max_y = 1.0, res_y = 10, weight = "linear", sigma = 1.0, C = 1.0, p = 1.0): - """ - - :param diagram: The diagram - :type diagram: vector[vector[pair[double, double]]] - :param min_x: Minimum abscissa - :param max_x: Maximum abscissa - :param res_x: Number of abscissa pixels - :param min_x: Minimum ordinate - :param max_x: Maximum ordinate - :param res_x: Number of ordinate pixels - :param weight: Weight to use for the diagram points - :param sigma: bandwidth of Gaussian - :param C: cost of arctan persistence weight - :param p: power of arctan persistence weight - - :returns: the persistence image - """ - return compute_pim(diagram, min_x, max_x, res_x, min_y, max_y, res_y, weight, sigma, C, p) diff --git a/src/cython/include/Kernels_interface.h b/src/cython/include/Kernels_interface.h deleted file mode 100644 index a07d7820..00000000 --- a/src/cython/include/Kernels_interface.h +++ /dev/null @@ -1,130 +0,0 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#ifndef INCLUDE_KERNELS_INTERFACE_H_ -#define INCLUDE_KERNELS_INTERFACE_H_ - -#include -#include -#include -#include - -#include -#include -#include // for std::pair - -namespace Gudhi { - -namespace persistence_diagram { - - - // ******************* - // Kernel evaluations. - // ******************* - - double sw(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - Gudhi::Persistence_representations::Sliced_Wasserstein sw1(diag1, sigma, N); - Gudhi::Persistence_representations::Sliced_Wasserstein sw2(diag2, sigma, N); - return sw1.compute_scalar_product(sw2); - } - - double pwg(const std::vector>& diag1, const std::vector>& diag2, int N, std::string weight, double sigma, double C, double p) { - Gudhi::Persistence_representations::Weight weight_fn; - if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::linear_weight; - if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::arctan_weight(C,p); - if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::const_weight; - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, sigma, N, weight_fn); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, sigma, N, weight_fn); - return pwg1.compute_scalar_product(pwg2); - } - - double pss(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - std::vector> pd1 = diag1; int numpts = diag1.size(); for(int i = 0; i < numpts; i++) pd1.emplace_back(diag1[i].second,diag1[i].first); - std::vector> pd2 = diag2; numpts = diag2.size(); for(int i = 0; i < numpts; i++) pd2.emplace_back(diag2[i].second,diag2[i].first); - - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(pd1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(pd2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); - - return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); - } - - double pss_sym(const std::vector>& diag1, const std::vector>& diag2, double sigma, int N) { - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg1(diag1, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); - Gudhi::Persistence_representations::Persistence_weighted_gaussian pwg2(diag2, 2*std::sqrt(sigma), N, Gudhi::Persistence_representations::pss_weight); - - return pwg1.compute_scalar_product (pwg2) / (16*Gudhi::Persistence_representations::pi*sigma); - } - - - // **************** - // Kernel matrices. 
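// A note on the pss() / pss_sym() reduction used above, derived from the code itself:
// each diagram is first symmetrized by appending the mirror image (d, b) of every point
// (b, d), e.g.
//
//   std::vector<std::pair<double, double>> sym = diag;
//   for (const auto & pt : diag) sym.emplace_back(pt.second, pt.first);
//
// and the result is fed to Persistence_weighted_gaussian with the +/-1 pss_weight and
// bandwidth 2*sqrt(sigma), so each Gaussian factor becomes exp(-||p - q||^2 / (8*sigma)).
// After symmetrizing both diagrams every signed pair of original points is counted twice
// (once directly and once through the two mirrored copies), which is why the scalar
// product is divided by 16*pi*sigma: this reproduces the usual 1/(8*pi*sigma)
// normalization of the Persistence Scale Space kernel while absorbing that factor 2.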
- // **************** - - std::vector > sw_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N){ - std::vector > matrix; - std::vector ss1; - int num_diag_1 = s1.size(); for(int i = 0; i < num_diag_1; i++){Gudhi::Persistence_representations::Sliced_Wasserstein sw1(s1[i], sigma, N); ss1.push_back(sw1);} - std::vector ss2; - int num_diag_2 = s2.size(); for(int i = 0; i < num_diag_2; i++){Gudhi::Persistence_representations::Sliced_Wasserstein sw2(s2[i], sigma, N); ss2.push_back(sw2);} - for(int i = 0; i < num_diag_1; i++){ - std::cout << 100.0*i/num_diag_1 << " %" << std::endl; - std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(ss1[i].compute_scalar_product(ss2[j])); matrix.push_back(ps); - } - return matrix; - } - - std::vector > pwg_matrix(const std::vector > >& s1, const std::vector > >& s2, int N, std::string weight, double sigma, double C, double p){ - std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); - for(int i = 0; i < num_diag_1; i++){ - std::cout << 100.0*i/num_diag_1 << " %" << std::endl; - std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pwg(s1[i], s2[j], N, weight, sigma, C, p)); matrix.push_back(ps); - } - return matrix; - } - - std::vector > pss_matrix(const std::vector > >& s1, const std::vector > >& s2, double sigma, int N){ - std::vector > > ss1, ss2; std::vector > matrix; int num_diag_1 = s1.size(); int num_diag_2 = s2.size(); - for(int i = 0; i < num_diag_1; i++){ - std::vector> pd1 = s1[i]; int numpts = s1[i].size(); - for(int j = 0; j < numpts; j++) pd1.emplace_back(s1[i][j].second,s1[i][j].first); - ss1.push_back(pd1); - } - - for(int i = 0; i < num_diag_2; i++){ - std::vector> pd2 = s2[i]; int numpts = s2[i].size(); - for(int j = 0; j < numpts; j++) pd2.emplace_back(s2[i][j].second,s2[i][j].first); - ss2.push_back(pd2); - } - - for(int i = 0; i < num_diag_1; i++){ - std::cout << 100.0*i/num_diag_1 << " %" << std::endl; - std::vector ps; for(int j = 0; j < num_diag_2; j++) ps.push_back(pss_sym(ss1[i], ss2[j], sigma, N)); matrix.push_back(ps); - } - return matrix; - } - -} // namespace persistence_diagram - -} // namespace Gudhi - - -#endif // INCLUDE_KERNELS_INTERFACE_H_ diff --git a/src/cython/include/Vectors_interface.h b/src/cython/include/Vectors_interface.h deleted file mode 100644 index 902ccc10..00000000 --- a/src/cython/include/Vectors_interface.h +++ /dev/null @@ -1,59 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carriere - * - * Copyright (C) 2018 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#ifndef INCLUDE_VECTORS_INTERFACE_H_ -#define INCLUDE_VECTORS_INTERFACE_H_ - -#include -#include -#include - -#include -#include -#include // for std::pair - -using Weight = std::function) >; - -namespace Gudhi { - -namespace persistence_diagram { - - std::vector > compute_ls(const std::vector >& diag, int nb_ls, double min_x, double max_x, int res_x) { - Gudhi::Persistence_representations::Persistence_landscape_on_grid_exact L(diag, nb_ls, min_x, max_x, res_x); - return L.vectorize(); - } - - std::vector > compute_pim(const std::vector >& diag, double min_x, double max_x, int res_x, double min_y, double max_y, int res_y, std::string weight, double sigma, double C, double p) { - Weight weight_fn; - if(weight.compare("linear") == 0) weight_fn = Gudhi::Persistence_representations::linear_weight; - if(weight.compare("arctan") == 0) weight_fn = Gudhi::Persistence_representations::arctan_weight(C,p); - if(weight.compare("const") == 0) weight_fn = Gudhi::Persistence_representations::const_weight; - Gudhi::Persistence_representations::Persistence_heat_maps_exact P(diag, min_x, max_x, res_x, min_y, max_y, res_y, weight_fn, sigma); - return P.vectorize(); - } - -} // namespace persistence_diagram - -} // namespace Gudhi - - -#endif // INCLUDE_VECTORS_INTERFACE_H_ -- cgit v1.2.3 From 0c372ac3217ef31607c25266ff4394b5fa1ca2a8 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Tue, 3 Jul 2018 05:52:22 +0000 Subject: corrected test units git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3662 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5b37dde84d00538ff15b7e638ba1f2d6800573c0 --- .../include/gudhi/common_persistence_representations.h | 2 +- src/Persistence_representations/test/kernels.cpp | 13 +++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 024c99ec..66ed3bf8 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -49,7 +49,7 @@ using Weight = std::function) >; using Kernel = std::function, std::pair )>; Kernel Gaussian_kernel(double sigma){ - return [=](std::pair p, std::pair q){return std::exp( -((p.first-q.first)*(p.first-q.first) + (p.second-q.second)*(p.second-q.second)) / (sigma*sigma) );}; + return [=](std::pair p, std::pair q){return (1.0 / (std::sqrt(2*pi)*sigma)) * std::exp( -((p.first-q.first)*(p.first-q.first) + (p.second-q.second)*(p.second-q.second)) / (2*sigma*sigma) );}; } Kernel polynomial_kernel(double c, double d){ diff --git a/src/Persistence_representations/test/kernels.cpp b/src/Persistence_representations/test/kernels.cpp index 9db19123..c95e8086 100644 --- a/src/Persistence_representations/test/kernels.cpp +++ b/src/Persistence_representations/test/kernels.cpp @@ -29,21 +29,22 @@ #include #include #include // std::max +#include #include -#include -#include #include #include #include +using constant_scaling_function = Gudhi::Persistence_representations::constant_scaling_function; using SW = Gudhi::Persistence_representations::Sliced_Wasserstein; -using PWG = Gudhi::Persistence_representations::Persistence_weighted_gaussian; +using PWG = Gudhi::Persistence_representations::Persistence_heat_maps; +using Persistence_diagram = std::vector >; BOOST_AUTO_TEST_CASE(check_PWG) { Persistence_diagram v1, v2; 
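  // With v1 = {(0, 1)}, v2 = {(0, 2)}, the constant scaling function and sigma = 1, the
  // kernel-mode scalar product checked below reduces to one Gaussian_kernel evaluation:
  //   k((0,1), (0,2)) = 1/(sqrt(2*pi) * 1) * exp(-((0-0)^2 + (1-2)^2) / (2 * 1^2))
  //                   = exp(-0.5) / sqrt(2*pi)  (approximately 0.2420),
  // which is the reference value used in the BOOST_CHECK tolerance of 1e-3.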
v1.emplace_back(0,1); v2.emplace_back(0,2); - PWG pwg1(v1, 1.0, 1000, Gudhi::Persistence_representations::arctan_weight(1,1)); PWG pwgex1(v1, 1.0, -1, Gudhi::Persistence_representations::arctan_weight(1,1)); - PWG pwg2(v2, 1.0, 1000, Gudhi::Persistence_representations::arctan_weight(1,1)); PWG pwgex2(v2, 1.0, -1, Gudhi::Persistence_representations::arctan_weight(1,1)); - BOOST_CHECK(std::abs(pwg1.compute_scalar_product(pwg2) - pwgex1.compute_scalar_product(pwgex2)) <= 1e-1); + PWG pwg1(v1, Gudhi::Persistence_representations::Gaussian_kernel(1.0)); + PWG pwg2(v2, Gudhi::Persistence_representations::Gaussian_kernel(1.0)); + BOOST_CHECK(std::abs(pwg1.compute_scalar_product(pwg2) - std::exp(-0.5)/(std::sqrt(2*Gudhi::Persistence_representations::pi))) <= 1e-3); } BOOST_AUTO_TEST_CASE(check_SW) { -- cgit v1.2.3 From 6673dbcb6474d8521cf79dd6b7a1f342b17cee17 Mon Sep 17 00:00:00 2001 From: fgodi Date: Sun, 8 Jul 2018 08:23:13 +0000 Subject: ordered simplices git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3685 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 56442cd98146404e1b9eab10d32bb3d18aa2baee --- src/Toplex_map/include/gudhi/Toplex_map.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index 7a2e5b09..73d2c63d 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -25,7 +25,7 @@ public: /** Simplex is the type of simplices. * \ingroup toplex_map */ - typedef std::unordered_set Simplex; + typedef std::set Simplex; /** The type of the pointers to maximal simplices. * \ingroup toplex_map */ -- cgit v1.2.3 From a6ba309f1995700369e6b7b2c38f10ce0f9fd010 Mon Sep 17 00:00:00 2001 From: fgodi Date: Sun, 8 Jul 2018 08:36:05 +0000 Subject: doc lazy git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/toplex_map@3686 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 9d94007cdd821651df934054481a3eaedfcc8e50 --- src/Toplex_map/include/gudhi/Lazy_Toplex_map.h | 32 +++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h index 31f3da4b..25281998 100644 --- a/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Lazy_Toplex_map.h @@ -7,6 +7,9 @@ namespace Gudhi { +/** A Lazy_Toplex_map represents the simplicial complex. + * A "toplex" is a maximal simplex but not all simplices in a LTM are toplices. + * \ingroup toplex_map */ class Lazy_Toplex_map { public: @@ -27,20 +30,43 @@ public: * \ingroup toplex_map */ typedef Toplex_map::Simplex_ptr_set Simplex_ptr_set; + /** Adds the given simplex to the complex. + * The simplex must not have maximal coface in the complex. + * \ingroup toplex_map */ template - void insert_max_simplex(const Input_vertex_range &vertex_range); + void insert_independent_simplex(const Input_vertex_range &vertex_range); + + /** \brief Adds the given simplex to the complex. + * Nothing happens if the simplex has a coface in the complex. + * \ingroup toplex_map */ template bool insert_simplex(const Input_vertex_range &vertex_range); + + /** \brief Removes the given simplex and its cofaces from the complex. + * Its faces are kept inside. + * \ingroup toplex_map */ template void remove_simplex(const Input_vertex_range &vertex_range); + /** Does a simplex belong to the complex ? 
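// A minimal usage sketch for the operations documented in this header, assuming Vertex
// is the integer vertex type used throughout the Toplex_map module and that any STL
// container of vertices is an acceptable Input_vertex_range:
//
//   Gudhi::Lazy_Toplex_map ltm;
//   ltm.insert_simplex(std::vector<Vertex>{1, 2, 3});         // stores the toplex {1,2,3}
//   bool face   = ltm.membership(std::vector<Vertex>{1, 2});  // true: {1,2} is a face
//   Vertex kept = ltm.contraction(1, 2);                      // contract the edge {1,2}
//   std::size_t n = ltm.num_simplices();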
+ * \ingroup toplex_map */ template bool membership(const Input_vertex_range &vertex_range); + + + /** Do all the facets of a simplex belong to the complex ? + * \ingroup toplex_map */ template bool all_facets_inside(const Input_vertex_range &vertex_range); + /** Contracts one edge in the complex. + * The edge has to verify the link condition if you want to preserve topology. + * Returns the remaining vertex. + * \ingroup toplex_map */ Vertex contraction(const Vertex x, const Vertex y); + /** \brief Number of simplices stored. + * \ingroup toplex_map */ std::size_t num_simplices() const; std::unordered_map gamma0_lbounds; @@ -69,7 +95,7 @@ private: }; template -void Lazy_Toplex_map::insert_max_simplex(const Input_vertex_range &vertex_range){ +void Lazy_Toplex_map::insert_independent_simplex(const Input_vertex_range &vertex_range){ for(const Vertex& v : vertex_range) if(!gamma0_lbounds.count(v)) gamma0_lbounds.emplace(v,1); else gamma0_lbounds[v]++; @@ -116,7 +142,7 @@ void Lazy_Toplex_map::remove_simplex(const Input_vertex_range &vertex_range){ if(included(vertex_range, *sptr)){ erase_max(*sptr); for(const Simplex& f : facets(vertex_range)) - insert_max_simplex(f); + insert_independent_simplex(f); } } } -- cgit v1.2.3 From 6d00273077db54d609262e79702cbd5a94491105 Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 23 Aug 2018 21:11:47 +0000 Subject: git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3827 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a6a297a7f14703e55954706328072acdd447484c --- .../include/gudhi/Persistence_heat_maps.h | 2 -- .../include/gudhi/Sliced_Wasserstein.h | 42 ++++++---------------- src/cmake/modules/GUDHI_modules.cmake | 2 +- 3 files changed, 12 insertions(+), 34 deletions(-) diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h index 12188526..43f10b8c 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h +++ b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h @@ -1002,8 +1002,6 @@ double Persistence_heat_maps::compute_scalar_product(const } else{ - GUDHI_CHECK(this->approx != second.approx || this->f != second.f, std::invalid_argument("Error: different values for representations")); - int num_pts1 = this->d.size(); int num_pts2 = second.d.size(); double kernel_val = 0; for(int i = 0; i < num_pts1; i++) for(int j = 0; j < num_pts2; j++) diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index a3c0dc2f..6f67f7bc 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -97,23 +97,7 @@ class Sliced_Wasserstein { // Compute the angle formed by two points of a PD double compute_angle(const Persistence_diagram & diag, int i, int j) const { - std::pair vect; double x1,y1, x2,y2; - x1 = diag[i].first; y1 = diag[i].second; - x2 = diag[j].first; y2 = diag[j].second; - if (y1 - y2 > 0){ - vect.first = y1 - y2; - vect.second = x2 - x1;} - else{ - if(y1 - y2 < 0){ - vect.first = y2 - y1; - vect.second = x1 - x2; - } - else{ - vect.first = 0; - vect.second = abs(x1 - x2);} - } - double norm = std::sqrt(vect.first*vect.first + vect.second*vect.second); - return asin(vect.second/norm); + if(diag[i].second == diag[j].second) return pi/2; else return 
atan((diag[j].first-diag[i].first)/(diag[i].second-diag[j].second)); } // Compute the integral of |cos()| between alpha and beta, valid only if alpha is in [-pi,pi] and beta-alpha is in [0,pi] @@ -145,10 +129,7 @@ class Sliced_Wasserstein { double compute_int(double theta1, double theta2, int p, int q, const Persistence_diagram & diag1, const Persistence_diagram & diag2) const { double norm = std::sqrt( (diag1[p].first-diag2[q].first)*(diag1[p].first-diag2[q].first) + (diag1[p].second-diag2[q].second)*(diag1[p].second-diag2[q].second) ); double angle1; - if (diag1[p].first > diag2[q].first) - angle1 = theta1 - asin( (diag1[p].second-diag2[q].second)/norm ); - else - angle1 = theta1 - asin( (diag2[q].second-diag1[p].second)/norm ); + if (diag1[p].first == diag2[q].first) angle1 = theta1 - pi/2; else angle1 = theta1 - atan((diag1[p].second-diag2[q].second)/(diag1[p].first-diag2[q].first)); double angle2 = angle1 + theta2 - theta1; double integral = compute_int_cos(angle1,angle2); return norm*integral; @@ -164,24 +145,23 @@ class Sliced_Wasserstein { if(this->approx == -1){ // Add projections onto diagonal. - int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); double max_ordinate = std::numeric_limits::lowest(); + int n1, n2; n1 = diagram1.size(); n2 = diagram2.size(); double min_ordinate = std::numeric_limits::max(); double min_abscissa = std::numeric_limits::max(); for (int i = 0; i < n2; i++){ - max_ordinate = std::max(max_ordinate, diagram2[i].second); + min_ordinate = std::min(min_ordinate, diagram2[i].second); min_abscissa = std::min(min_abscissa, diagram2[i].first); diagram1.emplace_back( (diagram2[i].first+diagram2[i].second)/2, (diagram2[i].first+diagram2[i].second)/2 ); } for (int i = 0; i < n1; i++){ - max_ordinate = std::max(max_ordinate, diagram1[i].second); + min_ordinate = std::min(min_ordinate, diagram1[i].second); min_abscissa = std::min(min_abscissa, diagram1[i].first); diagram2.emplace_back( (diagram1[i].first+diagram1[i].second)/2, (diagram1[i].first+diagram1[i].second)/2 ); } int num_pts_dgm = diagram1.size(); // Slightly perturb the points so that the PDs are in generic positions. - int mag = 0; while(max_ordinate > 10){mag++; max_ordinate/=10;} - double thresh = pow(10,-5+mag); + double thresh_y = pow(10,log10(min_ordinate)-5); double thresh_x = pow(10,log10(min_abscissa)-5); srand(time(NULL)); for (int i = 0; i < num_pts_dgm; i++){ - diagram1[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); diagram1[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); - diagram2[i].first += thresh*(1.0-2.0*rand()/RAND_MAX); diagram2[i].second += thresh*(1.0-2.0*rand()/RAND_MAX); + diagram1[i].first += thresh_x*(1.0-2.0*rand()/RAND_MAX); diagram1[i].second += thresh_y*(1.0-2.0*rand()/RAND_MAX); + diagram2[i].first += thresh_x*(1.0-2.0*rand()/RAND_MAX); diagram2[i].second += thresh_y*(1.0-2.0*rand()/RAND_MAX); } // Compute all angles in both PDs. @@ -201,8 +181,8 @@ class Sliced_Wasserstein { // Initialize orders of the points of both PDs (given by ordinates when theta = -pi/2). 
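    // (Sketch of the sweep performed below, stated from the code above: as theta increases
    // from -pi/2 to pi/2, the order of the projections of each diagram onto the direction
    // theta changes only at the switch angles just computed, where two points of the same
    // diagram exchange ranks. Between two consecutive switch angles the optimal matching
    // between the two sorted projection sequences is therefore constant, so the integral
    // of || proj_theta(diagram1) - proj_theta(diagram2) ||_1 over theta splits into
    // closed-form pieces, each evaluated by compute_int() / compute_int_cos().)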
std::vector orderp1, orderp2; for (int i = 0; i < num_pts_dgm; i++){ orderp1.push_back(i); orderp2.push_back(i); } - std::sort( orderp1.begin(), orderp1.end(), [=](int i, int j){ if(diagram1[i].second != diagram1[j].second) return (diagram1[i].second < diagram1[j].second); else return (diagram1[i].first > diagram1[j].first); } ); - std::sort( orderp2.begin(), orderp2.end(), [=](int i, int j){ if(diagram2[i].second != diagram2[j].second) return (diagram2[i].second < diagram2[j].second); else return (diagram2[i].first > diagram2[j].first); } ); + std::sort( orderp1.begin(), orderp1.end(), [&](int i, int j){ if(diagram1[i].second != diagram1[j].second) return (diagram1[i].second < diagram1[j].second); else return (diagram1[i].first > diagram1[j].first); } ); + std::sort( orderp2.begin(), orderp2.end(), [&](int i, int j){ if(diagram2[i].second != diagram2[j].second) return (diagram2[i].second < diagram2[j].second); else return (diagram2[i].first > diagram2[j].first); } ); // Find the inverses of the orders. std::vector order1(num_pts_dgm); std::vector order2(num_pts_dgm); @@ -274,6 +254,7 @@ class Sliced_Wasserstein { public: /** \brief Sliced Wasserstein kernel constructor. + * \implements Topological_data_with_distances, Real_valued_topological_data, Topological_data_with_scalar_product * \ingroup Sliced_Wasserstein * * @param[in] _diagram persistence diagram. @@ -282,7 +263,6 @@ class Sliced_Wasserstein { * points on all directions are stored in memory to reduce computation time. * */ - // This class implements the following concepts: Topological_data_with_distances, Real_valued_topological_data, Topological_data_with_scalar_product Sliced_Wasserstein(const Persistence_diagram & _diagram, double _sigma = 1.0, int _approx = 10):diagram(_diagram), approx(_approx), sigma(_sigma) {build_rep();} /** \brief Evaluation of the kernel on a pair of diagrams. diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index 276fb2cc..f95d0c34 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -17,7 +17,7 @@ function(add_gudhi_module file_path) endfunction(add_gudhi_module) option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" ON) +option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF) option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON) option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON) option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON) -- cgit v1.2.3 From cc1a09fa5b00b7bea1d3f7bac0cce3c401d23dce Mon Sep 17 00:00:00 2001 From: mcarrier Date: Thu, 23 Aug 2018 21:13:02 +0000 Subject: rm Doxyfile git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/kernels@3828 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 501fca20271b60930f8078d29413a192c87f1962 --- src/Doxyfile | 2316 ---------------------------------------------------------- 1 file changed, 2316 deletions(-) delete mode 100644 src/Doxyfile diff --git a/src/Doxyfile b/src/Doxyfile deleted file mode 100644 index da753c04..00000000 --- a/src/Doxyfile +++ /dev/null @@ -1,2316 +0,0 @@ -# Doxyfile 1.8.6 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project. 
-# -# All text after a double hash (##) is considered a comment and is placed in -# front of the TAG it is preceding. -# -# All text after a single hash (#) is considered a comment and will be ignored. -# The format is: -# TAG = value [value, ...] -# For lists, items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (\" \"). - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all text -# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv -# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv -# for the list of possible encodings. -# The default value is: UTF-8. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by -# double-quotes, unless you are using Doxywizard) that should identify the -# project for which the documentation is generated. This name is used in the -# title of most generated pages and in a few other places. -# The default value is: My Project. - -PROJECT_NAME = "GUDHI" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. This -# could be handy for archiving the generated documentation or if some version -# control system is used. - -PROJECT_NUMBER = "2.2.0" - -# Using the PROJECT_BRIEF tag one can provide an optional one line description -# for a project that appears at the top of each page and should give viewer a -# quick idea about the purpose of the project. Keep the description short. - -PROJECT_BRIEF = "C++ library for Topological Data Analysis (TDA) and Higher Dimensional Geometry Understanding." - -# With the PROJECT_LOGO tag one can specify an logo or icon that is included in -# the documentation. The maximum height of the logo should not exceed 55 pixels -# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo -# to the output directory. - -PROJECT_LOGO = - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path -# into which the generated documentation will be written. If a relative path is -# entered, it will be relative to the location where doxygen was started. If -# left blank the current directory will be used. - -OUTPUT_DIRECTORY = "doc/" - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- -# directories (in 2 levels) under the output directory of each output format and -# will distribute the generated files over these directories. Enabling this -# option can be useful when feeding doxygen a huge amount of source files, where -# putting all generated files in the same directory would otherwise causes -# performance problems for the file system. -# The default value is: NO. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. 
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, -# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), -# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, -# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), -# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, -# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, -# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, -# Ukrainian and Vietnamese. -# The default value is: English. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member -# descriptions after the members that are listed in the file and class -# documentation (similar to Javadoc). Set to NO to disable this. -# The default value is: YES. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief -# description of a member or function before the detailed description -# -# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. -# The default value is: YES. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator that is -# used to form the text in various listings. Each string in this list, if found -# as the leading text of the brief description, will be stripped from the text -# and the result, after processing the whole list, is used as the annotated -# text. Otherwise, the brief description is used as-is. If left blank, the -# following values are used ($name is automatically replaced with the name of -# the entity):The $name class, The $name widget, The $name file, is, provides, -# specifies, contains, represents, a, an and the. - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# doxygen will generate a detailed section even if there is only a brief -# description. -# The default value is: NO. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. -# The default value is: NO. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path -# before files name in the file list and in the header files. If set to NO the -# shortest path that makes the file name unique will be used -# The default value is: YES. - -FULL_PATH_NAMES = YES - -# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. -# Stripping is only done if one of the specified strings matches the left-hand -# part of the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the path to -# strip. -# -# Note that you can specify absolute paths here, but also relative paths, which -# will be relative from the directory where doxygen is started. -# This tag requires that the tag FULL_PATH_NAMES is set to YES. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the -# path mentioned in the documentation of a class, which tells the reader which -# header file to include in order to use a class. 
If left blank only the name of -# the header file containing the class definition is used. Otherwise one should -# specify the list of include paths that are normally passed to the compiler -# using the -I flag. - -STRIP_FROM_INC_PATH = include concept - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but -# less readable) file names. This can be useful is your file systems doesn't -# support long names like on DOS, Mac, or CD-ROM. -# The default value is: NO. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the -# first line (until the first dot) of a Javadoc-style comment as the brief -# description. If set to NO, the Javadoc-style will behave just like regular Qt- -# style comments (thus requiring an explicit @brief command for a brief -# description.) -# The default value is: NO. - -JAVADOC_AUTOBRIEF = NO - -# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first -# line (until the first dot) of a Qt-style comment as the brief description. If -# set to NO, the Qt-style will behave just like regular Qt-style comments (thus -# requiring an explicit \brief command for a brief description.) -# The default value is: NO. - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a -# multi-line C++ special comment block (i.e. a block of //! or /// comments) as -# a brief description. This used to be the default behavior. The new default is -# to treat a multi-line C++ comment block as a detailed description. Set this -# tag to YES if you prefer the old behavior instead. -# -# Note that setting this tag to YES also means that rational rose comments are -# not recognized any more. -# The default value is: NO. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the -# documentation from any documented member that it re-implements. -# The default value is: YES. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a -# new page for each member. If set to NO, the documentation of a member will be -# part of the file/class/namespace that contains it. -# The default value is: NO. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen -# uses this value to replace tabs by spaces in code fragments. -# Minimum value: 1, maximum value: 16, default value: 4. - -TAB_SIZE = 4 - -# This tag can be used to specify a number of aliases that act as commands in -# the documentation. An alias has the form: -# name=value -# For example adding -# "sideeffect=@par Side Effects:\n" -# will allow you to put the command \sideeffect (or @sideeffect) in the -# documentation, which will result in a user-defined paragraph with heading -# "Side Effects:". You can put \n's in the value part of an alias to insert -# newlines. - -ALIASES = - -# This tag can be used to specify a number of word-keyword mappings (TCL only). -# A mapping has the form "name=value". For example adding "class=itcl::class" -# will allow you to use the command class in the itcl::class meaning. - -TCL_SUBST = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources -# only. Doxygen will then generate output that is more tailored for C. For -# instance, some of the names that are used will be different. The list of all -# members will be omitted, etc. -# The default value is: NO. 
- -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or -# Python sources only. Doxygen will then generate output that is more tailored -# for that language. For instance, namespaces will be presented as packages, -# qualified scopes will look different, etc. -# The default value is: NO. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources. Doxygen will then generate output that is tailored for Fortran. -# The default value is: NO. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for VHDL. -# The default value is: NO. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it -# parses. With this tag you can assign which parser to use for a given -# extension. Doxygen has a built-in mapping, but you can override or extend it -# using this tag. The format is ext=language, where ext is a file extension, and -# language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make -# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C -# (default is Fortran), use: inc=Fortran f=C. -# -# Note For files without extension you can use no_extension as a placeholder. -# -# Note that for custom extensions you also need to set FILE_PATTERNS otherwise -# the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments -# according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. -# The output of markdown processing is further processed by doxygen, so you can -# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in -# case of backward compatibilities issues. -# The default value is: YES. - -MARKDOWN_SUPPORT = YES - -# When enabled doxygen tries to link words that correspond to documented -# classes, or namespaces to their corresponding documentation. Such a link can -# be prevented in individual cases by by putting a % sign in front of the word -# or globally by setting AUTOLINK_SUPPORT to NO. -# The default value is: YES. - -AUTOLINK_SUPPORT = YES - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should set this -# tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); -# versus func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. -# The default value is: NO. - -BUILTIN_STL_SUPPORT = NO - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. -# The default value is: NO. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: -# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen -# will parse them like normal C++ but will assume all classes use public instead -# of private inheritance when no explicit protection keyword is present. -# The default value is: NO. 
- -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate -# getter and setter methods for a property. Setting this option to YES will make -# doxygen to replace the get and set methods by a property in the documentation. -# This will only work if the methods are indeed getting or setting a simple -# type. If this is not the case, or you want to show the methods anyway, you -# should set this option to NO. -# The default value is: YES. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. -# The default value is: NO. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES to allow class member groups of the same type -# (for instance a group of public functions) to be put as a subgroup of that -# type (e.g. under the Public Functions section). Set it to NO to prevent -# subgrouping. Alternatively, this can be done per class using the -# \nosubgrouping command. -# The default value is: YES. - -SUBGROUPING = YES - -# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions -# are shown inside the group in which they are included (e.g. using \ingroup) -# instead of on a separate page (for HTML and Man pages) or section (for LaTeX -# and RTF). -# -# Note that this feature does not work in combination with -# SEPARATE_MEMBER_PAGES. -# The default value is: NO. - -INLINE_GROUPED_CLASSES = NO - -# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions -# with only public data fields or simple typedef fields will be shown inline in -# the documentation of the scope in which they are defined (i.e. file, -# namespace, or group documentation), provided this scope is documented. If set -# to NO, structs, classes, and unions are shown on a separate page (for HTML and -# Man pages) or section (for LaTeX and RTF). -# The default value is: NO. - -INLINE_SIMPLE_STRUCTS = NO - -# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or -# enum is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically be -# useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. -# The default value is: NO. - -TYPEDEF_HIDES_STRUCT = NO - -# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This -# cache is used to resolve symbols given their name and scope. Since this can be -# an expensive process and often the same symbol appears multiple times in the -# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small -# doxygen will become slower. If the cache is too large, memory is wasted. The -# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range -# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 -# symbols. At the end of a run doxygen will report the cache usage and suggest -# the optimal cache size from a speed point of view. -# Minimum value: 0, maximum value: 9, default value: 0. 
- -LOOKUP_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. Private -# class members and static file members will be hidden unless the -# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. -# Note: This will also disable the warnings about undocumented members that are -# normally produced when WARNINGS is set to YES. -# The default value is: NO. - -EXTRACT_ALL = NO - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will -# be included in the documentation. -# The default value is: NO. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal -# scope will be included in the documentation. -# The default value is: NO. - -EXTRACT_PACKAGE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file will be -# included in the documentation. -# The default value is: NO. - -EXTRACT_STATIC = NO - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined -# locally in source files will be included in the documentation. If set to NO -# only classes defined in header files are included. Does not have any effect -# for Java sources. -# The default value is: YES. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local methods, -# which are defined in the implementation section but not in the interface are -# included in the documentation. If set to NO only methods in the interface are -# included. -# The default value is: NO. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base name of -# the file that contains the anonymous namespace. By default anonymous namespace -# are hidden. -# The default value is: NO. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all -# undocumented members inside documented classes or files. If set to NO these -# members will be included in the various overviews, but no documentation -# section is generated. This option has no effect if EXTRACT_ALL is enabled. -# The default value is: NO. - -HIDE_UNDOC_MEMBERS = YES - -# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. If set -# to NO these classes will be included in the various overviews. This option has -# no effect if EXTRACT_ALL is enabled. -# The default value is: NO. - -HIDE_UNDOC_CLASSES = YES - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend -# (class|struct|union) declarations. If set to NO these declarations will be -# included in the documentation. -# The default value is: NO. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any -# documentation blocks found inside the body of a function. If set to NO these -# blocks will be appended to the function's detailed documentation block. -# The default value is: NO. 
- -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation that is typed after a -# \internal command is included. If the tag is set to NO then the documentation -# will be excluded. Set it to YES to include the internal documentation. -# The default value is: NO. - -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file -# names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. -# The default value is: system dependent. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with -# their full class and namespace scopes in the documentation. If set to YES the -# scope will be hidden. -# The default value is: NO. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of -# the files that are included by a file in the documentation of that file. -# The default value is: YES. - -SHOW_INCLUDE_FILES = NO - -# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each -# grouped member an include statement to the documentation, telling the reader -# which file to include in order to use the member. -# The default value is: NO. - -SHOW_GROUPED_MEMB_INC = NO - -# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include -# files with double quotes in the documentation rather than with sharp brackets. -# The default value is: NO. - -FORCE_LOCAL_INCLUDES = NO - -# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the -# documentation for inline members. -# The default value is: YES. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the -# (detailed) documentation of file and class members alphabetically by member -# name. If set to NO the members will appear in declaration order. -# The default value is: YES. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief -# descriptions of file, namespace and class members alphabetically by member -# name. If set to NO the members will appear in declaration order. Note that -# this will also influence the order of the classes in the class list. -# The default value is: NO. - -SORT_BRIEF_DOCS = NO - -# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the -# (brief and detailed) documentation of class members so that constructors and -# destructors are listed first. If set to NO the constructors will appear in the -# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. -# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief -# member documentation. -# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting -# detailed member documentation. -# The default value is: NO. - -SORT_MEMBERS_CTORS_1ST = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy -# of group names into alphabetical order. If set to NO the group names will -# appear in their defined order. -# The default value is: NO. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by -# fully-qualified names, including namespaces. 
If set to NO, the class list will -# be sorted only by class name, not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the alphabetical -# list. -# The default value is: NO. - -SORT_BY_SCOPE_NAME = NO - -# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper -# type resolution of all parameters of a function it will reject a match between -# the prototype and the implementation of a member function even if there is -# only one candidate or it is obvious which candidate to choose by doing a -# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still -# accept a match between prototype and implementation in such cases. -# The default value is: NO. - -STRICT_PROTO_MATCHING = NO - -# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the -# todo list. This list is created by putting \todo commands in the -# documentation. -# The default value is: YES. - -GENERATE_TODOLIST = NO - -# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the -# test list. This list is created by putting \test commands in the -# documentation. -# The default value is: YES. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug -# list. This list is created by putting \bug commands in the documentation. -# The default value is: YES. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) -# the deprecated list. This list is created by putting \deprecated commands in -# the documentation. -# The default value is: YES. - -GENERATE_DEPRECATEDLIST= NO - -# The ENABLED_SECTIONS tag can be used to enable conditional documentation -# sections, marked by \if ... \endif and \cond -# ... \endcond blocks. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the -# initial value of a variable or macro / define can have for it to appear in the -# documentation. If the initializer consists of more lines than specified here -# it will be hidden. Use a value of 0 to hide initializers completely. The -# appearance of the value of individual variables and macros / defines can be -# controlled using \showinitializer or \hideinitializer command in the -# documentation regardless of this setting. -# Minimum value: 0, maximum value: 10000, default value: 30. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at -# the bottom of the documentation of classes and structs. If set to YES the list -# will mention the files that were used to generate the documentation. -# The default value is: YES. - -SHOW_USED_FILES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This -# will remove the Files entry from the Quick Index and from the Folder Tree View -# (if specified). -# The default value is: YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces -# page. This will remove the Namespaces entry from the Quick Index and from the -# Folder Tree View (if specified). -# The default value is: YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). 
Doxygen will invoke the program by executing (via -# popen()) the command command input-file, where command is the value of the -# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided -# by doxygen. Whatever the program writes to standard output is used as the file -# version. For an example see the documentation. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed -# by doxygen. The layout file controls the global structure of the generated -# output files in an output format independent way. To create the layout file -# that represents doxygen's defaults, run doxygen with the -l option. You can -# optionally specify a file name after the option, if omitted DoxygenLayout.xml -# will be used as the name of the layout file. -# -# Note that if you run doxygen from a directory containing a file called -# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE -# tag is left empty. - -LAYOUT_FILE = - -# The CITE_BIB_FILES tag can be used to specify one or more bib files containing -# the reference definitions. This must be a list of .bib files. The .bib -# extension is automatically appended if omitted. This requires the bibtex tool -# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. -# For LaTeX the style of the bibliography can be controlled using -# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the -# search path. Do not use file names with spaces, bibtex cannot handle them. See -# also \cite for info how to create references. - -CITE_BIB_FILES = biblio/bibliography.bib \ - biblio/how_to_cite_cgal.bib \ - biblio/how_to_cite_gudhi.bib - -#--------------------------------------------------------------------------- -# Configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated to -# standard output by doxygen. If QUIET is set to YES this implies that the -# messages are off. -# The default value is: NO. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES -# this implies that the warnings are on. -# -# Tip: Turn warnings on while writing the documentation. -# The default value is: YES. - -WARNINGS = YES - -# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate -# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag -# will automatically be disabled. -# The default value is: YES. - -WARN_IF_UNDOCUMENTED = YES - -# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some parameters -# in a documented function, or documenting parameters that don't exist or using -# markup commands wrongly. -# The default value is: YES. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that -# are documented, but have no documentation for their parameters or return -# value. If set to NO doxygen will only warn about wrong or incomplete parameter -# documentation, but not about the absence of documentation. -# The default value is: NO. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that doxygen -# can produce. 
The string should contain the $file, $line, and $text tags, which -# will be replaced by the file and line number from which the warning originated -# and the warning text. Optionally the format may contain $version, which will -# be replaced by the version of the file (if it could be obtained via -# FILE_VERSION_FILTER) -# The default value is: $file:$line: $text. - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning and error -# messages should be written. If left blank the output is written to standard -# error (stderr). - -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# Configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag is used to specify the files and/or directories that contain -# documented source files. You may enter file names like myfile.cpp or -# directories like /usr/src/myproject. Separate the files or directories with -# spaces. -# Note: If this tag is empty the current directory is searched. - -INPUT = - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses -# libiconv (or the iconv built into libc) for the transcoding. See the libiconv -# documentation (see: http://www.gnu.org/software/libiconv) for the list of -# possible encodings. -# The default value is: UTF-8. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and -# *.h) to filter out the source-files in the directories. If left blank the -# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, -# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, -# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, -# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, -# *.qsf, *.as and *.js. - -FILE_PATTERNS = - -# The RECURSIVE tag can be used to specify whether or not subdirectories should -# be searched for input files as well. -# The default value is: NO. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should be -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. -# -# Note that relative paths are relative to the directory from which doxygen is -# run. - -EXCLUDE = data/ \ - example/ \ - GudhUI/ \ - cmake/ \ - src/cython/ \ - include/gudhi_patches/ - -# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or -# directories that are symbolic links (a Unix file system feature) are excluded -# from the input. -# The default value is: NO. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. -# -# Note that the wildcards are matched against the file with absolute path, so to -# exclude all test directories for example use the pattern */test/* - -EXCLUDE_PATTERNS = */utilities/*/*.md - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. 
The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test -# -# Note that the wildcards are matched against the file with absolute path, so to -# exclude all test directories use the pattern */test/* - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or directories -# that contain example code fragments that are included (see the \include -# command). - -EXAMPLE_PATH = biblio/ \ - example/ \ - utilities/ - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and -# *.h) to filter out the source-files in the directories. If left blank all -# files are included. - -EXAMPLE_PATTERNS = - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude commands -# irrespective of the value of the RECURSIVE tag. -# The default value is: NO. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or directories -# that contain images that are to be included in the documentation (see the -# \image command). - -IMAGE_PATH = doc/Skeleton_blocker/ \ - doc/Alpha_complex/ \ - doc/common/ \ - doc/Cech_complex/ \ - doc/Contraction/ \ - doc/Simplex_tree/ \ - doc/Persistent_cohomology/ \ - doc/Witness_complex/ \ - doc/Bitmap_cubical_complex/ \ - doc/Rips_complex/ \ - doc/Subsampling/ \ - doc/Spatial_searching/ \ - doc/Tangential_complex/ \ - doc/Bottleneck_distance/ \ - doc/Nerve_GIC/ \ - doc/Persistence_representations/ \ - doc/Kernels/ - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command: -# -# -# -# where is the value of the INPUT_FILTER tag, and is the -# name of an input file. Doxygen will then use the output that the filter -# program writes to standard output. If FILTER_PATTERNS is specified, this tag -# will be ignored. -# -# Note that the filter must not add or remove lines; it is applied before the -# code is scanned, but not when the output code is generated. If lines are added -# or removed, the anchors will not be placed correctly. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. The filters are a list of the form: pattern=filter -# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how -# filters are used. If the FILTER_PATTERNS tag is empty or if none of the -# patterns match the file name, INPUT_FILTER is applied. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER ) will also be used to filter the input files that are used for -# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). -# The default value is: NO. - -FILTER_SOURCE_FILES = NO - -# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file -# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and -# it is also possible to disable source filtering for a specific pattern using -# *.ext= (so without naming a filter). -# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. 
- -FILTER_SOURCE_PATTERNS = - -# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that -# is part of the input, its contents will be placed on the main page -# (index.html). This can be useful if you have a project on for instance GitHub -# and want to reuse the introduction page also for the doxygen output. - -USE_MDFILE_AS_MAINPAGE = - -#--------------------------------------------------------------------------- -# Configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will be -# generated. Documented entities will be cross-referenced with these sources. -# -# Note: To get rid of all source code in the generated output, make sure that -# also VERBATIM_HEADERS is set to NO. -# The default value is: NO. - -SOURCE_BROWSER = NO - -# Setting the INLINE_SOURCES tag to YES will include the body of functions, -# classes and enums directly into the documentation. -# The default value is: NO. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any -# special comment blocks from generated source code fragments. Normal C, C++ and -# Fortran comments will always remain visible. -# The default value is: YES. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES then for each documented -# function all documented functions referencing it will be listed. -# The default value is: NO. - -REFERENCED_BY_RELATION = NO - -# If the REFERENCES_RELATION tag is set to YES then for each documented function -# all documented entities called/used by that function will be listed. -# The default value is: NO. - -REFERENCES_RELATION = NO - -# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set -# to YES, then the hyperlinks from functions in REFERENCES_RELATION and -# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will -# link to the documentation. -# The default value is: YES. - -REFERENCES_LINK_SOURCE = YES - -# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the -# source code will show a tooltip with additional information such as prototype, -# brief description and links to the definition and documentation. Since this -# will make the HTML file larger and loading of large files a bit slower, you -# can opt to disable this feature. -# The default value is: YES. -# This tag requires that the tag SOURCE_BROWSER is set to YES. - -SOURCE_TOOLTIPS = YES - -# If the USE_HTAGS tag is set to YES then the references to source code will -# point to the HTML generated by the htags(1) tool instead of doxygen built-in -# source browser. The htags tool is part of GNU's global source tagging system -# (see http://www.gnu.org/software/global/global.html). You will need version -# 4.8.6 or higher. -# -# To use it do the following: -# - Install the latest version of global -# - Enable SOURCE_BROWSER and USE_HTAGS in the config file -# - Make sure the INPUT points to the root of the source tree -# - Run doxygen as normal -# -# Doxygen will invoke htags (and that will in turn invoke gtags), so these -# tools must be available from the command line (i.e. in the search path). -# -# The result: instead of the source browser generated by doxygen, the links to -# source code will now point to the output of htags. -# The default value is: NO. -# This tag requires that the tag SOURCE_BROWSER is set to YES. 
- -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a -# verbatim copy of the header file for each class for which an include is -# specified. Set to NO to disable this. -# See also: Section \class. -# The default value is: YES. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# Configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all -# compounds will be generated. Enable this if the project contains a lot of -# classes, structs, unions or interfaces. -# The default value is: YES. - -ALPHABETICAL_INDEX = YES - -# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in -# which the alphabetical index list will be split. -# Minimum value: 1, maximum value: 20, default value: 5. -# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all classes will -# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag -# can be used to specify a prefix (or a list of prefixes) that should be ignored -# while generating the index headers. -# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output -# The default value is: YES. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a -# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of -# it. -# The default directory is: html. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each -# generated HTML page (for example: .htm, .php, .asp). -# The default value is: .html. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a user-defined HTML header file for -# each generated HTML page. If the tag is left blank doxygen will generate a -# standard header. -# -# To get valid HTML the header file that includes any scripts and style sheets -# that doxygen needs, which is dependent on the configuration options used (e.g. -# the setting GENERATE_TREEVIEW). It is highly recommended to start with a -# default header using -# doxygen -w html new_header.html new_footer.html new_stylesheet.css -# YourConfigFile -# and then modify the file new_header.html. See also section "Doxygen usage" -# for information on how to generate the default header that doxygen normally -# uses. -# Note: The header is subject to change so you typically have to regenerate the -# default header when upgrading to a newer version of doxygen. For a description -# of the possible markers and block names see the documentation. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_HEADER = doc/common/header.html - -# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each -# generated HTML page. If the tag is left blank doxygen will generate a standard -# footer. 
See HTML_HEADER for more information on how to generate a default -# footer and what special commands can be used inside the footer. See also -# section "Doxygen usage" for information on how to generate the default footer -# that doxygen normally uses. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_FOOTER = doc/common/footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style -# sheet that is used by each HTML page. It can be used to fine-tune the look of -# the HTML output. If left blank doxygen will generate a default style sheet. -# See also section "Doxygen usage" for information on how to generate the style -# sheet that doxygen normally uses. -# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as -# it is more robust and this tag (HTML_STYLESHEET) will in the future become -# obsolete. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_STYLESHEET = doc/common/stylesheet.css - -# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- -# defined cascading style sheet that is included after the standard style sheets -# created by doxygen. Using this option one can overrule certain style aspects. -# This is preferred over using HTML_STYLESHEET since it does not replace the -# standard style sheet and is therefor more robust against future updates. -# Doxygen will copy the style sheet file to the output directory. For an example -# see the documentation. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_EXTRA_STYLESHEET = - -# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or -# other source files which should be copied to the HTML output directory. Note -# that these files will be copied to the base HTML output directory. Use the -# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these -# files. In the HTML_STYLESHEET file, use the file name only. Also note that the -# files will be copied as-is; there are no commands or markers available. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_EXTRA_FILES = - -# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen -# will adjust the colors in the stylesheet and background images according to -# this color. Hue is specified as an angle on a colorwheel, see -# http://en.wikipedia.org/wiki/Hue for more information. For instance the value -# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 -# purple, and 360 is red again. -# Minimum value: 0, maximum value: 359, default value: 220. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_COLORSTYLE_HUE = 220 - -# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors -# in the HTML output. For a value of 0 the output will use grayscales only. A -# value of 255 will produce the most vivid colors. -# Minimum value: 0, maximum value: 255, default value: 100. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_COLORSTYLE_SAT = 100 - -# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the -# luminance component of the colors in the HTML output. Values below 100 -# gradually make the output lighter, whereas values above 100 make the output -# darker. The value divided by 100 is the actual gamma applied, so 80 represents -# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not -# change the gamma. 
-# Minimum value: 40, maximum value: 240, default value: 80. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_COLORSTYLE_GAMMA = 80 - -# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML -# page will contain the date and time when the page was generated. Setting this -# to NO can help when comparing the output of multiple runs. -# The default value is: YES. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_TIMESTAMP = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_DYNAMIC_SECTIONS = NO - -# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries -# shown in the various tree structured indices initially; the user can expand -# and collapse entries dynamically later on. Doxygen will expand the tree to -# such a level that at most the specified number of entries are visible (unless -# a fully collapsed tree already exceeds this amount). So setting the number of -# entries 1 will produce a full collapsed tree by default. 0 is a special value -# representing an infinite number of entries and will result in a full expanded -# tree by default. -# Minimum value: 0, maximum value: 9999, default value: 100. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_INDEX_NUM_ENTRIES = 100 - -# If the GENERATE_DOCSET tag is set to YES, additional index files will be -# generated that can be used as input for Apple's Xcode 3 integrated development -# environment (see: http://developer.apple.com/tools/xcode/), introduced with -# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a -# Makefile in the HTML output directory. Running make will produce the docset in -# that directory and running make install will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at -# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_DOCSET = NO - -# This tag determines the name of the docset feed. A documentation feed provides -# an umbrella under which multiple documentation sets from a single provider -# (such as a company or product suite) can be grouped. -# The default value is: Doxygen generated docs. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# This tag specifies a string that should uniquely identify the documentation -# set bundle. This should be a reverse domain-name style string, e.g. -# com.mycompany.MyDocSet. Doxygen will append .docset to the name. -# The default value is: org.doxygen.Project. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify -# the documentation publisher. This should be a reverse domain-name style -# string, e.g. com.mycompany.MyDocSet.documentation. -# The default value is: org.doxygen.Publisher. -# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_PUBLISHER_ID = org.doxygen.Publisher - -# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. -# The default value is: Publisher. 
-# This tag requires that the tag GENERATE_DOCSET is set to YES. - -DOCSET_PUBLISHER_NAME = Publisher - -# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three -# additional HTML index files: index.hhp, index.hhc, and index.hhk. The -# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop -# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on -# Windows. -# -# The HTML Help Workshop contains a compiler that can convert all HTML output -# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML -# files are now used as the Windows 98 help format, and will replace the old -# Windows help format (.hlp) on all Windows platforms in the future. Compressed -# HTML files also contain an index, a table of contents, and you can search for -# words in the documentation. The HTML workshop also contains a viewer for -# compressed HTML files. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_HTMLHELP = NO - -# The CHM_FILE tag can be used to specify the file name of the resulting .chm -# file. You can add a path in front of the file if the result should not be -# written to the html output directory. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -CHM_FILE = - -# The HHC_LOCATION tag can be used to specify the location (absolute path -# including file name) of the HTML help compiler ( hhc.exe). If non-empty -# doxygen will try to run the HTML help compiler on the generated index.hhp. -# The file has to be specified with full path. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -HHC_LOCATION = - -# The GENERATE_CHI flag controls if a separate .chi index file is generated ( -# YES) or that it should be included in the master .chm file ( NO). -# The default value is: NO. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -GENERATE_CHI = NO - -# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) -# and project file content. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -CHM_INDEX_ENCODING = - -# The BINARY_TOC flag controls whether a binary table of contents is generated ( -# YES) or a normal table of contents ( NO) in the .chm file. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members to -# the table of contents of the HTML help documentation and to the tree view. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTMLHELP is set to YES. - -TOC_EXPAND = NO - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and -# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that -# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help -# (.qch) of the generated HTML documentation. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify -# the file name of the resulting .qch file. The path specified is relative to -# the HTML output folder. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help -# Project output. 
For more information please see Qt Help Project / Namespace -# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). -# The default value is: org.doxygen.Project. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_NAMESPACE = org.doxygen.Project - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt -# Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- -# folders). -# The default value is: doc. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_VIRTUAL_FOLDER = doc - -# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom -# filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- -# filters). -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the -# custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- -# filters). -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this -# project's filter section matches. Qt Help Project / Filter Attributes (see: -# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHP_SECT_FILTER_ATTRS = - -# The QHG_LOCATION tag can be used to specify the location of Qt's -# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the -# generated .qhp file. -# This tag requires that the tag GENERATE_QHP is set to YES. - -QHG_LOCATION = - -# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be -# generated, together with the HTML files, they form an Eclipse help plugin. To -# install this plugin and make it available under the help contents menu in -# Eclipse, the contents of the directory containing the HTML and XML files needs -# to be copied into the plugins directory of eclipse. The name of the directory -# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. -# After copying Eclipse needs to be restarted before the help appears. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_ECLIPSEHELP = NO - -# A unique identifier for the Eclipse help plugin. When installing the plugin -# the directory name containing the HTML and XML files should also have this -# name. Each documentation set should have its own identifier. -# The default value is: org.doxygen.Project. -# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. - -ECLIPSE_DOC_ID = org.doxygen.Project - -# If you want full control over the layout of the generated HTML pages it might -# be necessary to disable the index and replace it with your own. The -# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top -# of each HTML page. A value of NO enables the index and the value YES disables -# it. Since the tabs in the index contain the same information as the navigation -# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. 
- -DISABLE_INDEX = YES - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. If the tag -# value is set to YES, a side panel will be generated containing a tree-like -# index structure (just like the one that is generated for HTML Help). For this -# to work a browser that supports JavaScript, DHTML, CSS and frames is required -# (i.e. any modern browser). Windows users are probably better off using the -# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can -# further fine-tune the look of the index. As an example, the default style -# sheet generated by doxygen has an example that shows how to put an image at -# the root of the tree instead of the PROJECT_NAME. Since the tree basically has -# the same information as the tab index, you could consider setting -# DISABLE_INDEX to YES when enabling this option. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -GENERATE_TREEVIEW = YES - -# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that -# doxygen will group on one line in the generated HTML documentation. -# -# Note that a value of 0 will completely suppress the enum values from appearing -# in the overview section. -# Minimum value: 0, maximum value: 20, default value: 4. -# This tag requires that the tag GENERATE_HTML is set to YES. - -ENUM_VALUES_PER_LINE = 4 - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used -# to set the initial width (in pixels) of the frame in which the tree is shown. -# Minimum value: 0, maximum value: 1500, default value: 250. -# This tag requires that the tag GENERATE_HTML is set to YES. - -TREEVIEW_WIDTH = 250 - -# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to -# external symbols imported via tag files in a separate window. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -EXT_LINKS_IN_WINDOW = NO - -# Use this tag to change the font size of LaTeX formulas included as images in -# the HTML documentation. When you change the font size after a successful -# doxygen run you need to manually remove any form_*.png images from the HTML -# output directory to force them to be regenerated. -# Minimum value: 8, maximum value: 50, default value: 10. -# This tag requires that the tag GENERATE_HTML is set to YES. - -FORMULA_FONTSIZE = 10 - -# Use the FORMULA_TRANPARENT tag to determine whether or not the images -# generated for formulas are transparent PNGs. Transparent PNGs are not -# supported properly for IE 6.0, but are supported on all modern browsers. -# -# Note that when changing this option you need to delete any form_*.png files in -# the HTML output directory before the changes have effect. -# The default value is: YES. -# This tag requires that the tag GENERATE_HTML is set to YES. - -FORMULA_TRANSPARENT = YES - -# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# http://www.mathjax.org) which uses client side Javascript for the rendering -# instead of using prerendered bitmaps. Use this if you do not have LaTeX -# installed or if you want to formulas look prettier in the HTML output. When -# enabled you may also need to install MathJax separately and configure the path -# to it using the MATHJAX_RELPATH option. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. 
- -USE_MATHJAX = YES - -# When MathJax is enabled you can set the default output format to be used for -# the MathJax output. See the MathJax site (see: -# http://docs.mathjax.org/en/latest/output.html) for more details. -# Possible values are: HTML-CSS (which is slower, but has the best -# compatibility), NativeMML (i.e. MathML) and SVG. -# The default value is: HTML-CSS. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_FORMAT = HTML-CSS - -# When MathJax is enabled you need to specify the location relative to the HTML -# output directory using the MATHJAX_RELPATH option. The destination directory -# should contain the MathJax.js script. For instance, if the mathjax directory -# is located at the same level as the HTML output directory, then -# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax -# Content Delivery Network so you can quickly see the result without installing -# MathJax. However, it is strongly recommended to install a local copy of -# MathJax from http://www.mathjax.org before deployment. -# The default value is: http://cdn.mathjax.org/mathjax/latest. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_RELPATH = ../common - -# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax -# extension names that should be enabled during MathJax rendering. For example -# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols - -# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces -# of code that will be used on startup of the MathJax code. See the MathJax site -# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an -# example see the documentation. -# This tag requires that the tag USE_MATHJAX is set to YES. - -MATHJAX_CODEFILE = - -# When the SEARCHENGINE tag is enabled doxygen will generate a search box for -# the HTML output. The underlying search engine uses javascript and DHTML and -# should work on any modern browser. Note that when using HTML help -# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) -# there is already a search function so this one should typically be disabled. -# For large projects the javascript based search engine can be slow, then -# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to -# search using the keyboard; to jump to the search box use + S -# (what the is depends on the OS and browser, but it is typically -# , /
- - - - -
- Author: Vincent Rouvreau
- Introduced in: GUDHI 1.3.0
- Copyright: GPL v3
- Requires: \ref eigen3 and
- \ref cgal ≥ 4.7.0 for Alpha_complex
- \ref cgal ≥ 4.11.0 for Alpha_complex_3d -
- Alpha_complex is a simplicial complex constructed from the finite cells of a Delaunay Triangulation.
- The filtration value of each simplex is computed as the square of the circumradius of the simplex if the - circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration - values of the codimension 1 cofaces that make it not Gabriel otherwise. - All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into - the complex.
- User manual: \ref alpha_complex - Reference manual: Gudhi::alpha_complex::Alpha_complex and - Gudhi::alpha_complex::Alpha_complex_3d -
- \subsection CechComplexDataStructure Čech complex - \image html "cech_complex_representation.png" "Čech complex representation" - - - - - -
- Author: Vincent Rouvreau
- Introduced in: GUDHI 2.2.0
- Copyright: GPL v3
-
- The Čech complex is a simplicial complex constructed from a proximity graph.
- The set of all simplices is filtered by the radius of their minimal enclosing ball.
- User manual: \ref cech_complex - Reference manual: Gudhi::cech_complex::Cech_complex -
- \subsection CubicalComplexDataStructure Cubical complex - \image html "Cubical_complex_representation.png" "Cubical complex representation" - - - - - -
- Author: Pawel Dlotko
- Introduced in: GUDHI 1.3.0
- Copyright: GPL v3
-
- The cubical complex is an example of a structured complex useful in computational mathematics (specially - rigorous numerics) and image analysis.
- User manual: \ref cubical_complex - Reference manual: Gudhi::cubical_complex::Bitmap_cubical_complex -
- \subsection RipsComplexDataStructure Rips complex - \image html "rips_complex_representation.png" "Rips complex representation" - - - - - -
- Author: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse
- Introduced in: GUDHI 2.0.0
- Copyright: GPL v3
-
- Rips_complex is a simplicial complex constructed from a one skeleton graph.
- The filtration value of each edge is computed from a user-given distance function and is inserted until a - user-given threshold value.
- This complex can be built from a point cloud and a distance function, or from a distance matrix.
- User manual: \ref rips_complex - Reference manual: Gudhi::rips_complex::Rips_complex -
- \subsection SimplexTreeDataStructure Simplex tree - \image html "Simplex_tree_representation.png" "Simplex tree representation" - - - - - -
- Author: Clément Maria
- Introduced in: GUDHI 1.0.0
- Copyright: GPL v3
-
- The simplex tree is an efficient and flexible - data structure for representing general (filtered) simplicial complexes. The data structure - is described in \cite boissonnatmariasimplextreealgorithmica .
- User manual: \ref simplex_tree - Reference manual: Gudhi::Simplex_tree -
- \subsection CoverComplexDataStructure Cover Complexes - \image html "gicvisu.jpg" "Graph Induced Complex of a point cloud." - - - - - -
- Author: Mathieu Carrière
- Introduced in: GUDHI 2.1.0
- Copyright: GPL v3
- Requires: \ref cgal ≥ 4.8.1 -
- Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain - topological information about the input data. They can be computed with a cover of the - data, that comes i.e. from the preimage of a family of intervals covering the image - of a scalar-valued function defined on the data.
- User manual: \ref cover_complex - Reference manual: Gudhi::cover_complex::Cover_complex -
- \subsection SkeletonBlockerDataStructure Skeleton blocker - \image html "ds_representation.png" "Skeleton blocker representation" - - - - - -
- Author: David Salinas
- Introduced in: GUDHI 1.1.0
- Copyright: GPL v3
-
- The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit* - representation of its simplices \cite socg_blockers_2011,\cite blockers2012. Intuitively, it just stores the - 1-skeleton of a simplicial complex with a graph and the set of its "missing faces" that is very small in practice. - This data-structure handles all simplicial complexes operations such as simplex enumeration or simplex removal but - operations that are particularly efficient are operations that do not require simplex enumeration such as edge - iteration, link computation or simplex contraction.
- User manual: \ref skbl - Reference manual: Gudhi::skeleton_blocker::Skeleton_blocker_complex -
- \subsection TangentialComplexDataStructure Tangential complex - \image html "tc_examples.png" "Tangential complex representation" - - - - - -
- Author: Clément Jamin
- Introduced in: GUDHI 2.0.0
- Copyright: GPL v3
- Requires: \ref cgal ≥ 4.8.1 and \ref eigen3 -
- A Tangential Delaunay complex is a simplicial complex - designed to reconstruct a \f$ k \f$-dimensional manifold embedded in \f$ d \f$-dimensional Euclidean space. - The input is a point sample coming from an unknown manifold. - The running time depends only linearly on the extrinsic dimension \f$ d \f$ - and exponentially on the intrinsic dimension \f$ k \f$.
- User manual: \ref tangential_complex - Reference manual: Gudhi::tangential_complex::Tangential_complex -
- \subsection ToplexMapDataStructure Toplex Map - \image html "map.png" "Toplex map representation" - - - - - - - \subsection WitnessComplexDataStructure Witness complex - \image html "Witness_complex_representation.png" "Witness complex representation" -
- Author: François Godi
- Introduced in: GUDHI 2.1.0
- Copyright: GPL v3
-
- The Toplex map data structure is composed firstly of a raw storage of toplices (the maximal simplices) - and secondly of a map which associate any vertex to a set of pointers toward all toplices - containing this vertex. - User manual: \ref toplex_map - Reference manual: Gudhi::Toplex_map -
- - - - -
- Author: Siargey Kachanovich
- Introduced in: GUDHI 1.3.0
- Copyright: GPL v3
- Euclidean version requires: \ref cgal ≥ 4.6.0 and \ref eigen3 -
- Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$. - The data structure is described in \cite boissonnatmariasimplextreealgorithmica .
- User manual: \ref witness_complex - Reference manual: Gudhi::witness_complex::SimplicialComplexForWitness -
- - \section Toolbox Toolbox - - \subsection BottleneckDistanceToolbox Bottleneck distance - \image html "perturb_pd.png" "Bottleneck distance is the length of the longest edge" - - - - - -
- Author: François Godi
- Introduced in: GUDHI 2.0.0
- Copyright: GPL v3
- Requires: \ref cgal ≥ 4.8.1 -
- Bottleneck distance measures the similarity between two persistence diagrams. - It's the shortest distance b for which there exists a perfect matching between - the points of the two diagrams (+ all the diagonal points) such that - any couple of matched points are at distance at most b. -
- User manual: \ref bottleneck_distance -
- \subsection ContractionToolbox Contraction - \image html "sphere_contraction_representation.png" "Sphere contraction example" - - - - - -
- Author: David Salinas
- Introduced in: GUDHI 1.1.0
- Copyright: GPL v3
-
- The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge - simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most - used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice, - the size of this data-structure is even much lower than the total number of simplices.
- User manual: \ref contr -
- \subsection PersistentCohomologyToolbox Persistent Cohomology - \image html "3DTorus_poch.png" "Rips Persistent Cohomology on a 3D Torus" - - - - - -
- Author: Clément Maria
- Introduced in: GUDHI 1.0.0
- Copyright: GPL v3
-
- The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing - global topological features like connected components, holes, cavities, etc. Persistent homology studies the - evolution -- birth, life and death -- of these features when the topological space is changing. Consequently, the - theory is essentially composed of three elements: topological spaces, their homology groups and an evolution - scheme. - Computation of persistent cohomology using the algorithm of \cite DBLP:journals/dcg/SilvaMV11 and - \cite DBLP:journals/corr/abs-1208-5018 and the Compressed Annotation Matrix implementation of - \cite DBLP:conf/esa/BoissonnatDM13 .
- User manual: \ref persistent_cohomology - Reference manual: Gudhi::persistent_cohomology::Persistent_cohomology -
- \subsection PersistenceRepresentationsToolbox Persistence representations - \image html "average_landscape.png" "Persistence representations" - - - - - -
- Author: Pawel Dlotko
- Introduced in: GUDHI 2.1.0
- Copyright: GPL v3
-
- It contains implementation of various representations of persistence diagrams; diagrams themselves, persistence - landscapes (rigorous and grid version), persistence heath maps, vectors and others. It implements basic - functionalities which are neccessary to use persistence in statistics and machine learning.
- User manual: \ref Persistence_representations -
- -*/ diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md new file mode 100644 index 00000000..61efd582 --- /dev/null +++ b/src/common/doc/main_page.md @@ -0,0 +1,391 @@ +[TOC] + +# The C++ library {#main_page} +\image html "Gudhi_banner.png" +



+ +## Complexes {#Complexes} +### Cubical complex + + + + + + + + + + +
+ \image html "Cubical_complex_representation.png" + + The cubical complex is an example of a structured complex useful in computational mathematics (specially + rigorous numerics) and image analysis.
+
+ Author: Pawel Dlotko
+ Introduced in: GUDHI 1.3.0
+ Copyright: GPL v3
+
+ User manual: \ref cubical_complex - Reference manual: Gudhi::cubical_complex::Bitmap_cubical_complex +
+ +### Simplicial complex + +#### Alpha complex + + + + + + + + + + +
+ \image html "alpha_complex_representation.png" + + Alpha complex is a simplicial complex constructed from the finite cells of a Delaunay Triangulation.
+ The filtration value of each simplex is computed as the square of the circumradius of the simplex if the + circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration + values of the codimension 1 cofaces that make it not Gabriel otherwise. + All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into + the complex.
+
+ Author: Vincent Rouvreau
+ Introduced in: GUDHI 1.3.0
+ Copyright: GPL v3
+ Requires: \ref eigen3 and
+ \ref cgal ≥ 4.7.0 for Alpha_complex
+ \ref cgal ≥ 4.11.0 for Alpha_complex_3d +
+ User manual: \ref alpha_complex - Reference manual: Gudhi::alpha_complex::Alpha_complex and + Gudhi::alpha_complex::Alpha_complex_3d +
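For illustration, a minimal sketch of building an alpha complex from a handful of 2D points, assuming the dD Alpha_complex template instantiated with a CGAL Epick_d kernel (the points, and omitting an explicit max_alpha_square, are arbitrary choices):

```cpp
#include <gudhi/Alpha_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epick_d.h>
#include <vector>

int main() {
  using Kernel = CGAL::Epick_d<CGAL::Dimension_tag<2>>;
  using Point = Kernel::Point_d;
  std::vector<Point> points = {Point(1., 1.), Point(7., 0.), Point(4., 6.), Point(9., 6.)};

  // Delaunay triangulation of the points, then a Simplex_tree filled with
  // the alpha filtration values (squared circumradii).
  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points);
  Gudhi::Simplex_tree<> simplex;
  alpha_complex_from_points.create_complex(simplex);
  return 0;
}
```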
+ +#### Čech complex + + + + + + + + + + +
+ \image html "cech_complex_representation.png" + + The Čech complex is a simplicial complex constructed from a proximity graph. + The set of all simplices is filtered by the radius of their minimal enclosing ball. + + Author: Vincent Rouvreau
+ Introduced in: GUDHI 2.2.0
+ Copyright: GPL v3
+
+ User manual: \ref cech_complex - Reference manual: Gudhi::cech_complex::Cech_complex +
+ +#### Rips complex + + + + + + + + + + +
+ \image html "rips_complex_representation.png" + + Rips_complex is a simplicial complex constructed from a one skeleton graph.
+ The filtration value of each edge is computed from a user-given distance function and is inserted until a + user-given threshold value.
+ This complex can be built from a point cloud and a distance function, or from a distance matrix. +
+ Author: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse
+ Introduced in: GUDHI 2.0.0
+ Copyright: GPL v3
+
+ User manual: \ref rips_complex - Reference manual: Gudhi::rips_complex::Rips_complex +
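For illustration, a minimal sketch of building a Rips complex from a small point cloud, assuming the constructor taking a point range, a threshold and Gudhi::Euclidean_distance (the points, threshold and expansion dimension are arbitrary):

```cpp
#include <gudhi/Rips_complex.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/distance_functions.h>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Filtration_value = Simplex_tree::Filtration_value;
  using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;

  std::vector<std::vector<double>> points = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};
  double threshold = 1.5;  // edges longer than this are not inserted

  // One-skeleton graph from the point cloud, then expansion up to dimension 2.
  Rips_complex rips_complex_from_points(points, threshold, Gudhi::Euclidean_distance());
  Simplex_tree stree;
  rips_complex_from_points.create_complex(stree, 2);
  return 0;
}
```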
+ +#### Witness complex + + + + + + + + + + +
+ \image html "Witness_complex_representation.png" + + Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$. + The data structure is described in \cite boissonnatmariasimplextreealgorithmica . + + Author: Siargey Kachanovich
+ Introduced in: GUDHI 1.3.0
+ Copyright: GPL v3
+ Euclidean version requires: \ref cgal ≥ 4.6.0 and \ref eigen3 +
+ User manual: \ref witness_complex - Reference manual: Gudhi::witness_complex::SimplicialComplexForWitness +
+ +### Cover Complexes + + + + + + + + + +
+ \image html "gicvisu.jpg" + + Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain + topological information about the input data. They can be computed with a cover of the + data, that comes i.e. from the preimage of a family of intervals covering the image + of a scalar-valued function defined on the data.
+ User manual: \ref cover_complex - Reference manual: Gudhi::cover_complex::Cover_complex +
+ Author: Mathieu Carrière
+ Introduced in: GUDHI 2.1.0
+ Copyright: GPL v3
+ Requires: \ref cgal ≥ 4.8.1 +
+ User manual: \ref cover_complex - Reference manual: Gudhi::cover_complex::Cover_complex +
+ +## Data structures and basic operations {#DataStructuresAndBasicOperations} + +### Data structures + +#### Simplex tree + + + + + + + + + +
+ \image html "Simplex_tree_representation.png" + + The simplex tree is an efficient and flexible + data structure for representing general (filtered) simplicial complexes. The data structure + is described in \cite boissonnatmariasimplextreealgorithmica . + + Author: Clément Maria
+ Introduced in: GUDHI 1.0.0
+ Copyright: GPL v3
+
+ User manual: \ref simplex_tree - Reference manual: Gudhi::Simplex_tree +
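For illustration, a minimal sketch of filling and querying a simplex tree (the simplices and filtration values are arbitrary):

```cpp
#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <vector>

int main() {
  Gudhi::Simplex_tree<> stree;
  // Insert a triangle and an extra edge; all missing faces are inserted as well.
  std::vector<int> triangle = {0, 1, 2};
  std::vector<int> edge = {2, 3};
  stree.insert_simplex_and_subfaces(triangle, 0.3);
  stree.insert_simplex_and_subfaces(edge, 0.5);

  std::cout << "num_simplices = " << stree.num_simplices()
            << ", dimension = " << stree.dimension() << std::endl;
  return 0;
}
```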
+ +#### Skeleton blocker + + + + + + + + + + +
+ \image html "ds_representation.png" + + The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit* + representation of its simplices \cite socg_blockers_2011,\cite blockers2012. Intuitively, it just stores the + 1-skeleton of a simplicial complex with a graph and the set of its "missing faces" that is very small in practice. + This data-structure handles all simplicial complexes operations such as simplex enumeration or simplex removal but + operations that are particularly efficient are operations that do not require simplex enumeration such as edge + iteration, link computation or simplex contraction. + + Author: David Salinas
+ Introduced in: GUDHI 1.1.0
+ Copyright: GPL v3
+
+ User manual: \ref skbl - Reference manual: Gudhi::skeleton_blocker::Skeleton_blocker_complex +
+ +#### Toplex Map + + + + + + + + + + +
+ \image html "map.png" + + The Toplex map data structure is composed firstly of a raw storage of toplices (the maximal simplices) + and secondly of a map which associate any vertex to a set of pointers toward all toplices + containing this vertex. + + Author: François Godi
+ Introduced in: GUDHI 2.1.0
+ Copyright: GPL v3
+
+ User manual: \ref toplex_map - Reference manual: Gudhi::Toplex_map +
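The idea behind the structure can be sketched as follows; this is an illustrative fragment with hypothetical names, not the Gudhi::Toplex_map interface:

```cpp
#include <memory>
#include <set>
#include <unordered_map>

using Vertex = std::size_t;
using Simplex = std::set<Vertex>;             // a toplex is a maximal simplex
using Simplex_ptr = std::shared_ptr<Simplex>;

struct Toplex_map_sketch {
  // Each vertex points to the set of toplices containing it; the Simplex
  // objects themselves are shared, which plays the role of the raw storage.
  std::unordered_map<Vertex, std::set<Simplex_ptr>> toplices_of;

  void insert_toplex(const Simplex& sigma) {
    auto sptr = std::make_shared<Simplex>(sigma);
    for (Vertex v : sigma) toplices_of[v].insert(sptr);
  }
};

int main() {
  Toplex_map_sketch tm;
  tm.insert_toplex({1, 2, 3});
  tm.insert_toplex({2, 3, 4});
  return tm.toplices_of.at(2).size() == 2 ? 0 : 1;
}
```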
+ +### Basic operations + +#### Contraction + + + + + + + + + + +
+ \image html "sphere_contraction_representation.png" + + Author: David Salinas
+ Introduced in: GUDHI 1.1.0
+ Copyright: GPL v3
+
+ The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge + simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most + used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice, + the size of this data-structure is even much lower than the total number of simplices. +
+ User manual: \ref contr +
+ +## Topological descriptors computation {#TopologicalDescriptorsComputation} + +### Persistent Cohomology + + + + + + + + + + +
+ \image html "3DTorus_poch.png" + + The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing + global topological features like connected components, holes, cavities, etc. Persistent homology studies the + evolution -- birth, life and death -- of these features when the topological space is changing. Consequently, the + theory is essentially composed of three elements: topological spaces, their homology groups and an evolution + scheme. + Computation of persistent cohomology using the algorithm of \cite DBLP:journals/dcg/SilvaMV11 and + \cite DBLP:journals/corr/abs-1208-5018 and the Compressed Annotation Matrix implementation of + \cite DBLP:conf/esa/BoissonnatDM13 . + + Author: Clément Maria
+ Introduced in: GUDHI 1.0.0
+ Copyright: GPL v3
+
+ User manual: \ref persistent_cohomology - Reference manual: Gudhi::persistent_cohomology::Persistent_cohomology +
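For illustration, a minimal sketch of computing the persistence pairs of a filtered simplex tree with Z/2Z coefficients, assuming the Persistent_cohomology class template combined with Simplex_tree (the simplices and filtration values are arbitrary):

```cpp
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <vector>

int main() {
  Gudhi::Simplex_tree<> stree;
  std::vector<int> triangle1 = {0, 1, 2};
  std::vector<int> triangle2 = {1, 2, 3};
  stree.insert_simplex_and_subfaces(triangle1, 0.3);
  stree.insert_simplex_and_subfaces(triangle2, 0.4);
  stree.initialize_filtration();

  using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
  Gudhi::persistent_cohomology::Persistent_cohomology<Gudhi::Simplex_tree<>, Field_Zp> pcoh(stree);
  pcoh.init_coefficients(2);               // coefficients in Z/2Z
  pcoh.compute_persistent_cohomology(0.);  // min_persistence = 0 keeps every pair
  pcoh.output_diagram();                   // one line per persistence pair
  return 0;
}
```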
+ +## Manifold reconstruction {#ManifoldReconstruction} + +### Tangential complex + + + + + + + + + + +
+ \image html "tc_examples.png" + + A Tangential Delaunay complex is a simplicial complex + designed to reconstruct a \f$ k \f$-dimensional manifold embedded in \f$ d \f$-dimensional Euclidean space. + The input is a point sample coming from an unknown manifold. + The running time depends only linearly on the extrinsic dimension \f$ d \f$ + and exponentially on the intrinsic dimension \f$ k \f$. + + Author: Clément Jamin
+ Introduced in: GUDHI 2.0.0
+ Copyright: GPL v3
+ Requires: \ref cgal ≥ 4.8.1 and \ref eigen3 +
+ User manual: \ref tangential_complex - Reference manual: Gudhi::tangential_complex::Tangential_complex +
+ +## Topological descriptors tools {#TopologicalDescriptorsTools} + +### Bottleneck distance + + + + + + + + + + +
+ \image html "perturb_pd.png" + + Bottleneck distance measures the similarity between two persistence diagrams. + It's the shortest distance b for which there exists a perfect matching between + the points of the two diagrams (+ all the diagonal points) such that + any couple of matched points are at distance at most b. + + Author: François Godi
+ Introduced in: GUDHI 2.0.0
+ Copyright: GPL v3
+ Requires: \ref cgal ≥ 4.8.1 +
+ User manual: \ref bottleneck_distance +
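For illustration, a minimal sketch, assuming the two diagrams are given as vectors of (birth, death) pairs (the values are arbitrary):

```cpp
#include <gudhi/Bottleneck.h>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  std::vector<std::pair<double, double>> diag1 = {{2.7, 3.7}, {9.6, 14.}, {34.2, 34.974}};
  std::vector<std::pair<double, double>> diag2 = {{2.8, 4.45}, {9.5, 14.1}};

  // Called without a tolerance argument, the distance is computed exactly;
  // an additional approximation error can also be passed for a faster result.
  double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2);
  std::cout << "Bottleneck distance = " << b << std::endl;
  return 0;
}
```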
+ +### Persistence representations + + + + + + + + + + +
+ \image html "average_landscape.png" + + It contains implementation of various representations of persistence diagrams; diagrams themselves, persistence + landscapes (rigorous and grid version), persistence heath maps, vectors and others. It implements basic + functionalities which are neccessary to use persistence in statistics and machine learning. + + Author: Pawel Dlotko
+ Introduced in: GUDHI 2.1.0
+ Copyright: GPL v3
+
+ User manual: \ref Persistence_representations +
-- cgit v1.2.3 From 617129f0e45e4a019b5d6facb8d2679629e34efd Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 28 May 2019 22:26:03 +0200 Subject: fix typo --- src/common/doc/main_page.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index 61efd582..a74ebd26 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -95,7 +95,7 @@ \image html "rips_complex_representation.png" - Rips_complex is a simplicial complex constructed from a one skeleton graph.
+ Rips complex is a simplicial complex constructed from a one skeleton graph.
The filtration value of each edge is computed from a user-given distance function and is inserted until a user-given threshold value.
This complex can be built from a point cloud and a distance function, or from a distance matrix. -- cgit v1.2.3 From a99f1e8ceef3ccb8606ac6f5af169329db8352f4 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 29 May 2019 11:43:46 +0200 Subject: Reformat modules summary and occupy 100% of the page --- src/cython/doc/alpha_complex_sum.inc | 40 ++++++++--------- src/cython/doc/bottleneck_distance_sum.inc | 27 ++++++------ src/cython/doc/conf.py | 2 +- src/cython/doc/cubical_complex_sum.inc | 27 ++++++------ src/cython/doc/nerve_gic_complex_sum.rst | 29 ++++++------ src/cython/doc/persistence_graphical_tools_sum.inc | 24 +++++----- src/cython/doc/persistent_cohomology_sum.inc | 51 +++++++++++----------- src/cython/doc/rips_complex_sum.inc | 31 +++++++------ src/cython/doc/simplex_tree_sum.inc | 25 +++++------ src/cython/doc/tangential_complex_sum.inc | 27 ++++++------ src/cython/doc/witness_complex_sum.inc | 34 +++++++-------- 11 files changed, 155 insertions(+), 162 deletions(-) diff --git a/src/cython/doc/alpha_complex_sum.inc b/src/cython/doc/alpha_complex_sum.inc index 1680a712..100edbbd 100644 --- a/src/cython/doc/alpha_complex_sum.inc +++ b/src/cython/doc/alpha_complex_sum.inc @@ -1,22 +1,20 @@ -================================================================= =================================== =================================== -:Author: Vincent Rouvreau :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -:Requires: CGAL :math:`\geq` 4.7.0 Eigen3 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| .. figure:: | Alpha_complex is a simplicial complex constructed from the finite | -| ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | -| :alt: Alpha complex representation | | -| :figclass: align-center | The filtration value of each simplex is computed as the square of the | -| | circumradius of the simplex if the circumsphere is empty (the simplex | -| Alpha complex representation | is then said to be Gabriel), and as the minimum of the filtration | -| | values of the codimension 1 cofaces that make it not Gabriel | -| | otherwise. All simplices that have a filtration value strictly | -| | greater than a given alpha squared value are not inserted into the | -| | complex. | -| | | -| | This package requires having CGAL version 4.7 or higher (4.8.1 is | -| | advised for better performance). | -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| :doc:`alpha_complex_user` | :doc:`alpha_complex_ref` | -+----------------------------------------------------------------+------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------+ + | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | + | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. 
| | + | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | + | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: GPL v3 | + | | is then said to be Gabriel), and as the minimum of the filtration | | + | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: Eigen3 and CGAL :math:`\geq` 4.7.0 | + | | otherwise. All simplices that have a filtration value strictly | | + | | greater than a given alpha squared value are not inserted into the | | + | | complex. | | + | | | | + | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | + | | advised for better performance). | | + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------+ + | :doc:`alpha_complex_user` | :doc:`alpha_complex_ref` | + +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/bottleneck_distance_sum.inc b/src/cython/doc/bottleneck_distance_sum.inc index 030fad9e..4fedb744 100644 --- a/src/cython/doc/bottleneck_distance_sum.inc +++ b/src/cython/doc/bottleneck_distance_sum.inc @@ -1,15 +1,14 @@ -================================================================= =================================== =================================== -:Author: François Godi :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -:Requires: CGAL :math:`\geq` 4.8.0 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+-----------------------------------------------------------------+----------------------------------------------------------------------+ -| .. figure:: | Bottleneck distance measures the similarity between two persistence | -| ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | -| :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | -| | diagonal points) such that any couple of matched points are at | -| Bottleneck distance is the length of | distance at most b. | -| the longest edge | | -+-----------------------------------------------------------------+----------------------------------------------------------------------+ -| :doc:`bottleneck_distance_user` | | -+-----------------------------------------------------------------+----------------------------------------------------------------------+ + +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ + | .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi | + | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | | + | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 | + | | diagonal points) such that any couple of matched points are at | | + | Bottleneck distance is the length of | distance at most b. 
| :Copyright: GPL v3 | + | the longest edge | | | + | | | :Requires: CGAL :math:`\geq` 4.8.0 | + +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ + | :doc:`bottleneck_distance_user` | | | + +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py index 4a54d4fd..ce08f679 100755 --- a/src/cython/doc/conf.py +++ b/src/cython/doc/conf.py @@ -125,7 +125,7 @@ html_theme_options = { "sidebarbgcolor": "#A1ADCD", "sidebartextcolor": "black", "sidebarlinkcolor": "#334D5C", - "body_max_width": "1200px", + "body_max_width": "100%", } # Add any paths that contain custom themes here, relative to this directory. diff --git a/src/cython/doc/cubical_complex_sum.inc b/src/cython/doc/cubical_complex_sum.inc index 280ad0e0..6dcc1fda 100644 --- a/src/cython/doc/cubical_complex_sum.inc +++ b/src/cython/doc/cubical_complex_sum.inc @@ -1,15 +1,14 @@ -================================================================= =================================== =================================== -:Author: Pawel Dlotko :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+--------------------------------------------------------------------------+----------------------------------------------------------------------+ -| .. figure:: | The cubical complex is an example of a structured complex useful in | -| ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | -| :alt: Cubical complex representation | analysis. | -| :figclass: align-center | | -| | | -| Cubical complex representation | | -+--------------------------------------------------------------------------+----------------------------------------------------------------------+ -| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | -| | * :doc:`periodic_cubical_complex_ref` | -+--------------------------------------------------------------------------+----------------------------------------------------------------------+ + +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ + | .. figure:: | The cubical complex is an example of a structured complex useful in | :Author: Pawel Dlotko | + | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | | + | :alt: Cubical complex representation | analysis. 
| :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | | | + | | | :Copyright: GPL v3 | + | | | | + +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ + | :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | + | | * :doc:`periodic_cubical_complex_ref` | + +--------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/nerve_gic_complex_sum.rst b/src/cython/doc/nerve_gic_complex_sum.rst index 523c119f..47bf84fd 100644 --- a/src/cython/doc/nerve_gic_complex_sum.rst +++ b/src/cython/doc/nerve_gic_complex_sum.rst @@ -1,15 +1,16 @@ -================================================================= =================================== =================================== -:Author: Mathieu Carrière :Introduced in: GUDHI 2.3.0 :Copyright: GPL v3 -:Requires: CGAL :math:`\geq` 4.8.1 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | -| ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | -| :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | -| :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | -| | the image of a scalar-valued function defined on the data. | -| Graph Induced Complex of a point cloud. | | -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` | -+----------------------------------------------------------------+------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | + | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | + | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | + | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | + | | the image of a scalar-valued function defined on the data. 
| :Copyright: GPL v3 | + | | | | + | | | :Requires: CGAL :math:`\geq` 4.8.1 | + | | | | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/persistence_graphical_tools_sum.inc b/src/cython/doc/persistence_graphical_tools_sum.inc index 5577cf99..ee164652 100644 --- a/src/cython/doc/persistence_graphical_tools_sum.inc +++ b/src/cython/doc/persistence_graphical_tools_sum.inc @@ -1,12 +1,14 @@ -================================================================= =================================== =================================== -:Author: Vincent Rouvreau :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -:Requires: matplotlib numpy scipy -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ -| .. figure:: | These graphical tools comes on top of persistence results and allows | -| img/graphical_tools_representation.png | the user to build easily persistence barcode, diagram or density. | -| | | -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ -| :doc:`persistence_graphical_tools_user` | :doc:`persistence_graphical_tools_ref` | -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | .. figure:: | These graphical tools comes on top of persistence results and allows | :Author: Vincent Rouvreau | + | img/graphical_tools_representation.png | the user to build easily persistence barcode, diagram or density. 
| | + | | | :Introduced in: GUDHI 2.0.0 | + | | | | + | | | :Copyright: GPL v3 | + | | | | + | | | :Requires: matplotlib, numpy and scipy | + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | :doc:`persistence_graphical_tools_user` | :doc:`persistence_graphical_tools_ref` | | + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ diff --git a/src/cython/doc/persistent_cohomology_sum.inc b/src/cython/doc/persistent_cohomology_sum.inc index a26df1dc..5bf1a7bf 100644 --- a/src/cython/doc/persistent_cohomology_sum.inc +++ b/src/cython/doc/persistent_cohomology_sum.inc @@ -1,27 +1,26 @@ -================================================================= =================================== =================================== -:Author: Clément Maria :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ -| .. figure:: | The theory of homology consists in attaching to a topological space | -| ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | -| :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | -| | homology studies the evolution -- birth, life and death -- of these | -| Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | -| Torus | theory is essentially composed of three elements: topological spaces, | -| | their homology groups and an evolution scheme. | -| | | -| | Computation of persistent cohomology using the algorithm of | -| | :cite:`DBLP:journals/dcg/SilvaMV11` and | -| | :cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed | -| | Annotation Matrix implementation of | -| | :cite:`DBLP:conf/esa/BoissonnatDM13`. | -| | | -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ -| :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | -| | feature for reference: | -| | | -| | * :doc:`simplex_tree_ref` | -| | * :doc:`cubical_complex_ref` | -| | * :doc:`periodic_cubical_complex_ref` | -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | .. figure:: | The theory of homology consists in attaching to a topological space | :Author: Clément Maria | + | ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | | + | :figclass: align-center | features like connected components, holes, cavities, etc. 
Persistent | :Introduced in: GUDHI 2.0.0 | + | | homology studies the evolution -- birth, life and death -- of these | | + | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: GPL v3 | + | Torus | theory is essentially composed of three elements: topological spaces, | | + | | their homology groups and an evolution scheme. | | + | | | | + | | Computation of persistent cohomology using the algorithm of | | + | | :cite:`DBLP:journals/dcg/SilvaMV11` and | | + | | :cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed | | + | | Annotation Matrix implementation of | | + | | :cite:`DBLP:conf/esa/BoissonnatDM13`. | | + | | | | + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | | + | | feature for reference: | | + | | | | + | | * :doc:`simplex_tree_ref` | | + | | * :doc:`cubical_complex_ref` | | + | | * :doc:`periodic_cubical_complex_ref` | | + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ diff --git a/src/cython/doc/rips_complex_sum.inc b/src/cython/doc/rips_complex_sum.inc index ea26769a..b32b810e 100644 --- a/src/cython/doc/rips_complex_sum.inc +++ b/src/cython/doc/rips_complex_sum.inc @@ -1,17 +1,16 @@ -===================================================================== =========================== =================================== -:Author: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -===================================================================== =========================== =================================== +.. table:: + :widths: 30 50 20 -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton | -| ../../doc/Rips_complex/rips_complex_representation.png | graph. | -| :figclass: align-center | | -| | The filtration value of each edge is computed from a user-given | -| Rips complex representation | distance function and is inserted until a user-given threshold | -| | value. | -| | | -| | This complex can be built from a point cloud and a distance function, | -| | or from a distance matrix. | -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| :doc:`rips_complex_user` | :doc:`rips_complex_ref` | -+----------------------------------------------------------------+------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ + | .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse | + | ../../doc/Rips_complex/rips_complex_representation.png | graph. 
| | + | :figclass: align-center | | :Introduced in: GUDHI 2.0.0 | + | | The filtration value of each edge is computed from a user-given | | + | | distance function and is inserted until a user-given threshold | :Copyright: GPL v3 | + | | value. | | + | | | | + | | This complex can be built from a point cloud and a distance function, | | + | | or from a distance matrix. | | + +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ + | :doc:`rips_complex_user` | :doc:`rips_complex_ref` | + +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/simplex_tree_sum.inc b/src/cython/doc/simplex_tree_sum.inc index fb0e54c1..7e10e366 100644 --- a/src/cython/doc/simplex_tree_sum.inc +++ b/src/cython/doc/simplex_tree_sum.inc @@ -1,14 +1,13 @@ -================================================================= =================================== =================================== -:Author: Clément Maria :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| .. figure:: | The simplex tree is an efficient and flexible data structure for | -| ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | -| :alt: Simplex tree representation | | -| :figclass: align-center | The data structure is described in | -| | :cite:`boissonnatmariasimplextreealgorithmica` | -| Simplex tree representation | | -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| :doc:`simplex_tree_user` | :doc:`simplex_tree_ref` | -+----------------------------------------------------------------+------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ + | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria | + | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. 
| | + | :alt: Simplex tree representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: GPL v3 | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ + | :doc:`simplex_tree_user` | :doc:`simplex_tree_ref` | + +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/tangential_complex_sum.inc b/src/cython/doc/tangential_complex_sum.inc index 72b4d7ba..db1c5ab1 100644 --- a/src/cython/doc/tangential_complex_sum.inc +++ b/src/cython/doc/tangential_complex_sum.inc @@ -1,15 +1,14 @@ -================================================================= =================================== =================================== -:Author: Clément Jamin :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -:Requires: CGAL :math:`\geq` 4.8.0 Eigen3 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | -| ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | -| :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | -| | an unknown manifold. The running time depends only linearly on the | -| Tangential complex representation | extrinsic dimension :math:`d` and exponentially on the intrinsic | -| | dimension :math:`k`. | -+----------------------------------------------------------------+------------------------------------------------------------------------+ -| :doc:`tangential_complex_user` | :doc:`tangential_complex_ref` | -+----------------------------------------------------------------+------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | + | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | + | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | + | | an unknown manifold. The running time depends only linearly on the | | + | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: GPL v3 | + | | dimension :math:`k`. 
| | + | | | :Requires: CGAL :math:`\geq` 4.8.0 | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | :doc:`tangential_complex_user` | :doc:`tangential_complex_ref` | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ diff --git a/src/cython/doc/witness_complex_sum.inc b/src/cython/doc/witness_complex_sum.inc index a8a126a0..9dc9aef6 100644 --- a/src/cython/doc/witness_complex_sum.inc +++ b/src/cython/doc/witness_complex_sum.inc @@ -1,19 +1,17 @@ -================================================================= =================================== =================================== -:Author: Siargey Kachanovich :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -:Euclidean version requires: CGAL :math:`\geq` 4.6.0 Eigen3 -================================================================= =================================== =================================== +.. table:: + :widths: 30 50 20 -+-------------------------------------------------------------------+----------------------------------------------------------------------+ -| .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | -| ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | -| :alt: Witness complex representation | | -| :figclass: align-center | The data structure is described in | -| | :cite:`boissonnatmariasimplextreealgorithmica`. | -| | | -| Witness complex representation | | -+-------------------------------------------------------------------+----------------------------------------------------------------------+ -| :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | -| | * :doc:`strong_witness_complex_ref` | -| | * :doc:`euclidean_witness_complex_ref` | -| | * :doc:`euclidean_strong_witness_complex_ref` | -+-------------------------------------------------------------------+----------------------------------------------------------------------+ + +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ + | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | + | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | | + | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica`. 
| :Copyright: GPL v3 | + | | | | + | | | :Requires: Eigen3 and CGAL :math:`\geq` 4.6.0 for Euclidean versions only | + +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ + | :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | | + | | * :doc:`strong_witness_complex_ref` | | + | | * :doc:`euclidean_witness_complex_ref` | | + | | * :doc:`euclidean_strong_witness_complex_ref` | | + +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ -- cgit v1.2.3 From dc642f06e4c5174ecd2301eb7bcd1daeb7c4a2ec Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 29 May 2019 12:33:54 +0200 Subject: Rename nerve_gic_complex_sum.rst as nerve_gic_complex_sum.inc and use introduction sections --- src/cython/doc/index.rst | 79 ++++++++++++++----------------- src/cython/doc/nerve_gic_complex_sum.inc | 16 +++++++ src/cython/doc/nerve_gic_complex_sum.rst | 16 ------- src/cython/doc/nerve_gic_complex_user.rst | 2 +- 4 files changed, 53 insertions(+), 60 deletions(-) create mode 100644 src/cython/doc/nerve_gic_complex_sum.inc delete mode 100644 src/cython/doc/nerve_gic_complex_sum.rst diff --git a/src/cython/doc/index.rst b/src/cython/doc/index.rst index 15cbe267..e379bc23 100644 --- a/src/cython/doc/index.rst +++ b/src/cython/doc/index.rst @@ -6,80 +6,73 @@ GUDHI Python module documentation :alt: Gudhi banner :figclass: align-center -Introduction -************ - -The Python interface for the Gudhi library (Geometry Understanding in Higher -Dimensions) is a generic open source -`Python module `_, for -Computational Topology and Topological Data Analysis -(`TDA `_). -The GUDHI library intends to help the development of new algorithmic solutions -in TDA and their transfer to applications. It provides robust, efficient, -flexible and easy to use implementations of state-of-the-art algorithms and -data structures. - -The current release of the GUDHI library includes: +Complexes +********* -* Data structures to represent, construct and manipulate simplicial complexes. -* Simplification of simplicial complexes by edge contraction. -* Algorithms to compute persistent homology and bottleneck distance. +Cubical complexes +================= -We refer to :cite:`gudhilibrary_ICMS14` for a detailed description of the -design of the library. +.. include:: cubical_complex_sum.inc -Data structures -*************** +Simplicial complexes +==================== Alpha complex -============= +------------- .. include:: alpha_complex_sum.inc -Cover complexes -=============== +Rips complex +------------- + +.. include:: rips_complex_sum.inc -.. include:: nerve_gic_complex_sum.rst +Witness complex +--------------- -Cubical complex +.. include:: witness_complex_sum.inc + +Cover complexes =============== -.. include:: cubical_complex_sum.inc +.. include:: nerve_gic_complex_sum.inc -Rips complex -============ +Data structures and basic operations +************************************ -.. include:: rips_complex_sum.inc +Data structures +=============== Simplex tree -============ +------------ .. include:: simplex_tree_sum.inc +Topological descriptors computation +*********************************** + +Persistence cohomology +====================== + +.. 
include:: persistent_cohomology_sum.inc + +Manifold reconstruction +*********************** + Tangential complex ================== .. include:: tangential_complex_sum.inc -Witness complex -=============== - -.. include:: witness_complex_sum.inc - -Toolbox -******* +Topological descriptors tools +***************************** Bottleneck distance =================== .. include:: bottleneck_distance_sum.inc -Persistence cohomology -====================== - -.. include:: persistent_cohomology_sum.inc - Persistence graphical tools =========================== diff --git a/src/cython/doc/nerve_gic_complex_sum.inc b/src/cython/doc/nerve_gic_complex_sum.inc new file mode 100644 index 00000000..47bf84fd --- /dev/null +++ b/src/cython/doc/nerve_gic_complex_sum.inc @@ -0,0 +1,16 @@ +.. table:: + :widths: 30 50 20 + + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | + | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | + | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | + | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | + | | the image of a scalar-valued function defined on the data. | :Copyright: GPL v3 | + | | | | + | | | :Requires: CGAL :math:`\geq` 4.8.1 | + | | | | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/nerve_gic_complex_sum.rst b/src/cython/doc/nerve_gic_complex_sum.rst deleted file mode 100644 index 47bf84fd..00000000 --- a/src/cython/doc/nerve_gic_complex_sum.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | - | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | - | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | - | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | - | | the image of a scalar-valued function defined on the data. 
| :Copyright: GPL v3 | - | | | | - | | | :Requires: CGAL :math:`\geq` 4.8.1 | - | | | | - | | | | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/nerve_gic_complex_user.rst b/src/cython/doc/nerve_gic_complex_user.rst index 44f30e1a..94a2b246 100644 --- a/src/cython/doc/nerve_gic_complex_user.rst +++ b/src/cython/doc/nerve_gic_complex_user.rst @@ -7,7 +7,7 @@ Cover complexes user manual Definition ---------- -.. include:: nerve_gic_complex_sum.rst +.. include:: nerve_gic_complex_sum.inc Visualizations of the simplicial complexes can be done with either neato (from `graphviz `_), -- cgit v1.2.3 From b9a6a203a71eed7f17de219070ca24e448b53b9e Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 29 May 2019 14:17:00 +0200 Subject: Citations for bottleneck distance --- biblio/bibliography.bib | 22 +++++++++++++++++++++- .../doc/Intro_bottleneck_distance.h | 3 +++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index be4c2db5..288692b2 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -1139,4 +1139,24 @@ language={English} timestamp = {Mon, 13 Aug 2018 16:46:26 +0200}, biburl = {https://dblp.org/rec/bib/journals/corr/BoissonnatS16}, bibsource = {dblp computer science bibliography, https://dblp.org} -} \ No newline at end of file +} + +@article{Kerber:2017:GHC:3047249.3064175, + author = {Kerber, Michael and Morozov, Dmitriy and Nigmetov, Arnur}, + title = {Geometry Helps to Compare Persistence Diagrams}, + journal = {J. Exp. Algorithmics}, + issue_date = {2017}, + volume = {22}, + month = sep, + year = {2017}, + issn = {1084-6654}, + pages = {1.4:1--1.4:20}, + articleno = {1.4}, + numpages = {20}, + url = {http://doi.acm.org/10.1145/3064175}, + doi = {10.1145/3064175}, + acmid = {3064175}, + publisher = {ACM}, + address = {New York, NY, USA}, + keywords = {Assignment problems, bipartite matching, k-d tree, persistent homology}, +} diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h index f8fce96c..6fd058a8 100644 --- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h +++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h @@ -41,6 +41,9 @@ namespace persistence_diagram { * * \image html perturb_pd.png On this picture, the red edges represent the matching. The bottleneck distance is the length of the longest edge. * + * This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" + * \cite DBLP:journals/algorithmica/EfratIK01. Another relevant publication, although it was not used is + * "Geometry Helps to Compare Persistence Diagrams" \cite Kerber:2017:GHC:3047249.3064175. */ /** @} */ // end defgroup bottleneck_distance -- cgit v1.2.3 From 1d53826baf9f2bccb96a7242dbabcc55d8977b19 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 3 Jun 2019 15:10:07 +0200 Subject: modify cells height for C++ doc. 
Use bullet lists to have the same font height on main page --- src/common/doc/main_page.md | 28 +++++++++++----------- src/cython/doc/alpha_complex_sum.inc | 2 +- src/cython/doc/bottleneck_distance_sum.inc | 4 ++-- src/cython/doc/cubical_complex_sum.inc | 2 +- src/cython/doc/nerve_gic_complex_sum.inc | 2 +- src/cython/doc/persistence_graphical_tools_sum.inc | 4 ++-- src/cython/doc/persistent_cohomology_sum.inc | 14 +++++------ src/cython/doc/rips_complex_sum.inc | 2 +- src/cython/doc/simplex_tree_sum.inc | 2 +- src/cython/doc/tangential_complex_sum.inc | 4 ++-- src/cython/doc/witness_complex_sum.inc | 10 ++++---- 11 files changed, 37 insertions(+), 37 deletions(-) diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index a74ebd26..e61eee81 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -23,7 +23,7 @@ - + User manual: \ref cubical_complex - Reference manual: Gudhi::cubical_complex::Bitmap_cubical_complex @@ -56,7 +56,7 @@ - + User manual: \ref alpha_complex - Reference manual: Gudhi::alpha_complex::Alpha_complex and Gudhi::alpha_complex::Alpha_complex_3d @@ -81,7 +81,7 @@ - + User manual: \ref cech_complex - Reference manual: Gudhi::cech_complex::Cech_complex @@ -107,7 +107,7 @@ - + User manual: \ref rips_complex - Reference manual: Gudhi::rips_complex::Rips_complex @@ -132,7 +132,7 @@ - + User manual: \ref witness_complex - Reference manual: Gudhi::witness_complex::SimplicialComplexForWitness @@ -159,7 +159,7 @@ - + User manual: \ref cover_complex - Reference manual: Gudhi::cover_complex::Cover_complex @@ -187,7 +187,7 @@ - + User manual: \ref simplex_tree - Reference manual: Gudhi::Simplex_tree @@ -215,7 +215,7 @@ - + User manual: \ref skbl - Reference manual: Gudhi::skeleton_blocker::Skeleton_blocker_complex @@ -240,7 +240,7 @@ - + User manual: \ref toplex_map - Reference manual: Gudhi::Toplex_map @@ -268,7 +268,7 @@ - + User manual: \ref contr @@ -300,7 +300,7 @@ - + User manual: \ref persistent_cohomology - Reference manual: Gudhi::persistent_cohomology::Persistent_cohomology @@ -330,7 +330,7 @@ - + User manual: \ref tangential_complex - Reference manual: Gudhi::tangential_complex::Tangential_complex @@ -359,7 +359,7 @@ - + User manual: \ref bottleneck_distance @@ -384,7 +384,7 @@ - + User manual: \ref Persistence_representations diff --git a/src/cython/doc/alpha_complex_sum.inc b/src/cython/doc/alpha_complex_sum.inc index 100edbbd..806988bb 100644 --- a/src/cython/doc/alpha_complex_sum.inc +++ b/src/cython/doc/alpha_complex_sum.inc @@ -16,5 +16,5 @@ | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | | | advised for better performance). 
| | +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------+ - | :doc:`alpha_complex_user` | :doc:`alpha_complex_ref` | + | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/bottleneck_distance_sum.inc b/src/cython/doc/bottleneck_distance_sum.inc index 4fedb744..41b9c5a3 100644 --- a/src/cython/doc/bottleneck_distance_sum.inc +++ b/src/cython/doc/bottleneck_distance_sum.inc @@ -10,5 +10,5 @@ | the longest edge | | | | | | :Requires: CGAL :math:`\geq` 4.8.0 | +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ - | :doc:`bottleneck_distance_user` | | | - +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ + | * :doc:`bottleneck_distance_user` | | + +-----------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/cubical_complex_sum.inc b/src/cython/doc/cubical_complex_sum.inc index 6dcc1fda..6dcf8e48 100644 --- a/src/cython/doc/cubical_complex_sum.inc +++ b/src/cython/doc/cubical_complex_sum.inc @@ -9,6 +9,6 @@ | | | :Copyright: GPL v3 | | | | | +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ - | :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | + | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | | | * :doc:`periodic_cubical_complex_ref` | +--------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/nerve_gic_complex_sum.inc b/src/cython/doc/nerve_gic_complex_sum.inc index 47bf84fd..0e606fe1 100644 --- a/src/cython/doc/nerve_gic_complex_sum.inc +++ b/src/cython/doc/nerve_gic_complex_sum.inc @@ -12,5 +12,5 @@ | | | | | | | | +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` | + | * :doc:`nerve_gic_complex_user` | * :doc:`nerve_gic_complex_ref` | +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/persistence_graphical_tools_sum.inc b/src/cython/doc/persistence_graphical_tools_sum.inc index ee164652..b412de56 100644 --- a/src/cython/doc/persistence_graphical_tools_sum.inc +++ b/src/cython/doc/persistence_graphical_tools_sum.inc @@ -10,5 +10,5 @@ | | | | | | | :Requires: matplotlib, numpy and scipy | +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ - | 
:doc:`persistence_graphical_tools_user` | :doc:`persistence_graphical_tools_ref` | | - +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | * :doc:`persistence_graphical_tools_user` | * :doc:`persistence_graphical_tools_ref` | + +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/persistent_cohomology_sum.inc b/src/cython/doc/persistent_cohomology_sum.inc index 5bf1a7bf..20ca073c 100644 --- a/src/cython/doc/persistent_cohomology_sum.inc +++ b/src/cython/doc/persistent_cohomology_sum.inc @@ -17,10 +17,10 @@ | | :cite:`DBLP:conf/esa/BoissonnatDM13`. | | | | | | +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ - | :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | | - | | feature for reference: | | - | | | | - | | * :doc:`simplex_tree_ref` | | - | | * :doc:`cubical_complex_ref` | | - | | * :doc:`periodic_cubical_complex_ref` | | - +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | * :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | + | | feature for reference: | + | | | + | | * :doc:`simplex_tree_ref` | + | | * :doc:`cubical_complex_ref` | + | | * :doc:`periodic_cubical_complex_ref` | + +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/rips_complex_sum.inc b/src/cython/doc/rips_complex_sum.inc index b32b810e..e8e505e2 100644 --- a/src/cython/doc/rips_complex_sum.inc +++ b/src/cython/doc/rips_complex_sum.inc @@ -12,5 +12,5 @@ | | This complex can be built from a point cloud and a distance function, | | | | or from a distance matrix. 
| | +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ - | :doc:`rips_complex_user` | :doc:`rips_complex_ref` | + | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` | +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/simplex_tree_sum.inc b/src/cython/doc/simplex_tree_sum.inc index 7e10e366..086c69d5 100644 --- a/src/cython/doc/simplex_tree_sum.inc +++ b/src/cython/doc/simplex_tree_sum.inc @@ -9,5 +9,5 @@ | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: GPL v3 | | | | | +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ - | :doc:`simplex_tree_user` | :doc:`simplex_tree_ref` | + | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` | +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/tangential_complex_sum.inc b/src/cython/doc/tangential_complex_sum.inc index db1c5ab1..0f03ffb3 100644 --- a/src/cython/doc/tangential_complex_sum.inc +++ b/src/cython/doc/tangential_complex_sum.inc @@ -10,5 +10,5 @@ | | dimension :math:`k`. | | | | | :Requires: CGAL :math:`\geq` 4.8.0 | +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | :doc:`tangential_complex_user` | :doc:`tangential_complex_ref` | | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ + | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/witness_complex_sum.inc b/src/cython/doc/witness_complex_sum.inc index 9dc9aef6..49577745 100644 --- a/src/cython/doc/witness_complex_sum.inc +++ b/src/cython/doc/witness_complex_sum.inc @@ -10,8 +10,8 @@ | | | | | | | :Requires: Eigen3 and CGAL :math:`\geq` 4.6.0 for Euclidean versions only | +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ - | :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | | - | | * :doc:`strong_witness_complex_ref` | | - | | * :doc:`euclidean_witness_complex_ref` | | - | | * :doc:`euclidean_strong_witness_complex_ref` | | - +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ + | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | + | | * :doc:`strong_witness_complex_ref` | + | | * :doc:`euclidean_witness_complex_ref` | + | | * :doc:`euclidean_strong_witness_complex_ref` | + 
+-------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------+ -- cgit v1.2.3 From 942c58493afc4a705e446c4e54ccb6b6b4b0ab85 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 3 Jun 2019 15:56:57 +0200 Subject: Precise that the default version is not weighted and not periodic --- src/Alpha_complex/utilities/alphacomplex.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Alpha_complex/utilities/alphacomplex.md b/src/Alpha_complex/utilities/alphacomplex.md index 50a39d32..b77656ff 100644 --- a/src/Alpha_complex/utilities/alphacomplex.md +++ b/src/Alpha_complex/utilities/alphacomplex.md @@ -103,8 +103,10 @@ to be recorded. Enter a negative value to see zero length intervals. * `-c [ --cuboid-file ]` is the path to the file describing the periodic domain. It must be in the format described [here]({{ site.officialurl }}/doc/latest/fileformats.html#FileFormatsIsoCuboid). +Default version is not periodic. * `-w [ --weight-file ]` is the path to the file containing the weights of the points (one value per line). +Default version is not weighted. * `-e [ --exact ]` for the exact computation version (not compatible with weight and periodic version). * `-f [ --fast ]` for the fast computation version. -- cgit v1.2.3 From 5d2095ce1bb0907cc1de2d97c3b056d00f29a83d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 3 Jun 2019 16:00:08 +0200 Subject: Fix #56 --- src/Alpha_complex/include/gudhi/Alpha_complex_3d.h | 2 +- src/common/doc/installation.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h index 32dfcc16..0bf12b1a 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h @@ -58,7 +58,7 @@ #include // for numeric_limits<> #if CGAL_VERSION_NR < 1041101000 -// Make compilation fail - required for external projects - https://gitlab.inria.fr/GUDHI/gudhi-devel/issues/10 +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 # error Alpha_complex_3d is only available for CGAL >= 4.11 #endif diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 8fb8b330..5d581b08 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -44,7 +44,7 @@ make doxygen \endverbatim * * \subsection helloworld Hello world ! - * The Hello world for GUDHI + * The Hello world for GUDHI * project is an example to help developers to make their own C++ project on top of the GUDHI library. 
* * \section optionallibrary Optional third-party library -- cgit v1.2.3 From 2d6850f405b5ebf2e9953f989862633289f99622 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 3 Jun 2019 16:49:34 +0200 Subject: Fix #46 --- src/cython/include/Simplex_tree_interface.h | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/src/cython/include/Simplex_tree_interface.h b/src/cython/include/Simplex_tree_interface.h index 3481eeff..ca98517d 100644 --- a/src/cython/include/Simplex_tree_interface.h +++ b/src/cython/include/Simplex_tree_interface.h @@ -45,7 +45,7 @@ class Simplex_tree_interface : public Simplex_tree { using Simplex_handle = typename Base::Simplex_handle; using Insertion_result = typename std::pair; using Simplex = std::vector; - using Complex = std::vector>; + using Filtered_simplices = std::vector>; public: bool find_simplex(const Simplex& vh) { @@ -94,9 +94,9 @@ class Simplex_tree_interface : public Simplex_tree { Base::initialize_filtration(); } - Complex get_filtration() { + Filtered_simplices get_filtration() { Base::initialize_filtration(); - Complex filtrations; + Filtered_simplices filtrations; for (auto f_simplex : Base::filtration_simplex_range()) { Simplex simplex; for (auto vertex : Base::simplex_vertex_range(f_simplex)) { @@ -107,8 +107,8 @@ class Simplex_tree_interface : public Simplex_tree { return filtrations; } - Complex get_skeleton(int dimension) { - Complex skeletons; + Filtered_simplices get_skeleton(int dimension) { + Filtered_simplices skeletons; for (auto f_simplex : Base::skeleton_simplex_range(dimension)) { Simplex simplex; for (auto vertex : Base::simplex_vertex_range(f_simplex)) { @@ -119,29 +119,25 @@ class Simplex_tree_interface : public Simplex_tree { return skeletons; } - Complex get_star(const Simplex& simplex) { - Complex star; + Filtered_simplices get_star(const Simplex& simplex) { + Filtered_simplices star; for (auto f_simplex : Base::star_simplex_range(Base::find(simplex))) { Simplex simplex_star; for (auto vertex : Base::simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; simplex_star.insert(simplex_star.begin(), vertex); } - std::cout << std::endl; star.push_back(std::make_pair(simplex_star, Base::filtration(f_simplex))); } return star; } - Complex get_cofaces(const Simplex& simplex, int dimension) { - Complex cofaces; + Filtered_simplices get_cofaces(const Simplex& simplex, int dimension) { + Filtered_simplices cofaces; for (auto f_simplex : Base::cofaces_simplex_range(Base::find(simplex), dimension)) { Simplex simplex_coface; for (auto vertex : Base::simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; simplex_coface.insert(simplex_coface.begin(), vertex); } - std::cout << std::endl; cofaces.push_back(std::make_pair(simplex_coface, Base::filtration(f_simplex))); } return cofaces; -- cgit v1.2.3 From 7882d938d921672377c1db58385bec77ddd1584a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 3 Jun 2019 17:31:10 +0200 Subject: Fix #55 --- src/Simplex_tree/include/gudhi/Simplex_tree.h | 2 +- src/cython/cython/simplex_tree.pyx | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 4b18651c..343ed472 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1330,7 +1330,7 @@ class Simplex_tree { public: /** \brief This function ensures that each simplex has a higher filtration value than its faces by increasing 
the * filtration values. - * @return The filtration modification information. + * @return True if any filtration value was modified, false if the filtration was already non-decreasing. * \post Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()` * function is not launching `initialize_filtration()` but returns the filtration modification information. If the * complex has changed , please call `initialize_filtration()` to recompute it. diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx index 0ab97f80..a38e309d 100644 --- a/src/cython/cython/simplex_tree.pyx +++ b/src/cython/cython/simplex_tree.pyx @@ -405,7 +405,8 @@ cdef class SimplexTree: """This function ensures that each simplex has a higher filtration value than its faces by increasing the filtration values. - :returns: The filtration modification information. + :returns: True if any filtration value was modified, + False if the filtration was already non-decreasing. :rtype: bool -- cgit v1.2.3 From ee92004d1d860f1cb95d086095401f3d9e23788b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 4 Jun 2019 11:15:27 +0200 Subject: Fix #9 : rename Complex_ds as FilteredComplex. Set private types of non-exposed types --- .../concept/FilteredComplex.h | 25 +++++++------------ .../include/gudhi/Persistent_cohomology.h | 28 ++++++++++++---------- 2 files changed, 24 insertions(+), 29 deletions(-) diff --git a/src/Persistent_cohomology/concept/FilteredComplex.h b/src/Persistent_cohomology/concept/FilteredComplex.h index 62b9002f..7eb01b01 100644 --- a/src/Persistent_cohomology/concept/FilteredComplex.h +++ b/src/Persistent_cohomology/concept/FilteredComplex.h @@ -27,7 +27,7 @@ */ struct FilteredComplex { -/** Handle to specify a simplex. */ +/** \brief Handle to specify a simplex. */ typedef unspecified Simplex_handle; /** \brief Type for the value of the filtration function. * @@ -39,8 +39,7 @@ struct FilteredComplex * is model of IndexingTag. */ typedef unspecified Indexing_tag; -/** Returns a Simplex_handle that is different from all simplex handles - * of the simplices. */ +/** \brief Returns a Simplex_handle that is different from all simplex handles of the simplices. */ Simplex_handle null_simplex(); /** \brief Returns the number of simplices in the complex. * @@ -58,22 +57,19 @@ struct FilteredComplex * * This is only called on valid indices. */ Simplex_handle simplex ( size_t idx ); -/** \brief Iterator on the simplices belonging to the - * boundary of a simplex. +/** \brief Iterator on the simplices belonging to the boundary of a simplex. * * value_type must be 'Simplex_handle'. */ typedef unspecified Boundary_simplex_iterator; -/** \brief Range giving access to the simplices in the boundary of - * a simplex. +/** \brief Range giving access to the simplices in the boundary of a simplex. * * .begin() and .end() return type Boundary_simplex_iterator. */ typedef unspecified Boundary_simplex_range; -/** \brief Returns a range giving access to all simplices of the - * boundary of a simplex, i.e. - * the set of codimension 1 subsimplices of the Simplex. +/** \brief Returns a range giving access to all simplices of the boundary of a simplex, i.e. the set of codimension 1 + * subsimplices of the Simplex. 
* * If the simplex is \f$[v_0, \cdots ,v_d]\f$, with canonical orientation * induced by \f$ v_0 < \cdots < v_d \f$, the iterator enumerates the @@ -84,19 +80,16 @@ typedef unspecified Boundary_simplex_range; * gives the chains corresponding to the boundary of the simplex.*/ Boundary_simplex_range boundary_simplex_range(Simplex_handle sh); -/** \brief Iterator over all simplices of the complex - * in the order of the indexing scheme. +/** \brief Iterator over all simplices of the complex in the order of the indexing scheme. * * 'value_type' must be 'Simplex_handle'. */ typedef unspecified Filtration_simplex_iterator; -/** \brief Range over the simplices of the complex - * in the order of the filtration. +/** \brief Range over the simplices of the complex in the order of the filtration. * * .begin() and .end() return type Filtration_simplex_iterator.*/ typedef unspecified Filtration_simplex_range; -/** \brief Returns a range over the simplices of the complex - * in the order of the filtration. +/** \brief Returns a range over the simplices of the complex in the order of the filtration. * * .begin() and .end() return type Filtration_simplex_iterator.*/ Filtration_simplex_range filtration_simplex_range(); diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index c51e47a5..ca697450 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -63,12 +63,15 @@ namespace persistent_cohomology { template class Persistent_cohomology { public: - typedef FilteredComplex Complex_ds; // Data attached to each simplex to interface with a Property Map. - typedef typename Complex_ds::Simplex_key Simplex_key; - typedef typename Complex_ds::Simplex_handle Simplex_handle; - typedef typename Complex_ds::Filtration_value Filtration_value; + typedef typename FilteredComplex::Simplex_key Simplex_key; + typedef typename FilteredComplex::Simplex_handle Simplex_handle; + typedef typename FilteredComplex::Filtration_value Filtration_value; typedef typename CoefficientField::Element Arith_element; + /** \brief Persistent interval type. The Arith_element field is used for the multi-field framework. */ + typedef std::tuple Persistent_interval; + + private: // Compressed Annotation Matrix types: // Column type typedef Persistent_cohomology_column Column; // contains 1 set_hook @@ -83,16 +86,15 @@ class Persistent_cohomology { boost::intrusive::constant_time_size > Cam; // Sparse column type for the annotation of the boundary of an element. typedef std::vector > A_ds_type; - // Persistent interval type. The Arith_element field is used for the multi-field framework. - typedef std::tuple Persistent_interval; + public: /** \brief Initializes the Persistent_cohomology class. * * @param[in] cpx Complex for which the persistent homology is computed. * cpx is a model of FilteredComplex * @exception std::out_of_range In case the number of simplices is more than Simplex_key type numeric limit. */ - explicit Persistent_cohomology(Complex_ds& cpx) + explicit Persistent_cohomology(FilteredComplex& cpx) : cpx_(&cpx), dim_max_(cpx.dimension()), // upper bound on the dimension of the simplices coeff_field_(), // initialize the field coefficient structure. @@ -128,7 +130,7 @@ class Persistent_cohomology { * @param[in] persistence_dim_max if true, the persistent homology for the maximal dimension in the * complex is computed. If false, it is ignored. 
Default is false. */ - Persistent_cohomology(Complex_ds& cpx, bool persistence_dim_max) + Persistent_cohomology(FilteredComplex& cpx, bool persistence_dim_max) : Persistent_cohomology(cpx) { if (persistence_dim_max) { ++dim_max_; @@ -146,7 +148,7 @@ class Persistent_cohomology { private: struct length_interval { - length_interval(Complex_ds * cpx, Filtration_value min_length) + length_interval(FilteredComplex * cpx, Filtration_value min_length) : cpx_(cpx), min_length_(min_length) { } @@ -159,7 +161,7 @@ class Persistent_cohomology { min_length_ = new_length; } - Complex_ds * cpx_; + FilteredComplex * cpx_; Filtration_value min_length_; }; @@ -552,14 +554,14 @@ class Persistent_cohomology { * Compare two intervals by length. */ struct cmp_intervals_by_length { - explicit cmp_intervals_by_length(Complex_ds * sc) + explicit cmp_intervals_by_length(FilteredComplex * sc) : sc_(sc) { } bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) { return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1)) > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2))); } - Complex_ds * sc_; + FilteredComplex * sc_; }; public: @@ -733,7 +735,7 @@ class Persistent_cohomology { }; public: - Complex_ds * cpx_; + FilteredComplex * cpx_; int dim_max_; CoefficientField coeff_field_; size_t num_simplices_; -- cgit v1.2.3 From 733c9efab57e489e849d32123a69ac090d3c585a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 4 Jun 2019 17:21:59 +0200 Subject: Fix #7 : document better get_persistence_pairs (C++) and persistence_pairs (Python) methods --- .../include/gudhi/Persistent_cohomology.h | 12 ++++++++---- src/cython/cython/simplex_tree.pyx | 5 ++--- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index ca697450..452527c4 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -64,13 +64,18 @@ template class Persistent_cohomology { public: // Data attached to each simplex to interface with a Property Map. + + /** \brief Data stored for each simplex. */ typedef typename FilteredComplex::Simplex_key Simplex_key; + /** \brief Handle to specify a simplex. */ typedef typename FilteredComplex::Simplex_handle Simplex_handle; + /** \brief Type for the value of the filtration function. */ typedef typename FilteredComplex::Filtration_value Filtration_value; + /** \brief Type of element of the field. */ typedef typename CoefficientField::Element Arith_element; /** \brief Persistent interval type. The Arith_element field is used for the multi-field framework. */ typedef std::tuple Persistent_interval; - + private: // Compressed Annotation Matrix types: // Column type @@ -692,9 +697,8 @@ class Persistent_cohomology { return betti_number; } - /** @brief Returns the persistent pairs. - * @return Persistent pairs - * + /** @brief Returns a list of persistence birth and death FilteredComplex::Simplex_handle pairs. 
+ * @return A list of Persistent_cohomology::Persistent_interval */ const std::vector& get_persistent_pairs() const { return persistent_pairs_; diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx index a38e309d..ea99c940 100644 --- a/src/cython/cython/simplex_tree.pyx +++ b/src/cython/cython/simplex_tree.pyx @@ -514,10 +514,9 @@ cdef class SimplexTree: return intervals_result def persistence_pairs(self): - """This function returns the persistence pairs of the simplicial - complex. + """This function returns a list of persistence birth and death simplices pairs. - :returns: The persistence intervals. + :returns: A list of persistence simplices intervals. :rtype: list of pair of list of int :note: persistence_pairs function requires -- cgit v1.2.3 From 5661593ab3d8f7bd4cda867fc0348c86d26106e3 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 07:57:54 +0200 Subject: Fix #20: Add OFF section in file format and all documents target to it instead of geomview.org documentation. Modify data/points/alphacomplexdoc.off in dimension 3 to be used as an example for OFF section in file format. --- data/points/alphacomplexdoc.off | 22 ++++++++-------- src/Alpha_complex/utilities/alphacomplex.md | 4 +-- src/Doxyfile.in | 3 ++- src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 3 +-- src/common/doc/file_formats.h | 26 +++++++++++++++++++ .../example/vectordoubleoffreader_result.txt | 14 +++++------ src/common/include/gudhi/Off_reader.h | 3 +-- src/common/test/test_points_off_reader.cpp | 14 +++++------ src/cython/doc/fileformats.rst | 29 ++++++++++++++++++++++ src/cython/doc/nerve_gic_complex_user.rst | 3 +-- 10 files changed, 88 insertions(+), 33 deletions(-) diff --git a/data/points/alphacomplexdoc.off b/data/points/alphacomplexdoc.off index bb790193..a90db49a 100644 --- a/data/points/alphacomplexdoc.off +++ b/data/points/alphacomplexdoc.off @@ -1,10 +1,12 @@ -nOFF -2 7 0 0 -1.0 1.0 -7.0 0.0 -4.0 6.0 -9.0 6.0 -0.0 14.0 -2.0 19.0 -9.0 17.0 - +# Default is 3 dimension +OFF +# 7 vertices - 0 face - 0 edge +7 0 0 +# Point set: +1.0 1.0 0.0 +7.0 0.0 0.0 +4.0 6.0 0.0 +9.0 6.0 0.0 +0.0 14.0 0.0 +2.0 19.0 0.0 +9.0 17.0 0.0 diff --git a/src/Alpha_complex/utilities/alphacomplex.md b/src/Alpha_complex/utilities/alphacomplex.md index b77656ff..fcd16a3b 100644 --- a/src/Alpha_complex/utilities/alphacomplex.md +++ b/src/Alpha_complex/utilities/alphacomplex.md @@ -33,7 +33,7 @@ a prime number). where `` is the path to the input point cloud in -[nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). +[nOFF ASCII format]({{ site.officialurl }}/doc/latest/fileformats.html#FileFormatsOFF). **Allowed options** @@ -87,7 +87,7 @@ a prime number). ``` where `` is the path to the input point cloud in -[nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). +[nOFF ASCII format]({{ site.officialurl }}/doc/latest/fileformats.html#FileFormatsOFF). 
**Allowed options** diff --git a/src/Doxyfile.in b/src/Doxyfile.in index 54a438d4..5b1b8ecc 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -821,7 +821,8 @@ EXCLUDE_SYMBOLS = EXAMPLE_PATH = biblio/ \ example/ \ - utilities/ + utilities/ \ + data/ # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h index bc8aecc3..e72d63dd 100644 --- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h +++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h @@ -37,8 +37,7 @@ namespace cover_complex { * neato (from graphviz), * geomview, * KeplerMapper. - * Input point clouds are assumed to be - * OFF files. + * Input point clouds are assumed to be \ref FileFormatsOFF "OFF files" * * \section covers Covers * diff --git a/src/common/doc/file_formats.h b/src/common/doc/file_formats.h index 23214e25..235296d3 100644 --- a/src/common/doc/file_formats.h +++ b/src/common/doc/file_formats.h @@ -29,6 +29,32 @@ namespace Gudhi { \tableofcontents + \section FileFormatsOFF OFF file format + + OFF files must be conform to format described here: http://www.geomview.org/docs/html/OFF.html + + OFF files are mainly used as point cloud inputs. Here is an example of 7 points in a 3-dimensional space. As edges and + faces are not used for point set, there is no need to specify them (just set their numbers to 0): + + \include points/alphacomplexdoc.off + + For dimensions bigger than 3, the dimension can be set like here: + \verbatim + # Dimension is no more 3 + nOFF + # dimension 4 - 7 vertices - 0 face - 0 edge + 4 7 0 0 + # Point set: + 1.0 1.0 0.0 0.0 + 7.0 0.0 0.0 0.0 + 4.0 6.0 0.0 0.0 + 9.0 6.0 0.0 0.0 + 0.0 14.0 0.0 0.0 + 2.0 19.0 0.0 0.0 + 9.0 17.0 0.0 0.0 + \endverbatim + + \section FileFormatsPers Persistence Diagram Such a file, whose extension is usually `.pers`, contains a list of persistence intervals.
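A point set in the OFF format documented above can be loaded with Gudhi::Points_off_reader, the reader exercised by the test updated later in this commit (test_points_off_reader.cpp). The sketch below is illustrative only: the plain std::vector<double> point type and the command-line file argument are assumptions, chosen to match the vector-double OFF reader example whose expected output is updated just below.

\code{.cpp}
#include <gudhi/Points_off_reader.h>

#include <iostream>
#include <string>
#include <vector>

// A point is simply its vector of coordinates here.
using Point_d = std::vector<double>;

int main(int argc, char **argv) {
  if (argc != 2) {
    std::cerr << "Usage: " << argv[0] << " file.off" << std::endl;
    return 1;
  }
  // Reads e.g. data/points/alphacomplexdoc.off shown above.
  Gudhi::Points_off_reader<Point_d> off_reader(argv[1]);
  if (!off_reader.is_valid()) {
    std::cerr << "Unable to read OFF file " << argv[1] << std::endl;
    return 1;
  }
  std::vector<Point_d> point_cloud = off_reader.get_point_cloud();
  // 7 points for the example file documented above.
  std::cout << "Read " << point_cloud.size() << " points" << std::endl;
  return 0;
}
\endcode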
diff --git a/src/common/example/vectordoubleoffreader_result.txt b/src/common/example/vectordoubleoffreader_result.txt index 1deb8dbd..b399425a 100644 --- a/src/common/example/vectordoubleoffreader_result.txt +++ b/src/common/example/vectordoubleoffreader_result.txt @@ -1,7 +1,7 @@ -Point[0] = 1 1 -Point[1] = 7 0 -Point[2] = 4 6 -Point[3] = 9 6 -Point[4] = 0 14 -Point[5] = 2 19 -Point[6] = 9 17 +Point[0] = 1 1 0 +Point[1] = 7 0 0 +Point[2] = 4 6 0 +Point[3] = 9 6 0 +Point[4] = 0 14 0 +Point[5] = 2 19 0 +Point[6] = 9 17 0 diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index 05a1e145..fc951fe7 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -37,8 +37,7 @@ namespace Gudhi { /** \brief OFF file reader top class visitor. * - * OFF file must be conform to format described here : - * http://www.geomview.org/docs/html/OFF.html + * OFF file must be conform to \ref FileFormatsOFF */ class Off_reader { public: diff --git a/src/common/test/test_points_off_reader.cpp b/src/common/test/test_points_off_reader.cpp index ba3bab71..e4b76ed7 100644 --- a/src/common/test/test_points_off_reader.cpp +++ b/src/common/test/test_points_off_reader.cpp @@ -44,19 +44,19 @@ BOOST_AUTO_TEST_CASE( points_doc_test ) BOOST_CHECK(point_cloud.size() == 7); std::vector expected_points; - std::vector point = {1.0, 1.0}; + std::vector point = {1.0, 1.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); - point = {7.0, 0.0}; + point = {7.0, 0.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); - point = {4.0, 6.0}; + point = {4.0, 6.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); - point = {9.0, 6.0}; + point = {9.0, 6.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); - point = {0.0, 14.0}; + point = {0.0, 14.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); - point = {2.0, 19.0}; + point = {2.0, 19.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); - point = {9.0, 17.0}; + point = {9.0, 17.0, 0.0}; expected_points.push_back(Point_d(point.begin(), point.end())); BOOST_CHECK(point_cloud == expected_points); diff --git a/src/cython/doc/fileformats.rst b/src/cython/doc/fileformats.rst index e205cc8b..345dfdba 100644 --- a/src/cython/doc/fileformats.rst +++ b/src/cython/doc/fileformats.rst @@ -5,6 +5,35 @@ File formats ############ +OFF file format +*************** + +OFF files must be conform to format described here: +http://www.geomview.org/docs/html/OFF.html + +OFF files are mainly used as point cloud inputs. Here is an example of 7 points +in a 3-dimensional space. As edges and faces are not used for point set, there +is no need to specify them (just set their numbers to 0): + +.. literalinclude:: ../../data/points/alphacomplexdoc.off + +.. 
centered:: ../../points/alphacomplexdoc.off + +For dimensions bigger than 3, the dimension can be set like here:: + + # Dimension is no more 3 + nOFF + # dimension 4 - 7 vertices - 0 face - 0 edge + 4 7 0 0 + # Point set: + 1.0 1.0 0.0 0.0 + 7.0 0.0 0.0 0.0 + 4.0 6.0 0.0 0.0 + 9.0 6.0 0.0 0.0 + 0.0 14.0 0.0 0.0 + 2.0 19.0 0.0 0.0 + 9.0 17.0 0.0 0.0 + Persistence Diagram ******************* diff --git a/src/cython/doc/nerve_gic_complex_user.rst b/src/cython/doc/nerve_gic_complex_user.rst index 94a2b246..9101f45d 100644 --- a/src/cython/doc/nerve_gic_complex_user.rst +++ b/src/cython/doc/nerve_gic_complex_user.rst @@ -13,8 +13,7 @@ Visualizations of the simplicial complexes can be done with either neato (from `graphviz `_), `geomview `_, `KeplerMapper `_. -Input point clouds are assumed to be -`OFF files `_. +Input point clouds are assumed to be OFF files (cf. :doc:`fileformats`). Covers ------ -- cgit v1.2.3 From 4e33acbac51c8c2348dc88a16eb38e14c8ef724a Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 5 Jun 2019 09:46:58 +0200 Subject: typo --- src/common/doc/main_page.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index e61eee81..88e761df 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -374,8 +374,8 @@ It contains implementation of various representations of persistence diagrams; diagrams themselves, persistence - landscapes (rigorous and grid version), persistence heath maps, vectors and others. It implements basic - functionalities which are neccessary to use persistence in statistics and machine learning. + landscapes (rigorous and grid version), persistence heat maps, vectors and others. It implements basic + functionalities which are necessary to use persistence in statistics and machine learning. Author: Pawel Dlotko
-- cgit v1.2.3 From 84e40389f77bdb0a614efb947469d8920c723738 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 11:06:04 +0200 Subject: Add some debug traces --- .appveyor.yml | 11 ++++++----- src/cython/CMakeLists.txt | 6 ++++++ 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 294744b1..1048328e 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -26,7 +26,7 @@ environment: # PYTHON: "C:\\Python37-x64" - target: Python - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DMPFR_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DMPFR_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" PYTHON: "C:\\Python37-x64" @@ -42,10 +42,10 @@ install: - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows - SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH% - CALL "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" amd64 - - if [%target%]==[Python] ( python --version & - pip --version & - python -m pip install --upgrade pip & - pip install -U setuptools numpy matplotlib scipy Cython pytest ) + - python --version + - pip --version + - python -m pip install --upgrade pip + - pip install -U setuptools numpy matplotlib scipy Cython pytest ) build_script: - mkdir build @@ -53,6 +53,7 @@ build_script: - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( cd src/cython & + Type setup.py & python setup.py install & MSBuild RUN_TESTS.vcxproj ) else ( diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index 480332d7..d4ace20e 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -138,6 +138,7 @@ if(PYTHONINTERP_FOUND) else() add_gudhi_cython_lib("${Boost_THREAD_LIBRARY_RELEASE}") endif() + message("** Add Boost ${Boost_LIBRARY_DIRS}") set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ") endif() # Add CGAL compilation args @@ -148,6 +149,7 @@ if(PYTHONINTERP_FOUND) add_gudhi_debug_info("CGAL version ${CGAL_VERSION}") add_gudhi_cython_lib("${CGAL_LIBRARY}") set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ") + message("** Add CGAL ${CGAL_LIBRARIES_DIR}") # If CGAL is not header only, CGAL library may link with boost system, if(CMAKE_BUILD_TYPE MATCHES Debug) add_gudhi_cython_lib("${Boost_SYSTEM_LIBRARY_DEBUG}") @@ -155,6 +157,7 @@ if(PYTHONINTERP_FOUND) add_gudhi_cython_lib("${Boost_SYSTEM_LIBRARY_RELEASE}") endif() set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ") + message("** Add Boost ${Boost_LIBRARY_DIRS}") endif(CGAL_HEADER_ONLY) # GMP and GMPXX are not required, but if present, CGAL will link with them. 
if(GMP_FOUND) @@ -162,11 +165,13 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ") add_gudhi_cython_lib("${GMP_LIBRARIES}") set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ") + message("** Add gmp ${GMP_LIBRARIES_DIR}") if(GMPXX_FOUND) add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ") add_gudhi_cython_lib("${GMPXX_LIBRARIES}") set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ") + message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}") endif(GMPXX_FOUND) endif(GMP_FOUND) endif(CGAL_FOUND) @@ -195,6 +200,7 @@ if(PYTHONINTERP_FOUND) add_gudhi_cython_lib("${TBB_MALLOC_RELEASE_LIBRARY}") endif() set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ") + message("** Add tbb ${TBB_LIBRARY_DIRS}") set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ") endif() -- cgit v1.2.3 From f676adcda7304bf29a50f5f010cdaa443ea2d2d3 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 11:43:38 +0200 Subject: Typo --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 1048328e..0b10454f 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -45,7 +45,7 @@ install: - python --version - pip --version - python -m pip install --upgrade pip - - pip install -U setuptools numpy matplotlib scipy Cython pytest ) + - pip install -U setuptools numpy matplotlib scipy Cython pytest build_script: - mkdir build -- cgit v1.2.3 From 55ddac99e964b21880604f41be938dc5eb990aa2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 11:49:36 +0200 Subject: Add CMake tag for gmp library directory --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 0b10454f..b0396d8b 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -26,7 +26,7 @@ environment: # PYTHON: "C:\\Python37-x64" - target: Python - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" PYTHON: "C:\\Python37-x64" -- cgit v1.2.3 From 4cb49545685c8eee54bd6e77ad8212516674799d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 22:07:01 +0200 Subject: Print gudhi version to see if well installed --- .appveyor.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.appveyor.yml b/.appveyor.yml index b0396d8b..e352d09a 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -55,6 +55,7 @@ build_script: cd src/cython & Type setup.py & python setup.py install & + python -c "import gudhi; print(gudhi.__version__)" & MSBuild RUN_TESTS.vcxproj ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & -- cgit v1.2.3 From f79f39561b1ba239157774190c2a3ddb9c69d9fd Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 22:24:01 +0200 Subject: Add debug traces and set PYTHONPATH --- .appveyor.yml | 3 +++ 1 file changed, 3 
insertions(+) diff --git a/.appveyor.yml b/.appveyor.yml index e352d09a..1595f0a5 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -41,6 +41,9 @@ init: install: - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows - SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH% + - "ECHO %PYTHON%" + - SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH% + - "ECHO %PYTHONPATH%" - CALL "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" amd64 - python --version - pip --version -- cgit v1.2.3 From c60b439d7c609a93e1efcfce558f34744121391b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 5 Jun 2019 22:37:49 +0200 Subject: Add debug traces and set PYTHONPATH --- .appveyor.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.appveyor.yml b/.appveyor.yml index 1595f0a5..0d834aa8 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -55,6 +55,7 @@ build_script: - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( + ECHO %PYTHONPATH% & cd src/cython & Type setup.py & python setup.py install & -- cgit v1.2.3 From 02df78ee81ccb3a080ce37eab709f92e14bd8f29 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 6 Jun 2019 09:14:05 +0200 Subject: Add debug traces --- .appveyor.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.appveyor.yml b/.appveyor.yml index 0d834aa8..492d1677 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -59,6 +59,7 @@ build_script: cd src/cython & Type setup.py & python setup.py install & + DIR %PYTHON%\\Lib\\site-packages & python -c "import gudhi; print(gudhi.__version__)" & MSBuild RUN_TESTS.vcxproj ) else ( -- cgit v1.2.3 From b9ddc3b6f1501fc15e84a8bea5927adae6623e5a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 6 Jun 2019 09:34:17 +0200 Subject: Let's try this fix from support team --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 492d1677..dea0fc15 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -40,7 +40,7 @@ init: install: - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows - - SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH% + - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PYTHON%\Library\bin;%PATH% - "ECHO %PYTHON%" - SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH% - "ECHO %PYTHONPATH%" -- cgit v1.2.3 From ef5fcf9e54e0c14f7c2e7eebb5c24fb91018606a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 6 Jun 2019 10:38:07 +0200 Subject: Fix doc review --- src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index 452527c4..c57174cb 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ 
-73,7 +73,8 @@ class Persistent_cohomology {
   typedef typename FilteredComplex::Filtration_value Filtration_value;
   /** \brief Type of element of the field. */
   typedef typename CoefficientField::Element Arith_element;
-  /** \brief Persistent interval type. The Arith_element field is used for the multi-field framework. */
+  /** \brief Type for birth and death FilteredComplex::Simplex_handle.
+   * The Arith_element field is used for the multi-field framework. */
   typedef std::tuple<Simplex_handle, Simplex_handle, Arith_element> Persistent_interval;

  private:
-- cgit v1.2.3


From 30fac00a3aee2f26aa44e62eb923af733c4f3bc7 Mon Sep 17 00:00:00 2001
From: ROUVREAU Vincent
Date: Thu, 6 Jun 2019 11:56:54 +0200
Subject: Add an example to explain how the distance is computed

---
 .../doc/bottleneck_distance_example.ipe | 287 +++++++++++++++++++++
 .../doc/bottleneck_distance_example.png | Bin 0 -> 29619 bytes
 2 files changed, 287 insertions(+)
 create mode 100644 src/Bottleneck_distance/doc/bottleneck_distance_example.ipe
 create mode 100644 src/Bottleneck_distance/doc/bottleneck_distance_example.png

diff --git a/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe b/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe
new file mode 100644
index 00000000..9dc5420e
--- /dev/null
+++ b/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe
@@ -0,0 +1,287 @@
+[Ipe XML source: the markup was lost in text extraction and only path coordinates remain. The drawing shows the two axes, the diagonal, the points (0, 0), (13, 0) and (6.5, 6.5), and the label "Bottleneck distance is 6.5".]
diff --git a/src/Bottleneck_distance/doc/bottleneck_distance_example.png b/src/Bottleneck_distance/doc/bottleneck_distance_example.png
new file mode 100644
index 00000000..fb6847c8
Binary files /dev/null and b/src/Bottleneck_distance/doc/bottleneck_distance_example.png differ
-- cgit v1.2.3


From 90198a22800834acd333f9b8360221035f6862cf Mon Sep 17 00:00:00 2001
From: ROUVREAU Vincent
Date: Thu, 6 Jun 2019 14:36:36 +0200
Subject: Fix #11 - Distance computation shall be better documented.
---
 .../doc/Intro_bottleneck_distance.h | 40 +++++++++++++++++++++
 .../doc/bottleneck_distance_example.png | Bin 29619 -> 21465 bytes
 .../example/bottleneck_basic_example.cpp | 22 ------------
 src/Bottleneck_distance/include/gudhi/Bottleneck.h | 7 ++++
 src/cython/doc/bottleneck_distance_user.rst | 33 ++++++++++++++---
 5 files changed, 75 insertions(+), 27 deletions(-)

diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
index 6fd058a8..49137ee1 100644
--- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
+++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
@@ -6,6 +6,9 @@
  *
  * Copyright (C) 2015 Inria
  *
+ * Modifications:
+ * - 2019/06 Vincent Rouvreau : Fix #11 - Distance computation shall be better documented.
+ *
  * This program is free software: you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
  * the Free Software Foundation, either version 3 of the License, or
@@ -44,6 +47,43 @@ namespace persistence_diagram {
  * This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems"
  * \cite DBLP:journals/algorithmica/EfratIK01. Another relevant publication, although it was not used is
  * "Geometry Helps to Compare Persistence Diagrams" \cite Kerber:2017:GHC:3047249.3064175.
+ *
+ * \section bottleneckdistanceprecision Distance computation
+ *
+ * Bottleneck distance does not use Euclidean distance, like explained in the following example:
+ *
+ * \code{.cpp}
+#include <gudhi/Bottleneck.h>
+
+#include <iostream>
+#include <vector>
+#include <utility>  // for pair
+
+int main() {
+  std::vector< std::pair<double, double> > diag1, diag2;
+  diag1.emplace_back(0., 0.);
+  diag2.emplace_back(0., 13.);
+
+  double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2);
+  std::cout << "Bottleneck distance = " << b << std::endl;
+}
+ * \endcode
+ *
+ * \code Bottleneck distance = 6.5
+ * \endcode
+ *
+ * \image html bottleneck_distance_example.png The point (0, 13) is at 6.5 distance from the diagonal and more specifically from the point (6.5, 6.5)
+ *
+ * \section bottleneckbasicexample Basic example
+ *
+ * This another example computes the bottleneck distance from 2 persistence diagrams:
+ * \include Bottleneck_distance/bottleneck_basic_example.cpp
+ *
+ * \code
+ Bottleneck distance = 0.75
+ Approx bottleneck distance = 0.808176
+ * \endcode
+ */
 /** @} */  // end defgroup bottleneck_distance

diff --git a/src/Bottleneck_distance/doc/bottleneck_distance_example.png b/src/Bottleneck_distance/doc/bottleneck_distance_example.png
index fb6847c8..b56ee791 100644
Binary files a/src/Bottleneck_distance/doc/bottleneck_distance_example.png and b/src/Bottleneck_distance/doc/bottleneck_distance_example.png differ
diff --git a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
index 3df7d12d..61778a55 100644
--- a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
+++ b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
@@ -1,25 +1,3 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- * - * Authors: Francois Godi, small modifications by Pawel Dlotko - * - * Copyright (C) 2015 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - #include #include diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h index 7a553006..4ce6cacc 100644 --- a/src/Bottleneck_distance/include/gudhi/Bottleneck.h +++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h @@ -6,6 +6,9 @@ * * Copyright (C) 2015 Inria * + * Modifications: + * - 2019/06 Vincent Rouvreau : Fix doxygen warning. + * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or @@ -92,6 +95,10 @@ inline double bottleneck_distance_exact(Persistence_graph& g) { * * \tparam Persistence_diagram1,Persistence_diagram2 * models of the concept `PersistenceDiagram`. + * + * \param[in] diag1 The first persistence diagram. + * \param[in] diag2 The second persistence diagram. + * * \param[in] e * \parblock * If `e` is 0, this uses an expensive algorithm to compute the exact distance. diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst index 605db022..96e881c8 100644 --- a/src/cython/doc/bottleneck_distance_user.rst +++ b/src/cython/doc/bottleneck_distance_user.rst @@ -13,11 +13,34 @@ Function -------- .. autofunction:: gudhi.bottleneck_distance +Distance computation +-------------------- + +Bottleneck distance does not use Euclidean distance, like explained in the following example: + +.. testcode:: + + import gudhi + + message = "Bottleneck distance = " + '%.2f' % gudhi.bottleneck_distance([0., 0.], [0., 13.]) + print(message) + +.. testoutput:: + + Bottleneck distance = 6.5 + +.. figure:: + ../../doc/Bottleneck_distance/bottleneck_distance_example.png + :figclass: align-center + + The point (0, 13) is at 6.5 distance from the diagonal and more + specifically from the point (6.5, 6.5) + Basic example ------------- -This example computes the bottleneck distance from 2 persistence diagrams: +This another example computes the bottleneck distance from 2 persistence diagrams: .. testcode:: @@ -26,15 +49,15 @@ This example computes the bottleneck distance from 2 persistence diagrams: diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]] diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]] - message = "Bottleneck distance approximation=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1) + message = "Bottleneck distance approximation = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1) print(message) - message = "Bottleneck distance value=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2) + message = "Bottleneck distance value = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2) print(message) The output is: .. 
testoutput:: - Bottleneck distance approximation=0.81 - Bottleneck distance value=0.75 + Bottleneck distance approximation = 0.81 + Bottleneck distance value = 0.75 -- cgit v1.2.3 From 03b8ed6090acbebca633440896958626e4eb4080 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 6 Jun 2019 15:05:05 +0200 Subject: Fix sphinx test --- src/cython/doc/bottleneck_distance_user.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst index 96e881c8..a6a4426f 100644 --- a/src/cython/doc/bottleneck_distance_user.rst +++ b/src/cython/doc/bottleneck_distance_user.rst @@ -22,7 +22,7 @@ Bottleneck distance does not use Euclidean distance, like explained in the follo import gudhi - message = "Bottleneck distance = " + '%.2f' % gudhi.bottleneck_distance([0., 0.], [0., 13.]) + message = "Bottleneck distance = " + '%.1f' % gudhi.bottleneck_distance([[0., 0.]], [[0., 13.]]) print(message) .. testoutput:: -- cgit v1.2.3 From a08323ea6b3706191bbd546fcbfcd5d321e0ac92 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 7 Jun 2019 15:34:02 +0200 Subject: Try with a direct link to mathjax website --- src/Doxyfile.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index 1c293d1c..9a6a3074 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -1440,7 +1440,7 @@ MATHJAX_FORMAT = HTML-CSS # The default value is: http://cdn.mathjax.org/mathjax/latest. # This tag requires that the tag USE_MATHJAX is set to YES. -MATHJAX_RELPATH = ../common +MATHJAX_RELPATH = https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2 # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example -- cgit v1.2.3 From 18e07282a3b9b1e8616ca871d79d7dc820e94a85 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 7 Jun 2019 15:50:31 +0200 Subject: MathJax.js no more required --- src/common/doc/MathJax.js | 53 ----------------------------------------------- 1 file changed, 53 deletions(-) delete mode 100644 src/common/doc/MathJax.js diff --git a/src/common/doc/MathJax.js b/src/common/doc/MathJax.js deleted file mode 100644 index 35e1994e..00000000 --- a/src/common/doc/MathJax.js +++ /dev/null @@ -1,53 +0,0 @@ -(function () { - var newMathJax = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'; - var oldMathJax = 'cdn.mathjax.org/mathjax/latest/MathJax.js'; - - var replaceScript = function (script, src) { - // - // Make redirected script - // - var newScript = document.createElement('script'); - newScript.src = newMathJax + src.replace(/.*?(\?|$)/, '$1'); - // - // Move onload and onerror handlers to new script - // - newScript.onload = script.onload; - newScript.onerror = script.onerror; - script.onload = script.onerror = null; - // - // Move any content (old-style configuration scripts) - // - while (script.firstChild) newScript.appendChild(script.firstChild); - // - // Copy script id - // - if (script.id != null) newScript.id = script.id; - // - // Replace original script with new one - // - script.parentNode.replaceChild(newScript, script); - // - // Issue a console warning - // - console.warn('WARNING: cdn.mathjax.org has been retired. 
Check https://www.mathjax.org/cdn-shutting-down/ for migration tips.') - } - - if (document.currentScript) { - var script = document.currentScript; - replaceScript(script, script.src); - } else { - // - // Look for current script by searching for one with the right source - // - var n = oldMathJax.length; - var scripts = document.getElementsByTagName('script'); - for (var i = 0; i < scripts.length; i++) { - var script = scripts[i]; - var src = (script.src || '').replace(/.*?:\/\//,''); - if (src.substr(0, n) === oldMathJax) { - replaceScript(script, src); - break; - } - } - } -})(); \ No newline at end of file -- cgit v1.2.3 From af3b12d763fdd113a1c4de6b58cc4a096bc646e9 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Fri, 7 Jun 2019 15:59:51 +0200 Subject: Update src/Bottleneck_distance/doc/Intro_bottleneck_distance.h Co-Authored-By: Marc Glisse --- src/Bottleneck_distance/doc/Intro_bottleneck_distance.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h index 49137ee1..d204b610 100644 --- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h +++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h @@ -76,7 +76,7 @@ int main() { * * \section bottleneckbasicexample Basic example * - * This another example computes the bottleneck distance from 2 persistence diagrams: + * This other example computes the bottleneck distance from 2 persistence diagrams: * \include Bottleneck_distance/bottleneck_basic_example.cpp * * \code -- cgit v1.2.3 From cb890f276bc5327ebad1e1fcd1cc05a0d300de83 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Fri, 7 Jun 2019 16:00:12 +0200 Subject: Update src/Bottleneck_distance/doc/Intro_bottleneck_distance.h Co-Authored-By: Marc Glisse --- src/Bottleneck_distance/doc/Intro_bottleneck_distance.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h index d204b610..520aab4f 100644 --- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h +++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h @@ -72,7 +72,7 @@ int main() { * \code Bottleneck distance = 6.5 * \endcode * - * \image html bottleneck_distance_example.png The point (0, 13) is at 6.5 distance from the diagonal and more specifically from the point (6.5, 6.5) + * \image html bottleneck_distance_example.png The point (0, 13) is at distance 6.5 from the diagonal and more specifically from the point (6.5, 6.5) * * \section bottleneckbasicexample Basic example * -- cgit v1.2.3 From 4b401e9a51cbcbb44d47f4819b57ec7787088df4 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 7 Jun 2019 17:27:29 +0200 Subject: Fix doc review. Add citations for python doc version. 
Replace (13,0) with (0,13) in figure --- .../doc/Intro_bottleneck_distance.h | 5 +++-- .../doc/bottleneck_distance_example.ipe | 8 ++++---- .../doc/bottleneck_distance_example.png | Bin 21465 -> 19485 bytes src/common/doc/main_page.md | 4 +++- src/cython/doc/bottleneck_distance_sum.inc | 4 ++-- src/cython/doc/bottleneck_distance_user.rst | 10 +++++++--- 6 files changed, 19 insertions(+), 12 deletions(-) diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h index 520aab4f..7cb0752e 100644 --- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h +++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h @@ -40,7 +40,8 @@ namespace persistence_diagram { * * The bottleneck distance measures the similarity between two persistence diagrams. It is the shortest distance b for * which there exists a perfect matching between the points of the two diagrams (completed with all the points on the - * diagonal in order to ignore cardinality mismatchs) such that any couple of matched points are at distance at most b. + * diagonal in order to ignore cardinality mismatchs) such that any couple of matched points are at distance at most b, + * where the distance between points is the sup norm in \f$\mathbb{R}^2\f$ (not the Euclidean distance). * * \image html perturb_pd.png On this picture, the red edges represent the matching. The bottleneck distance is the length of the longest edge. * @@ -50,7 +51,7 @@ namespace persistence_diagram { * * \section bottleneckdistanceprecision Distance computation * - * Bottleneck distance does not use Euclidean distance, like explained in the following example: + * The following example explains how the distance is computed: * * \code{.cpp} #include diff --git a/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe b/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe index 9dc5420e..2033ea56 100644 --- a/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe +++ b/src/Bottleneck_distance/doc/bottleneck_distance_example.ipe @@ -1,7 +1,7 @@ - + @@ -220,13 +220,13 @@ h + + - - @@ -274,7 +274,7 @@ h 224 320 l (0, 0) -(13, 0) +(0, 13) (6.5, 6.5) 160.433 359.995 m diff --git a/src/Bottleneck_distance/doc/bottleneck_distance_example.png b/src/Bottleneck_distance/doc/bottleneck_distance_example.png index b56ee791..1d3b91aa 100644 Binary files a/src/Bottleneck_distance/doc/bottleneck_distance_example.png and b/src/Bottleneck_distance/doc/bottleneck_distance_example.png differ diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index e61eee81..a0a3496d 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -349,7 +349,9 @@ Bottleneck distance measures the similarity between two persistence diagrams. It's the shortest distance b for which there exists a perfect matching between the points of the two diagrams (+ all the diagonal points) such that - any couple of matched points are at distance at most b. + any couple of matched points are at distance at most b, + where the distance between points is the sup norm in \f$\mathbb{R}^2\f$ + (not the Euclidean distance). Author: François Godi
diff --git a/src/cython/doc/bottleneck_distance_sum.inc b/src/cython/doc/bottleneck_distance_sum.inc index 41b9c5a3..6840e838 100644 --- a/src/cython/doc/bottleneck_distance_sum.inc +++ b/src/cython/doc/bottleneck_distance_sum.inc @@ -6,8 +6,8 @@ | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | | | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 | | | diagonal points) such that any couple of matched points are at | | - | Bottleneck distance is the length of | distance at most b. | :Copyright: GPL v3 | - | the longest edge | | | + | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: GPL v3 | + | the longest edge | norm in :math:`\mathbb{R}^2`. | | | | | :Requires: CGAL :math:`\geq` 4.8.0 | +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ | * :doc:`bottleneck_distance_user` | | diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst index a6a4426f..9435c7f1 100644 --- a/src/cython/doc/bottleneck_distance_user.rst +++ b/src/cython/doc/bottleneck_distance_user.rst @@ -9,6 +9,10 @@ Definition .. include:: bottleneck_distance_sum.inc +This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" +:cite:`DBLP:journals/algorithmica/EfratIK01`. Another relevant publication, although it was not used is +"Geometry Helps to Compare Persistence Diagrams" :cite:`Kerber:2017:GHC:3047249.3064175`. + Function -------- .. autofunction:: gudhi.bottleneck_distance @@ -16,7 +20,7 @@ Function Distance computation -------------------- -Bottleneck distance does not use Euclidean distance, like explained in the following example: +The following example explains how the distance is computed: .. testcode:: @@ -33,14 +37,14 @@ Bottleneck distance does not use Euclidean distance, like explained in the follo ../../doc/Bottleneck_distance/bottleneck_distance_example.png :figclass: align-center - The point (0, 13) is at 6.5 distance from the diagonal and more + The point (0, 13) is at distance 6.5 from the diagonal and more specifically from the point (6.5, 6.5) Basic example ------------- -This another example computes the bottleneck distance from 2 persistence diagrams: +This other example computes the bottleneck distance from 2 persistence diagrams: .. 
testcode:: -- cgit v1.2.3 From 456eadd7e6da04c6d35f02a79d3aa78e5d7bc970 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 09:49:23 +0200 Subject: Comment what fails --- .appveyor.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index dea0fc15..e481c62f 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -41,9 +41,7 @@ init: install: - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PYTHON%\Library\bin;%PATH% - - "ECHO %PYTHON%" - SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH% - - "ECHO %PYTHONPATH%" - CALL "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" amd64 - python --version - pip --version @@ -57,11 +55,9 @@ build_script: - if [%target%]==[Python] ( ECHO %PYTHONPATH% & cd src/cython & - Type setup.py & python setup.py install & - DIR %PYTHON%\\Lib\\site-packages & - python -c "import gudhi; print(gudhi.__version__)" & - MSBuild RUN_TESTS.vcxproj + ECHO This fails : python -c "import gudhi; print(gudhi.__version__)" & + ECHO This fails : MSBuild RUN_TESTS.vcxproj ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 1d299f3f21bf5f4acc4e8d19cf3dcde6c3045133 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 09:59:36 +0200 Subject: Seperate build and tests --- .appveyor.yml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index e481c62f..82520c40 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -56,9 +56,16 @@ build_script: ECHO %PYTHONPATH% & cd src/cython & python setup.py install & - ECHO This fails : python -c "import gudhi; print(gudhi.__version__)" & - ECHO This fails : MSBuild RUN_TESTS.vcxproj ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & - ctest -j 1 -C Release -E diff_files ) + +test_script: + - cd build + - if [%target%]==[Python] ( + cd src/cython & + python -c "import gudhi; print(gudhi.__version__)" & + ECHO MSBuild RUN_TESTS.vcxproj + ) else ( + ctest -j 1 -C Release -E diff_files + ) \ No newline at end of file -- cgit v1.2.3 From b8185dff1a17a7101b043554a1bf688366c377de Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 10:18:52 +0200 Subject: fix typo --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 82520c40..22c204b6 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -55,7 +55,7 @@ build_script: - if [%target%]==[Python] ( ECHO %PYTHONPATH% & cd src/cython & - python setup.py install & + python setup.py install ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ) -- cgit v1.2.3 From 2180529dd41471f08da8ccfb026338f98eed918c Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 10:22:59 +0200 Subject: fix typo --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 22c204b6..10b0d29c 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -57,7 +57,7 @@ build_script: cd src/cython & python setup.py install ) else ( - MSBuild GUDHIdev.sln /m 
/p:Configuration=Release /p:Platform=x64 & + MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 ) test_script: -- cgit v1.2.3 From d6f72bae8c30cab74c92c6ee98f4da961059000a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 10:36:29 +0200 Subject: print sys.path --- .appveyor.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.appveyor.yml b/.appveyor.yml index 10b0d29c..e41da62d 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -64,6 +64,7 @@ test_script: - cd build - if [%target%]==[Python] ( cd src/cython & + python -c "import sys; print(sys.path)" & python -c "import gudhi; print(gudhi.__version__)" & ECHO MSBuild RUN_TESTS.vcxproj ) else ( -- cgit v1.2.3 From a3ff485140593e5c9c1471ed6fe8add2b209ac16 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 11:01:42 +0200 Subject: Add vcpkg dll path to the PATH --- .appveyor.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.appveyor.yml b/.appveyor.yml index e41da62d..3d109a75 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -40,6 +40,7 @@ init: install: - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows + - SET PATH=c:\Tools\vcpkg\installed\x86-windows\bin;%PATH% - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PYTHON%\Library\bin;%PATH% - SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH% - CALL "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" amd64 @@ -63,6 +64,7 @@ build_script: test_script: - cd build - if [%target%]==[Python] ( + dumpbin /dependents %PYTHON%\\Lib\\site-packages\\ cd src/cython & python -c "import sys; print(sys.path)" & python -c "import gudhi; print(gudhi.__version__)" & -- cgit v1.2.3 From 7bbb2f4dbdca006a18c08d6bebb6782c19fc649e Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 11:09:58 +0200 Subject: Add dumpbin to find dll dependencies --- .appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 3d109a75..d3f44b32 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -40,7 +40,7 @@ init: install: - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows - - SET PATH=c:\Tools\vcpkg\installed\x86-windows\bin;%PATH% + - SET PATH=c:\Tools\vcpkg\installed\x64-windows\bin;%PATH% - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PYTHON%\Library\bin;%PATH% - SET PYTHONPATH=%PYTHON%\\Lib\\site-packages;%PYTHONPATH% - CALL "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" amd64 @@ -64,7 +64,7 @@ build_script: test_script: - cd build - if [%target%]==[Python] ( - dumpbin /dependents %PYTHON%\\Lib\\site-packages\\ + dumpbin /dependents %PYTHON%\\Lib\\site-packages\\gudhi.cp37-win_amd64.pyd cd src/cython & python -c "import sys; print(sys.path)" & python -c "import gudhi; print(gudhi.__version__)" & -- cgit v1.2.3 From 3c0f3dc1a42ddd0b6af4b605f880d36664971bd0 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 11:17:05 +0200 Subject: Let's try --- 
.appveyor.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index d3f44b32..ef1b4de9 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -54,7 +54,6 @@ build_script: - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( - ECHO %PYTHONPATH% & cd src/cython & python setup.py install ) else ( @@ -64,11 +63,9 @@ build_script: test_script: - cd build - if [%target%]==[Python] ( - dumpbin /dependents %PYTHON%\\Lib\\site-packages\\gudhi.cp37-win_amd64.pyd cd src/cython & - python -c "import sys; print(sys.path)" & python -c "import gudhi; print(gudhi.__version__)" & - ECHO MSBuild RUN_TESTS.vcxproj + MSBuild RUN_TESTS.vcxproj ) else ( ctest -j 1 -C Release -E diff_files ) \ No newline at end of file -- cgit v1.2.3 From 8a9c74ecfa011904ac0c57b054c12f3889686958 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 13:23:22 +0200 Subject: Add traces --- .appveyor.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.appveyor.yml b/.appveyor.yml index ef1b4de9..49405619 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -63,6 +63,7 @@ build_script: test_script: - cd build - if [%target%]==[Python] ( + DIR src/cython & cd src/cython & python -c "import gudhi; print(gudhi.__version__)" & MSBuild RUN_TESTS.vcxproj -- cgit v1.2.3 From a6fbf3763510592154fb004ff484aa8c15c5bf35 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 13:33:55 +0200 Subject: Merge build and tests --- .appveyor.yml | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 49405619..c6475f2e 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -55,18 +55,9 @@ build_script: - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. 
- if [%target%]==[Python] ( cd src/cython & - python setup.py install - ) else ( - MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 - ) - -test_script: - - cd build - - if [%target%]==[Python] ( - DIR src/cython & - cd src/cython & - python -c "import gudhi; print(gudhi.__version__)" & + python setup.py install & MSBuild RUN_TESTS.vcxproj ) else ( + MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files - ) \ No newline at end of file + ) -- cgit v1.2.3 From ba1055e7e267095cf9a35a74116f53ad326697fa Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 13:39:51 +0200 Subject: Shall be good --- .appveyor.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index c6475f2e..31eb48d4 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,17 +13,17 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: -# - target: Examples -# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF -# PYTHON: "C:\\Python37-x64" -# -# - target: UnitaryTests -# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF -# PYTHON: "C:\\Python37-x64" -# -# - target: Utilities -# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF -# PYTHON: "C:\\Python37-x64" + - target: Examples + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" + + - target: UnitaryTests + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" + + - target: Utilities + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - target: Python CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" -- cgit v1.2.3 From 5e96f167e9f8d847ed82b37dd82bdfd60202660d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 15:34:36 +0200 Subject: Verbose Alpha_complex_unit_test as it fails --- .appveyor.yml | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 31eb48d4..cd562b75 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,21 +13,21 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: - - target: Examples - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" +# - target: Examples +# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF +# PYTHON: "C:\\Python37-x64" - target: UnitaryTests CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF PYTHON: "C:\\Python37-x64" - - target: Utilities - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" +# - target: Utilities +# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON 
-DWITH_GUDHI_PYTHON=OFF +# PYTHON: "C:\\Python37-x64" - - target: Python - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" - PYTHON: "C:\\Python37-x64" +# - target: Python +# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" +# PYTHON: "C:\\Python37-x64" cache: @@ -53,11 +53,13 @@ build_script: - mkdir build - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - - if [%target%]==[Python] ( - cd src/cython & - python setup.py install & - MSBuild RUN_TESTS.vcxproj - ) else ( - MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & - ctest -j 1 -C Release -E diff_files - ) + - MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 + - ctest -j 1 -C Release -R Alpha_complex_test_unit -V +# - if [%target%]==[Python] ( +# cd src/cython & +# python setup.py install & +# MSBuild RUN_TESTS.vcxproj +# ) else ( +# MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & +# ctest -j 1 -C Release -E diff_files +# ) -- cgit v1.2.3 From 8867d40f40c4d35fa2988e8b70b0290744d9c74a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 16:27:44 +0200 Subject: try to fix Alpha_complex_test_unit on windows --- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index 622fcae8..b46b6da5 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -42,7 +42,7 @@ // Use dynamic_dimension_tag for the user to be able to set dimension typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d; // Use static dimension_tag for the user not to be able to set dimension -typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel_s; +typedef CGAL::Epick_d< CGAL::Dimension_tag<3> > Kernel_s; // The triangulation uses the default instantiation of the TriangulationDataStructure template parameter typedef boost::mpl::list list_of_kernel_variants; -- cgit v1.2.3 From f58f0bb2cb99076d0cd3a11ad39f3277213e3f5e Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 11 Jun 2019 17:16:18 +0200 Subject: Shall be good --- .appveyor.yml | 36 +++++++++++++++++------------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index cd562b75..31eb48d4 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,21 +13,21 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: -# - target: Examples -# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF -# PYTHON: "C:\\Python37-x64" + - target: Examples + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - target: UnitaryTests CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF 
PYTHON: "C:\\Python37-x64" -# - target: Utilities -# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF -# PYTHON: "C:\\Python37-x64" + - target: Utilities + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" -# - target: Python -# CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" -# PYTHON: "C:\\Python37-x64" + - target: Python + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" + PYTHON: "C:\\Python37-x64" cache: @@ -53,13 +53,11 @@ build_script: - mkdir build - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - - MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 - - ctest -j 1 -C Release -R Alpha_complex_test_unit -V -# - if [%target%]==[Python] ( -# cd src/cython & -# python setup.py install & -# MSBuild RUN_TESTS.vcxproj -# ) else ( -# MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & -# ctest -j 1 -C Release -E diff_files -# ) + - if [%target%]==[Python] ( + cd src/cython & + python setup.py install & + MSBuild RUN_TESTS.vcxproj + ) else ( + MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & + ctest -j 1 -C Release -E diff_files + ) -- cgit v1.2.3 From 27d0f9c592181845e38f85da6a62c6a129b56122 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 12 Jun 2019 07:50:39 +0200 Subject: Remove references manual --- src/common/doc/main_page.md | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index 88e761df..47d0583a 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -24,7 +24,7 @@ - User manual: \ref cubical_complex - Reference manual: Gudhi::cubical_complex::Bitmap_cubical_complex + User manual: \ref cubical_complex @@ -57,8 +57,7 @@ - User manual: \ref alpha_complex - Reference manual: Gudhi::alpha_complex::Alpha_complex and - Gudhi::alpha_complex::Alpha_complex_3d + User manual: \ref alpha_complex @@ -82,7 +81,7 @@ - User manual: \ref cech_complex - Reference manual: Gudhi::cech_complex::Cech_complex + User manual: \ref cech_complex @@ -108,7 +107,7 @@ - User manual: \ref rips_complex - Reference manual: Gudhi::rips_complex::Rips_complex + User manual: \ref rips_complex @@ -133,7 +132,7 @@ - User manual: \ref witness_complex - Reference manual: Gudhi::witness_complex::SimplicialComplexForWitness + User manual: \ref witness_complex @@ -149,7 +148,6 @@ topological information about the input data. They can be computed with a cover of the data, that comes i.e. from the preimage of a family of intervals covering the image of a scalar-valued function defined on the data.
- User manual: \ref cover_complex - Reference manual: Gudhi::cover_complex::Cover_complex Author: Mathieu Carrière
@@ -160,7 +158,7 @@ - User manual: \ref cover_complex - Reference manual: Gudhi::cover_complex::Cover_complex + User manual: \ref cover_complex @@ -188,7 +186,7 @@ - User manual: \ref simplex_tree - Reference manual: Gudhi::Simplex_tree + User manual: \ref simplex_tree @@ -216,7 +214,7 @@ - User manual: \ref skbl - Reference manual: Gudhi::skeleton_blocker::Skeleton_blocker_complex + User manual: \ref skbl @@ -241,7 +239,7 @@ - User manual: \ref toplex_map - Reference manual: Gudhi::Toplex_map + User manual: \ref toplex_map @@ -301,7 +299,7 @@ - User manual: \ref persistent_cohomology - Reference manual: Gudhi::persistent_cohomology::Persistent_cohomology + User manual: \ref persistent_cohomology @@ -331,7 +329,7 @@ - User manual: \ref tangential_complex - Reference manual: Gudhi::tangential_complex::Tangential_complex + User manual: \ref tangential_complex -- cgit v1.2.3 From db3f192fbc3ea8b47b208563735ef0958e7adab9 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 12 Jun 2019 10:20:06 +0200 Subject: Add appveyor build status badge --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 6cc05ff6..8636ac77 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ [![Build Status](https://travis-ci.org/GUDHI/gudhi-devel.svg?branch=master)](https://travis-ci.org/GUDHI/gudhi-devel) [![CircleCI](https://circleci.com/gh/GUDHI/gudhi-devel/tree/master.svg?style=svg)](https://circleci.com/gh/GUDHI/gudhi-devel/tree/master) +[![Build status](https://ci.appveyor.com/api/projects/status/976j2uut8xgalvx2/branch/master?svg=true)](https://ci.appveyor.com/project/GUDHI/gudhi-devel/branch/master) + ![GUDHI](src/common/doc/Gudhi_banner.png "Topological Data Analysis (TDA) and Higher Dimensional Geometry Understanding") -- cgit v1.2.3 From af52d14be2d742ee73df418dd8a8464e2f849d74 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 12 Jun 2019 11:05:49 +0200 Subject: Only one constructor to clarify documentation about persistence_dim_max parameter --- .../include/gudhi/Persistent_cohomology.h | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index c57174cb..689a17c0 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -98,9 +98,13 @@ class Persistent_cohomology { * * @param[in] cpx Complex for which the persistent homology is computed. * cpx is a model of FilteredComplex + * + * @param[in] persistence_dim_max if true, the persistent homology for the maximal dimension in the + * complex is computed. If false, it is ignored. Default is false. + * * @exception std::out_of_range In case the number of simplices is more than Simplex_key type numeric limit. */ - explicit Persistent_cohomology(FilteredComplex& cpx) + explicit Persistent_cohomology(FilteredComplex& cpx, bool persistence_dim_max = false) : cpx_(&cpx), dim_max_(cpx.dimension()), // upper bound on the dimension of the simplices coeff_field_(), // initialize the field coefficient structure. @@ -126,18 +130,6 @@ class Persistent_cohomology { ++idx_fil; dsets_.make_set(cpx_->key(sh)); } - } - - /** \brief Initializes the Persistent_cohomology class. - * - * @param[in] cpx Complex for which the persistent homology is compiuted. 
- * cpx is a model of FilteredComplex - * - * @param[in] persistence_dim_max if true, the persistent homology for the maximal dimension in the - * complex is computed. If false, it is ignored. Default is false. - */ - Persistent_cohomology(FilteredComplex& cpx, bool persistence_dim_max) - : Persistent_cohomology(cpx) { if (persistence_dim_max) { ++dim_max_; } -- cgit v1.2.3 From 2f85df31681bb808a39fbeff9d0787ba7f632052 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 12 Jun 2019 11:44:01 +0200 Subject: Fix alpha complex 3d utilities and add links to utils pages --- .../doc/Intro_persistent_cohomology.h | 67 +++++++++++----------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h index 5fb9d4d2..3e0ad133 100644 --- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h +++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h @@ -154,6 +154,8 @@ diagram. 3 1 0.104347 inf 3 2 0.138335 inf \endcode +More details on the Rips complex utilities dedicated page. + \li Persistent_cohomology/rips_multifield_persistence.cpp computes the Rips complex of a point cloud and outputs its persistence diagram with a family of field coefficients. @@ -166,6 +168,8 @@ The file should contain square or lower triangular distance matrix with semicolo The code do not check if it is dealing with a distance matrix. It is the user responsibility to provide a valid input. Please refer to data/distance_matrix/lower_triangular_distance_matrix.csv for an example of a file. +More details on the Rips complex utilities dedicated page. + \li Rips_complex/rips_correlation_matrix_persistence.cpp computes the Rips complex of a correlation matrix and outputs its persistence diagram. @@ -175,6 +179,8 @@ It is the user responsibility to ensure that this is the case. The input is to b triangular matrix. Please refer to data/correlation_matrix/lower_triangular_correlation_matrix.csv for an example of a file. +More details on the Rips complex utilities dedicated page. + \li Alpha_complex/alpha_complex_3d_persistence.cpp computes the persistent homology with \f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file. @@ -185,48 +191,33 @@ Alpha_complex/alpha_complex_3d_persistence.cpp computes the persistent homol 2 1 0.0934117 1.00003 2 2 0.56444 1.03938 \endcode -\li -Alpha_complex/exact_alpha_complex_3d_persistence.cpp computes the persistent homology with -\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file. +More details on the Alpha complex utilities dedicated page. + Here, as CGAL computes the exact values, it is slower, but it is necessary when points are on a grid -for instance. -\code $> ./exact_alpha_complex_3d_persistence ../../data/points/sphere3D_pts_on_grid.off -p 2 -m 0.1 \endcode +for instance (the fast version `--fast` would give incorrect values). +\code $> ./alpha_complex_3d_persistence ../../data/points/sphere3D_pts_on_grid.off --exact -p 2 -m 0.1 \endcode \code Simplex_tree dim: 3 2 0 0 inf 2 2 0.0002 0.2028 \endcode -\li -Alpha_complex/weighted_alpha_complex_3d_persistence.cpp computes the persistent homology with +It can also compute the persistent homology with \f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the weighted alpha complex on points sampling from an OFF file and a weights file. 
-\code $> ./weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off -../../data/points/tore3D_300.weights -p 2 -m 0.45 \endcode +\code $> ./alpha_complex_3d_persistence ../../data/points/tore3D_300.off +--weight-file ../../data/points/tore3D_300.weights -p 2 -m 0.45 \endcode \code Simplex_tree dim: 3 2 0 -1 inf 2 1 -0.931784 0.000103311 2 1 -0.906588 2.60165e-05 2 2 -0.43556 0.0393798 \endcode -\li -Alpha_complex/alpha_complex_persistence.cpp computes the persistent homology with -\f$\mathbb{Z}/p\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file. -\code $> ./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off \endcode -\code Alpha complex is of dimension 3 - 9273 simplices - 300 vertices. -Simplex_tree dim: 3 -2 0 0 inf -2 1 0.0682162 1.0001 -2 1 0.0934117 1.00003 -2 2 0.56444 1.03938 \endcode - -\li -Alpha_complex/periodic_alpha_complex_3d_persistence.cpp computes the persistent homology with +One can also compute the persistent homology with \f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the periodic alpha complex on points sampling from an OFF file. The second parameter is a \ref FileFormatsIsoCuboid file with coordinates of the periodic cuboid. Note that the lengths of the sides of the periodic cuboid have to be the same. -\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off -../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0 \endcode -\code Periodic Delaunay computed. -Simplex_tree dim: 3 +\code $> ./alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off +--cuboid-file ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0 \endcode +\code Simplex_tree dim: 3 3 0 0 inf 3 1 0.0025 inf 3 1 0.0025 inf @@ -236,18 +227,17 @@ Simplex_tree dim: 3 3 2 0.005 inf 3 3 0.0075 inf \endcode -\li -Persistent_cohomology/weighted_periodic_alpha_complex_3d_persistence.cpp computes the persistent homology with +In order to compute the persistent homology with \f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the periodic alpha complex on weighted points from an OFF file. The additional parameters of this program are:
(a) The file with the weights of points. The file consists of a sequence of numbers (as many as points). Note that the weight of each single point has to be bounded by 1/64 times the square of the cuboid edge length.
(b) A \ref FileFormatsIsoCuboid file with coordinates of the periodic cuboid. Note that the lengths of the sides of the periodic cuboid have to be the same.
-\code $> ./weighted_periodic_alpha_complex_3d_persistence ../../data/points/shifted_sphere.off -../../data/points/shifted_sphere.weights ../../data/points/iso_cuboid_3_in_0_10.txt 3 1.0 \endcode -\code Weighted Periodic Delaunay computed. -Simplex_tree dim: 3 +\code $> ./alpha_complex_3d_persistence ../../data/points/shifted_sphere.off +--weight-file ../../data/points/shifted_sphere.weights +--cuboid-file ../../data/points/iso_cuboid_3_in_0_10.txt -p 3 -m 1.0 \endcode +\code Simplex_tree dim: 3 3 0 -0.0001 inf 3 1 16.0264 inf 3 1 16.0273 inf @@ -257,6 +247,19 @@ Simplex_tree dim: 3 3 2 36.8838 inf 3 3 58.6783 inf \endcode +\li +Alpha_complex/alpha_complex_persistence.cpp computes the persistent homology with +\f$\mathbb{Z}/p\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file. +\code $> ./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off \endcode +\code Alpha complex is of dimension 3 - 9273 simplices - 300 vertices. +Simplex_tree dim: 3 +2 0 0 inf +2 1 0.0682162 1.0001 +2 1 0.0934117 1.00003 +2 2 0.56444 1.03938 \endcode + +More details on the Alpha complex utilities dedicated page. + \li Persistent_cohomology/plain_homology.cpp computes the plain homology of a simple simplicial complex without filtration values. -- cgit v1.2.3 From 76480abd3edd267077021ffb44fe45c34956f348 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 12 Jun 2019 11:53:42 +0200 Subject: Rephrase --- src/Persistent_cohomology/doc/Intro_persistent_cohomology.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h index 3e0ad133..3d28c93a 100644 --- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h +++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h @@ -193,7 +193,7 @@ Alpha_complex/alpha_complex_3d_persistence.cpp computes the persistent homol More details on the Alpha complex utilities dedicated page. -Here, as CGAL computes the exact values, it is slower, but it is necessary when points are on a grid +CGAL can be forced to compute the exact values, it is slower, but it is necessary when points are on a grid for instance (the fast version `--fast` would give incorrect values). \code $> ./alpha_complex_3d_persistence ../../data/points/sphere3D_pts_on_grid.off --exact -p 2 -m 0.1 \endcode \code Simplex_tree dim: 3 -- cgit v1.2.3 From e0ba917bbb1a54d43642f14eccb0bb6af8792bc2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 13 Jun 2019 11:27:21 +0200 Subject: Modify plain_homology example to highlight persistence_dim_max parameter --- .../example/plain_homology.cpp | 26 ++++++++++++++-------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp index a2256060..be2dc4cc 100644 --- a/src/Persistent_cohomology/example/plain_homology.cpp +++ b/src/Persistent_cohomology/example/plain_homology.cpp @@ -50,26 +50,33 @@ int main() { ST st; /* Complex to build. 
*/ - /* 1 3 */ - /* o---o */ - /* /X\ / */ + /* 1 3 5 */ + /* o---o---o */ + /* / \ / */ /* o---o o */ /* 2 0 4 */ - const short triangle012[] = {0, 1, 2}; + const short edge01[] = {0, 1}; + const short edge02[] = {0, 2}; + const short edge12[] = {1, 2}; const short edge03[] = {0, 3}; const short edge13[] = {1, 3}; + const short edge35[] = {3, 5}; const short vertex4[] = {4}; - st.insert_simplex_and_subfaces(triangle012); + st.insert_simplex_and_subfaces(edge01); + st.insert_simplex_and_subfaces(edge02); + st.insert_simplex_and_subfaces(edge12); st.insert_simplex_and_subfaces(edge03); st.insert_simplex(edge13); + st.insert_simplex_and_subfaces(edge35); st.insert_simplex(vertex4); // Sort the simplices in the order of the filtration st.initialize_filtration(); // Class for homology computation - Persistent_cohomology pcoh(st); + // We want persistent homology to be computed for the maximal dimension in the complex (persistence_dim_max = true) + Persistent_cohomology pcoh(st, true); // Initialize the coefficient field Z/2Z for homology pcoh.init_coefficients(2); @@ -82,13 +89,14 @@ int main() { // 2 0 0 inf // 2 0 0 inf // 2 1 0 inf - // means that in Z/2Z-homology, the Betti numbers are b0=2 and b1=1. + // 2 1 0 inf + // means that in Z/2Z-homology, the Betti numbers are b0=2 and b1=2. pcoh.output_diagram(); - // Print the Betti numbers are b0=2 and b1=1. + // Print the Betti numbers are b0=2 and b1=2. std::cout << std::endl; std::cout << "The Betti numbers are : "; - for (int i = 0; i < st.dimension(); i++) + for (int i = 0; i < 3; i++) std::cout << "b" << i << " = " << pcoh.betti_number(i) << " ; "; std::cout << std::endl; } -- cgit v1.2.3 From bf421955c3887d12f0ca0655b2043dc68db428fb Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 13 Jun 2019 13:48:26 +0200 Subject: Move GPL COPYING to MIT LICENSE --- COPYING | 674 ---------------------------------------------------------------- LICENSE | 21 ++ 2 files changed, 21 insertions(+), 674 deletions(-) delete mode 100644 COPYING create mode 100644 LICENSE diff --git a/COPYING b/COPYING deleted file mode 100644 index 20d40b6b..00000000 --- a/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. 
- - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. 
- - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. 
- - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. 
- - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. 
- - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. 
(Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. 
Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..8af15c7c --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2014-2019 The GUDHI developers. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file -- cgit v1.2.3 From 08ddc6dbbbb1cf01a05ab23ece47c3d8cf9957ea Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 13 Jun 2019 17:07:09 +0200 Subject: Change Copyright --- .../generator/aurelien_alvarez_surfaces_in_R8.py | 31 ++++++++-------------- .../concept/SimplicialComplexForAlpha.h | 20 +++----------- .../concept/SimplicialComplexForAlpha3d.h | 14 ++-------- src/Alpha_complex/doc/Intro_alpha_complex.h | 14 ++-------- src/Alpha_complex/include/gudhi/Alpha_complex.h | 14 ++-------- src/Alpha_complex/include/gudhi/Alpha_complex_3d.h | 14 ++-------- .../include/gudhi/Alpha_complex_options.h | 14 ++-------- .../test/Alpha_complex_3d_unit_test.cpp | 14 ++-------- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 14 ++-------- .../test/Periodic_alpha_complex_3d_unit_test.cpp | 14 ++-------- .../test/Weighted_alpha_complex_3d_unit_test.cpp | 14 ++-------- ...eighted_periodic_alpha_complex_3d_unit_test.cpp | 14 ++-------- .../utilities/alpha_complex_3d_persistence.cpp | 14 ++-------- .../utilities/alpha_complex_persistence.cpp | 14 ++-------- .../doc/Gudhi_Cubical_Complex_doc.h | 14 ++-------- .../example/Random_bitmap_cubical_complex.cpp | 14 ++-------- .../include/gudhi/Bitmap_cubical_complex.h | 14 ++-------- .../include/gudhi/Bitmap_cubical_complex/counter.h | 14 ++-------- .../include/gudhi/Bitmap_cubical_complex_base.h | 14 ++-------- ...cal_complex_periodic_boundary_conditions_base.h | 14 ++-------- src/Bitmap_cubical_complex/test/Bitmap_test.cpp | 14 ++-------- .../utilities/cubical_complex_persistence.cpp | 14 ++-------- .../periodic_cubical_complex_persistence.cpp | 14 ++-------- .../benchmark/bottleneck_chrono.cpp | 14 ++-------- .../concept/Persistence_diagram.h | 14 ++-------- .../doc/Intro_bottleneck_distance.h | 14 ++-------- .../alpha_rips_persistence_bottleneck_distance.cpp | 14 ++-------- .../example/bottleneck_basic_example.cpp | 14 ++-------- src/Bottleneck_distance/include/gudhi/Bottleneck.h | 14 ++-------- .../include/gudhi/Graph_matching.h | 14 ++-------- .../include/gudhi/Internal_point.h | 14 ++-------- .../include/gudhi/Neighbors_finder.h | 14 ++-------- .../include/gudhi/Persistence_graph.h | 14 ++-------- .../test/bottleneck_unit_test.cpp | 14 ++-------- .../utilities/bottleneck_distance.cpp | 14 ++-------- .../benchmark/cech_complex_benchmark.cpp | 14 ++-------- .../concept/SimplicialComplexForCech.h | 20 +++----------- src/Cech_complex/doc/Intro_cech_complex.h | 14 ++-------- 
.../example/cech_complex_step_by_step.cpp | 14 ++-------- src/Cech_complex/include/gudhi/Cech_complex.h | 14 ++-------- .../include/gudhi/Cech_complex_blocker.h | 14 ++-------- src/Cech_complex/test/test_cech_complex.cpp | 14 ++-------- src/Cech_complex/utilities/cech_persistence.cpp | 14 ++-------- src/Contraction/example/Garland_heckbert.cpp | 21 +++------------ .../example/Garland_heckbert/Error_quadric.h | 21 +++------------ src/Contraction/example/Rips_contraction.cpp | 14 ++-------- .../include/gudhi/Contraction/Edge_profile.h | 20 +++----------- .../Contraction/policies/Contraction_visitor.h | 20 +++----------- .../gudhi/Contraction/policies/Cost_policy.h | 20 +++----------- .../Contraction/policies/Dummy_valid_contraction.h | 20 +++----------- .../gudhi/Contraction/policies/Edge_length_cost.h | 20 +++----------- .../Contraction/policies/First_vertex_placement.h | 20 +++----------- .../policies/Link_condition_valid_contraction.h | 20 +++----------- .../gudhi/Contraction/policies/Middle_placement.h | 20 +++----------- .../gudhi/Contraction/policies/Placement_policy.h | 20 +++----------- .../policies/Valid_contraction_policy.h | 20 +++----------- src/Contraction/include/gudhi/Edge_contraction.h | 14 ++-------- .../include/gudhi/Skeleton_blocker_contractor.h | 14 ++-------- src/GudhUI/gui/MainWindow.cpp | 14 ++-------- src/GudhUI/gui/MainWindow.h | 14 ++-------- src/GudhUI/gui/Menu_edge_contraction.cpp | 14 ++-------- src/GudhUI/gui/Menu_edge_contraction.h | 14 ++-------- src/GudhUI/gui/Menu_k_nearest_neighbors.cpp | 14 ++-------- src/GudhUI/gui/Menu_k_nearest_neighbors.h | 14 ++-------- src/GudhUI/gui/Menu_persistence.cpp | 21 +++------------ src/GudhUI/gui/Menu_persistence.h | 21 +++------------ src/GudhUI/gui/Menu_uniform_neighbors.cpp | 21 +++------------ src/GudhUI/gui/Menu_uniform_neighbors.h | 21 +++------------ src/GudhUI/gui/gudhui.cpp | 21 +++------------ src/GudhUI/model/Complex_typedefs.h | 21 +++------------ src/GudhUI/model/Model.h | 21 +++------------ src/GudhUI/utils/Bar_code_persistence.h | 21 +++------------ src/GudhUI/utils/Critical_points.h | 21 +++------------ src/GudhUI/utils/Edge_collapsor.h | 21 +++------------ src/GudhUI/utils/Edge_contractor.h | 21 +++------------ src/GudhUI/utils/Furthest_point_epsilon_net.h | 21 +++------------ src/GudhUI/utils/Is_manifold.h | 15 ++--------- src/GudhUI/utils/K_nearest_builder.h | 21 +++------------ src/GudhUI/utils/Lloyd_builder.h | 21 +++------------ src/GudhUI/utils/MClock.h | 21 +++------------ src/GudhUI/utils/Persistence_compute.h | 21 +++------------ src/GudhUI/utils/Rips_builder.h | 21 +++------------ src/GudhUI/utils/UI_utils.h | 21 +++------------ src/GudhUI/utils/Vertex_collapsor.h | 21 +++------------ src/GudhUI/view/Color.h | 21 +++------------ src/GudhUI/view/FirstCoordProjector.h | 21 +++------------ src/GudhUI/view/Projector3D.h | 21 +++------------ src/GudhUI/view/View_parameter.h | 21 +++------------ src/GudhUI/view/Viewer.cpp | 21 +++------------ src/GudhUI/view/Viewer.h | 21 +++------------ src/GudhUI/view/Viewer_instructor.cpp | 21 +++------------ src/GudhUI/view/Viewer_instructor.h | 21 +++------------ src/Hasse_complex/include/gudhi/Hasse_complex.h | 20 +++----------- src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 14 ++-------- src/Nerve_GIC/example/CoordGIC.cpp | 14 ++-------- src/Nerve_GIC/example/FuncGIC.cpp | 14 ++-------- src/Nerve_GIC/include/gudhi/GIC.h | 14 ++-------- src/Nerve_GIC/test/test_GIC.cpp | 14 ++-------- .../utilities/KeplerMapperVisuFromTxtFile.py | 22 ++++----------- 
src/Nerve_GIC/utilities/Nerve.cpp | 14 ++-------- src/Nerve_GIC/utilities/VoronoiGIC.cpp | 14 ++-------- .../concept/Real_valued_topological_data.h | 14 ++-------- .../concept/Topological_data_with_averages.h | 14 ++-------- .../concept/Topological_data_with_distances.h | 14 ++-------- .../concept/Topological_data_with_scalar_product.h | 14 ++-------- .../concept/Vectorized_topological_data.h | 14 ++-------- .../doc/Persistence_representations_doc.h | 14 ++-------- .../example/persistence_heat_maps.cpp | 14 ++-------- .../example/persistence_intervals.cpp | 14 ++-------- .../example/persistence_landscape.cpp | 14 ++-------- .../example/persistence_landscape_on_grid.cpp | 14 ++-------- .../example/persistence_vectors.cpp | 14 ++-------- .../example/sliced_wasserstein.cpp | 14 ++-------- .../include/gudhi/PSSK.h | 14 ++-------- .../include/gudhi/Persistence_heat_maps.h | 14 ++-------- .../include/gudhi/Persistence_intervals.h | 14 ++-------- .../gudhi/Persistence_intervals_with_distances.h | 14 ++-------- .../include/gudhi/Persistence_landscape.h | 14 ++-------- .../include/gudhi/Persistence_landscape_on_grid.h | 23 ++++------------ .../include/gudhi/Persistence_vectors.h | 14 ++-------- .../include/gudhi/Sliced_Wasserstein.h | 14 ++-------- .../gudhi/common_persistence_representations.h | 14 ++-------- .../include/gudhi/read_persistence_from_file.h | 14 ++-------- src/Persistence_representations/test/kernels.cpp | 14 ++-------- .../test/persistence_heat_maps_test.cpp | 14 ++-------- .../test/persistence_intervals_test.cpp | 14 ++-------- .../persistence_intervals_with_distances_test.cpp | 14 ++-------- .../test/persistence_lanscapes_on_grid_test.cpp | 14 ++-------- .../test/persistence_lanscapes_test.cpp | 14 ++-------- .../test/read_persistence_from_file_test.cpp | 14 ++-------- .../test/vector_representation_test.cpp | 14 ++-------- .../average_persistence_heat_maps.cpp | 14 ++-------- .../compute_distance_of_persistence_heat_maps.cpp | 14 ++-------- ...ute_scalar_product_of_persistence_heat_maps.cpp | 14 ++-------- ...h_m_weighted_by_arctan_of_their_persistence.cpp | 30 +++++++-------------- ...te_p_h_m_weighted_by_distance_from_diagonal.cpp | 14 ++-------- ...ate_p_h_m_weighted_by_squared_diag_distance.cpp | 14 ++-------- .../create_persistence_heat_maps.cpp | 14 ++-------- .../persistence_heat_maps/create_pssk.cpp | 14 ++-------- .../plot_persistence_heat_map.cpp | 14 ++-------- ...te_birth_death_range_in_persistence_diagram.cpp | 14 ++-------- .../compute_bottleneck_distance.cpp | 14 ++-------- .../compute_number_of_dominant_intervals.cpp | 14 ++-------- .../plot_histogram_of_intervals_lengths.cpp | 14 ++-------- .../plot_persistence_Betti_numbers.cpp | 14 ++-------- .../plot_persistence_intervals.cpp | 14 ++-------- .../persistence_landscapes/average_landscapes.cpp | 14 ++-------- .../compute_distance_of_landscapes.cpp | 14 ++-------- .../compute_scalar_product_of_landscapes.cpp | 14 ++-------- .../persistence_landscapes/create_landscapes.cpp | 14 ++-------- .../persistence_landscapes/plot_landscapes.cpp | 14 ++-------- .../average_landscapes_on_grid.cpp | 14 ++-------- .../compute_distance_of_landscapes_on_grid.cpp | 14 ++-------- ...ompute_scalar_product_of_landscapes_on_grid.cpp | 14 ++-------- .../create_landscapes_on_grid.cpp | 14 ++-------- .../plot_landscapes_on_grid.cpp | 14 ++-------- .../average_persistence_vectors.cpp | 14 ++-------- .../compute_distance_of_persistence_vectors.cpp | 14 ++-------- ...mpute_scalar_product_of_persistence_vectors.cpp | 14 ++-------- 
.../create_persistence_vectors.cpp | 14 ++-------- .../plot_persistence_vectors.cpp | 14 ++-------- .../benchmark/performance_rips_persistence.cpp | 20 +++----------- .../concept/CoefficientField.h | 30 +++++++-------------- .../concept/FilteredComplex.h | 30 +++++++-------------- .../concept/PersistentHomology.h | 30 +++++++-------------- .../doc/Intro_persistent_cohomology.h | 14 ++-------- .../example/custom_persistence_sort.cpp | 14 ++-------- .../example/persistence_from_file.cpp | 20 +++----------- .../persistence_from_simple_simplex_tree.cpp | 14 ++-------- .../example/plain_homology.cpp | 14 ++-------- .../example/rips_multifield_persistence.cpp | 20 +++----------- .../example/rips_persistence_step_by_step.cpp | 20 +++----------- .../rips_persistence_via_boundary_matrix.cpp | 20 +++----------- .../include/gudhi/Persistent_cohomology.h | 14 ++-------- .../include/gudhi/Persistent_cohomology/Field_Zp.h | 14 ++-------- .../gudhi/Persistent_cohomology/Multi_field.h | 14 ++-------- .../Persistent_cohomology_column.h | 14 ++-------- .../concept/SimplicialComplexForRips.h | 20 +++----------- src/Rips_complex/doc/Intro_rips_complex.h | 14 ++-------- src/Rips_complex/include/gudhi/Rips_complex.h | 14 ++-------- .../include/gudhi/Sparse_rips_complex.h | 14 ++-------- src/Rips_complex/test/test_rips_complex.cpp | 14 ++-------- .../rips_correlation_matrix_persistence.cpp | 14 ++-------- .../utilities/rips_distance_matrix_persistence.cpp | 14 ++-------- src/Rips_complex/utilities/rips_persistence.cpp | 14 ++-------- .../utilities/sparse_rips_persistence.cpp | 14 ++-------- src/Simplex_tree/concept/FiltrationValue.h | 30 +++++++-------------- src/Simplex_tree/concept/IndexingTag.h | 30 +++++++-------------- src/Simplex_tree/concept/SimplexKey.h | 30 +++++++-------------- src/Simplex_tree/concept/SimplexTreeOptions.h | 30 +++++++-------------- src/Simplex_tree/concept/VertexHandle.h | 30 +++++++-------------- src/Simplex_tree/doc/Intro_simplex_tree.h | 14 ++-------- .../example/cech_complex_cgal_mini_sphere_3d.cpp | 14 ++-------- ...e_alpha_shapes_3_simplex_tree_from_off_file.cpp | 14 ++-------- .../example/graph_expansion_with_blocker.cpp | 14 ++-------- src/Simplex_tree/example/mini_simplex_tree.cpp | 14 ++-------- src/Simplex_tree/example/simple_simplex_tree.cpp | 14 ++-------- .../example/simplex_tree_from_cliques_of_graph.cpp | 14 ++-------- src/Simplex_tree/include/gudhi/Simplex_tree.h | 14 ++-------- .../gudhi/Simplex_tree/Simplex_tree_iterators.h | 14 ++-------- .../Simplex_tree_node_explicit_storage.h | 14 ++-------- .../gudhi/Simplex_tree/Simplex_tree_siblings.h | 14 ++-------- .../include/gudhi/Simplex_tree/indexing_tag.h | 14 ++-------- src/Skeleton_blocker/concept/SkeletonBlockerDS.h | 14 ++-------- .../concept/SkeletonBlockerGeometricDS.h | 14 ++-------- .../example/Skeleton_blocker_from_simplices.cpp | 20 +++----------- .../example/Skeleton_blocker_iteration.cpp | 20 +++----------- .../example/Skeleton_blocker_link.cpp | 20 +++----------- .../include/gudhi/Skeleton_blocker.h | 14 ++-------- .../Skeleton_blocker_complex_visitor.h | 14 ++-------- .../Skeleton_blocker_link_superior.h | 14 ++-------- .../Skeleton_blocker/Skeleton_blocker_off_io.h | 14 ++-------- .../Skeleton_blocker_simple_geometric_traits.h | 14 ++-------- .../Skeleton_blocker_simple_traits.h | 14 ++-------- .../Skeleton_blocker/Skeleton_blocker_simplex.h | 14 ++-------- .../Skeleton_blocker_sub_complex.h | 14 ++-------- .../gudhi/Skeleton_blocker/internal/Top_faces.h | 14 ++-------- 
.../include/gudhi/Skeleton_blocker/internal/Trie.h | 21 +++------------ .../Skeleton_blockers_blockers_iterators.h | 14 ++-------- .../iterators/Skeleton_blockers_edges_iterators.h | 14 ++-------- .../iterators/Skeleton_blockers_iterators.h | 30 +++++++-------------- .../Skeleton_blockers_simplices_iterators.h | 14 ++-------- .../Skeleton_blockers_triangles_iterators.h | 14 ++-------- .../Skeleton_blockers_vertices_iterators.h | 14 ++-------- .../include/gudhi/Skeleton_blocker_complex.h | 14 ++-------- .../gudhi/Skeleton_blocker_geometric_complex.h | 14 ++-------- .../include/gudhi/Skeleton_blocker_link_complex.h | 14 ++-------- .../gudhi/Skeleton_blocker_simplifiable_complex.h | 14 ++-------- .../test/test_skeleton_blocker_complex.cpp | 14 ++-------- .../test_skeleton_blocker_geometric_complex.cpp | 20 +++----------- .../test/test_skeleton_blocker_simplifiable.cpp | 20 +++----------- .../doc/Intro_spatial_searching.h | 14 ++-------- .../include/gudhi/Kd_tree_search.h | 14 ++-------- src/Spatial_searching/test/test_Kd_tree_search.cpp | 14 ++-------- src/Subsampling/doc/Intro_subsampling.h | 14 ++-------- .../include/gudhi/choose_n_farthest_points.h | 14 ++-------- .../include/gudhi/pick_n_random_points.h | 14 ++-------- src/Subsampling/include/gudhi/sparsify_point_set.h | 14 ++-------- .../test/test_choose_n_farthest_points.cpp | 14 ++-------- src/Subsampling/test/test_pick_n_random_points.cpp | 14 ++-------- src/Subsampling/test/test_sparsify_point_set.cpp | 14 ++-------- src/Tangential_complex/benchmark/RIB_exporter.h | 14 ++-------- src/Tangential_complex/benchmark/XML_exporter.h | 14 ++-------- .../doc/Intro_tangential_complex.h | 14 ++-------- .../include/gudhi/Tangential_complex.h | 14 ++-------- .../gudhi/Tangential_complex/Simplicial_complex.h | 14 ++-------- .../include/gudhi/Tangential_complex/config.h | 14 ++-------- .../include/gudhi/Tangential_complex/utilities.h | 14 ++-------- .../test/test_tangential_complex.cpp | 14 ++-------- src/Toplex_map/benchmark/benchmark_tm.cpp | 14 ++-------- src/Toplex_map/doc/Intro_Toplex_map.h | 14 ++-------- src/Toplex_map/example/simple_toplex_map.cpp | 14 ++-------- src/Toplex_map/include/gudhi/Lazy_toplex_map.h | 14 ++-------- src/Toplex_map/include/gudhi/Toplex_map.h | 14 ++-------- src/Toplex_map/test/lazy_toplex_map_unit_test.cpp | 14 ++-------- src/Toplex_map/test/toplex_map_unit_test.cpp | 14 ++-------- .../concept/SimplicialComplexForWitness.h | 20 +++----------- src/Witness_complex/example/generators.h | 14 ++-------- .../include/gudhi/Active_witness/Active_witness.h | 14 ++-------- .../gudhi/Active_witness/Active_witness_iterator.h | 14 ++-------- .../gudhi/Euclidean_strong_witness_complex.h | 14 ++-------- .../include/gudhi/Euclidean_witness_complex.h | 14 ++-------- .../include/gudhi/Strong_witness_complex.h | 14 ++-------- .../include/gudhi/Witness_complex.h | 14 ++-------- .../include/gudhi/Witness_complex/all_faces_in.h | 14 ++-------- .../utilities/strong_witness_persistence.cpp | 14 ++-------- .../utilities/weak_witness_persistence.cpp | 14 ++-------- .../Graph_simplicial_complex_benchmark.cpp | 14 ++-------- src/common/doc/file_formats.h | 30 +++++++-------------- src/common/include/gudhi/Clock.h | 20 +++----------- src/common/include/gudhi/Debug_utils.h | 14 ++-------- src/common/include/gudhi/Null_output_iterator.h | 14 ++-------- src/common/include/gudhi/Off_reader.h | 21 +++------------ src/common/include/gudhi/Point.h | 21 +++------------ src/common/include/gudhi/Points_3D_off_io.h | 14 ++-------- 
src/common/include/gudhi/Points_off_io.h | 14 ++-------- src/common/include/gudhi/Simple_object_pool.h | 14 ++-------- src/common/include/gudhi/Unitary_tests_utils.h | 14 ++-------- src/common/include/gudhi/allocator.h | 14 ++-------- src/common/include/gudhi/console_color.h | 14 ++-------- src/common/include/gudhi/distance_functions.h | 20 +++----------- .../include/gudhi/graph_simplicial_complex.h | 20 +++----------- src/common/include/gudhi/random_point_generators.h | 14 ++-------- src/common/include/gudhi/reader_utils.h | 14 ++-------- .../include/gudhi/writing_persistence_to_file.h | 14 ++-------- src/common/test/test_distance_matrix_reader.cpp | 14 ++-------- .../test/test_persistence_intervals_reader.cpp | 14 ++-------- src/common/test/test_points_off_reader.cpp | 14 ++-------- .../utilities/off_file_from_shape_generator.cpp | 14 ++-------- src/cython/cython/alpha_complex.pyx | 22 ++++----------- src/cython/cython/bottleneck_distance.pyx | 22 ++++----------- src/cython/cython/cubical_complex.pyx | 22 ++++----------- .../cython/euclidean_strong_witness_complex.pyx | 22 ++++----------- src/cython/cython/euclidean_witness_complex.pyx | 22 ++++----------- src/cython/cython/nerve_gic.pyx | 22 ++++----------- src/cython/cython/off_reader.pyx | 22 ++++----------- src/cython/cython/periodic_cubical_complex.pyx | 22 ++++----------- src/cython/cython/persistence_graphical_tools.py | 22 ++++----------- src/cython/cython/reader_utils.pyx | 22 ++++----------- src/cython/cython/rips_complex.pyx | 22 ++++----------- src/cython/cython/simplex_tree.pyx | 22 ++++----------- src/cython/cython/strong_witness_complex.pyx | 22 ++++----------- src/cython/cython/subsampling.pyx | 22 ++++----------- src/cython/cython/tangential_complex.pyx | 22 ++++----------- src/cython/cython/witness_complex.pyx | 22 ++++----------- ...ex_diagram_persistence_from_off_file_example.py | 22 ++++----------- .../example/alpha_complex_from_points_example.py | 22 ++++----------- .../alpha_rips_persistence_bottleneck_distance.py | 22 ++++----------- src/cython/example/bottleneck_basic_example.py | 22 ++++----------- .../example/coordinate_graph_induced_complex.py | 22 ++++----------- ...ex_diagram_persistence_from_off_file_example.py | 22 ++++----------- ...ex_diagram_persistence_from_off_file_example.py | 22 ++++----------- .../example/functional_graph_induced_complex.py | 22 ++++----------- .../example/gudhi_graphical_tools_example.py | 22 ++++----------- src/cython/example/nerve_of_a_covering.py | 22 ++++----------- ...arcode_persistence_from_perseus_file_example.py | 22 ++++----------- .../random_cubical_complex_persistence_example.py | 22 ++++----------- ...istence_from_correlation_matrix_file_example.py | 22 ++++----------- ...ersistence_from_distance_matrix_file_example.py | 22 ++++----------- ...ex_diagram_persistence_from_off_file_example.py | 22 ++++----------- .../example/rips_complex_from_points_example.py | 22 ++++----------- src/cython/example/rips_persistence_diagram.py | 22 ++++----------- src/cython/example/simplex_tree_example.py | 22 ++++----------- .../example/sparse_rips_persistence_diagram.py | 22 ++++----------- ...complex_plain_homology_from_off_file_example.py | 22 ++++----------- .../example/voronoi_graph_induced_complex.py | 22 ++++----------- .../witness_complex_from_nearest_landmark_table.py | 22 ++++----------- src/cython/gudhi.pyx.in | 22 ++++----------- src/cython/include/Alpha_complex_interface.h | 14 ++-------- src/cython/include/Bottleneck_distance_interface.h | 14 ++-------- 
src/cython/include/Cubical_complex_interface.h | 14 ++-------- .../Euclidean_strong_witness_complex_interface.h | 14 ++-------- .../include/Euclidean_witness_complex_interface.h | 14 ++-------- src/cython/include/Nerve_gic_interface.h | 14 ++-------- src/cython/include/Off_reader_interface.h | 14 ++-------- .../include/Persistent_cohomology_interface.h | 14 ++-------- src/cython/include/Reader_utils_interface.h | 14 ++-------- src/cython/include/Rips_complex_interface.h | 14 ++-------- src/cython/include/Simplex_tree_interface.h | 14 ++-------- .../include/Strong_witness_complex_interface.h | 14 ++-------- src/cython/include/Subsampling_interface.h | 14 ++-------- src/cython/include/Tangential_complex_interface.h | 14 ++-------- src/cython/include/Witness_complex_interface.h | 14 ++-------- src/cython/setup.py.in | 22 ++++----------- src/cython/test/test_alpha_complex.py | 22 ++++----------- src/cython/test/test_bottleneck_distance.py | 22 ++++----------- src/cython/test/test_cover_complex.py | 22 ++++----------- src/cython/test/test_cubical_complex.py | 22 ++++----------- src/cython/test/test_euclidean_witness_complex.py | 22 ++++----------- src/cython/test/test_reader_utils.py | 22 ++++----------- src/cython/test/test_rips_complex.py | 22 ++++----------- src/cython/test/test_simplex_tree.py | 22 ++++----------- src/cython/test/test_subsampling.py | 22 ++++----------- src/cython/test/test_tangential_complex.py | 22 ++++----------- src/cython/test/test_witness_complex.py | 22 ++++----------- 355 files changed, 1075 insertions(+), 4906 deletions(-) diff --git a/data/points/generator/aurelien_alvarez_surfaces_in_R8.py b/data/points/generator/aurelien_alvarez_surfaces_in_R8.py index 57773c4c..cc23ca98 100755 --- a/data/points/generator/aurelien_alvarez_surfaces_in_R8.py +++ b/data/points/generator/aurelien_alvarez_surfaces_in_R8.py @@ -1,28 +1,19 @@ -# This file is part of the Gudhi Library. The Gudhi library -# (Geometric Understanding in Higher Dimensions) is a generic C++ -# library for computational topology. -# -# Author(s): Aurélien Alvarez -# -# Copyright (C) 2016 Université d'Orléans (France) -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . +#!/usr/bin/env python import numpy as np import random from math import factorial +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Aurélien Alvarez + + Copyright (C) 2016 Université d'Orléans (France) + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + I = complex(0,1) ################################################# diff --git a/src/Alpha_complex/concept/SimplicialComplexForAlpha.h b/src/Alpha_complex/concept/SimplicialComplexForAlpha.h index ba97c802..1c6c3b0c 100644 --- a/src/Alpha_complex/concept/SimplicialComplexForAlpha.h +++ b/src/Alpha_complex/concept/SimplicialComplexForAlpha.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_ diff --git a/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h b/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h index 7acdf105..1dc8c037 100644 --- a/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h +++ b/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_3D_H_ diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index ab7c4794..5b332e47 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_ALPHA_COMPLEX_INTRO_ALPHA_COMPLEX_H_ diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index af9f59ea..d5865671 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef ALPHA_COMPLEX_H_ diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h index 0bf12b1a..47407b38 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef ALPHA_COMPLEX_3D_H_ diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_options.h b/src/Alpha_complex/include/gudhi/Alpha_complex_options.h index 7a555fa1..bf29039b 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex_options.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex_options.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef ALPHA_COMPLEX_OPTIONS_H_ diff --git a/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp index ec905d5b..8e4121ae 100644 --- a/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index b46b6da5..de0cf471 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp index ed4cbff0..4449e5ed 100644 --- a/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp index c16b3718..201dbce4 100644 --- a/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp index e8ac83e5..9d711c41 100644 --- a/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp index 09c84eb3..b9991b83 100644 --- a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp +++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp index 8e6c40b7..12a8740e 100644 --- a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp +++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h index 5fa02a5e..237f79ad 100644 --- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h +++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp index 6eb24040..533aec91 100644 --- a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp +++ b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ // for persistence algorithm diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index cc19b8b5..1954eb0c 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef BITMAP_CUBICAL_COMPLEX_H_ diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h index f82d4cc3..84d53778 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef BITMAP_CUBICAL_COMPLEX_COUNTER_H_ diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h index f5e005b2..a0ad40fc 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef BITMAP_CUBICAL_COMPLEX_BASE_H_ diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h index 8c35f590..4afed33c 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef BITMAP_CUBICAL_COMPLEX_PERIODIC_BOUNDARY_CONDITIONS_BASE_H_ diff --git a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp index 6a917c25..d3ed75d3 100644 --- a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp +++ b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp index 170aa684..6cf0889f 100644 --- a/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp +++ b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp index e9ba5495..700d90f4 100644 --- a/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp +++ b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp b/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp index acafb199..db3c9815 100644 --- a/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp +++ b/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Bottleneck_distance/concept/Persistence_diagram.h b/src/Bottleneck_distance/concept/Persistence_diagram.h index d016faf4..de8021cc 100644 --- a/src/Bottleneck_distance/concept/Persistence_diagram.h +++ b/src/Bottleneck_distance/concept/Persistence_diagram.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_BOTTLENECK_DISTANCE_PERSISTENCE_DIAGRAM_H_ diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h index 6fd058a8..48078903 100644 --- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h +++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_BOTTLENECK_DISTANCE_INTRO_BOTTLENECK_DISTANCE_H_ diff --git a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp index 2db1ef80..c5d66121 100644 --- a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp +++ b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp index 3df7d12d..b74c8245 100644 --- a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp +++ b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h index 7a553006..c56dcb2d 100644 --- a/src/Bottleneck_distance/include/gudhi/Bottleneck.h +++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef BOTTLENECK_H_ diff --git a/src/Bottleneck_distance/include/gudhi/Graph_matching.h b/src/Bottleneck_distance/include/gudhi/Graph_matching.h index 313e7d9c..6385ae30 100644 --- a/src/Bottleneck_distance/include/gudhi/Graph_matching.h +++ b/src/Bottleneck_distance/include/gudhi/Graph_matching.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GRAPH_MATCHING_H_ diff --git a/src/Bottleneck_distance/include/gudhi/Internal_point.h b/src/Bottleneck_distance/include/gudhi/Internal_point.h index 7f350f64..9d268af3 100644 --- a/src/Bottleneck_distance/include/gudhi/Internal_point.h +++ b/src/Bottleneck_distance/include/gudhi/Internal_point.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INTERNAL_POINT_H_ diff --git a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h index 8c12d353..8a75384c 100644 --- a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h +++ b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef NEIGHBORS_FINDER_H_ diff --git a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h index cb163623..3e82a4c9 100644 --- a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h +++ b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENCE_GRAPH_H_ diff --git a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp index bce88e13..5f20892c 100644 --- a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp +++ b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/Bottleneck_distance/utilities/bottleneck_distance.cpp b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp index 8f724f95..fc03cb21 100644 --- a/src/Bottleneck_distance/utilities/bottleneck_distance.cpp +++ b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index 86314930..df12e06d 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Cech_complex/concept/SimplicialComplexForCech.h b/src/Cech_complex/concept/SimplicialComplexForCech.h index 89231eec..00c7df3a 100644 --- a/src/Cech_complex/concept/SimplicialComplexForCech.h +++ b/src/Cech_complex/concept/SimplicialComplexForCech.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_CECH_COMPLEX_SIMPLICIAL_COMPLEX_FOR_CECH_H_ diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h index 4483bcb9..250c91fa 100644 --- a/src/Cech_complex/doc/Intro_cech_complex.h +++ b/src/Cech_complex/doc/Intro_cech_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_CECH_COMPLEX_INTRO_CECH_COMPLEX_H_ diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index 6fbbde5b..d9d17b26 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index f9b8a269..cc69f35f 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CECH_COMPLEX_H_ diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index b0d347b1..c0c03bb0 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CECH_COMPLEX_BLOCKER_H_ diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 9039169c..8df71b15 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index 93e92695..78d47a5a 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Contraction/example/Garland_heckbert.cpp b/src/Contraction/example/Garland_heckbert.cpp index 08dd932e..9c0b5205 100644 --- a/src/Contraction/example/Garland_heckbert.cpp +++ b/src/Contraction/example/Garland_heckbert.cpp @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/Contraction/example/Garland_heckbert/Error_quadric.h b/src/Contraction/example/Garland_heckbert/Error_quadric.h index 8bd9b545..49250d7a 100644 --- a/src/Contraction/example/Garland_heckbert/Error_quadric.h +++ b/src/Contraction/example/Garland_heckbert/Error_quadric.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GARLAND_HECKBERT_ERROR_QUADRIC_H_ diff --git a/src/Contraction/example/Rips_contraction.cpp b/src/Contraction/example/Rips_contraction.cpp index 7f9b150a..c41a9d94 100644 --- a/src/Contraction/example/Rips_contraction.cpp +++ b/src/Contraction/example/Rips_contraction.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include #include diff --git a/src/Contraction/include/gudhi/Contraction/Edge_profile.h b/src/Contraction/include/gudhi/Contraction/Edge_profile.h index 30b1b80a..78a7afd1 100644 --- a/src/Contraction/include/gudhi/Contraction/Edge_profile.h +++ b/src/Contraction/include/gudhi/Contraction/Edge_profile.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_EDGE_PROFILE_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Contraction_visitor.h b/src/Contraction/include/gudhi/Contraction/policies/Contraction_visitor.h index fa02308b..243bc51c 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Contraction_visitor.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Contraction_visitor.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_CONTRACTION_VISITOR_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Cost_policy.h b/src/Contraction/include/gudhi/Contraction/policies/Cost_policy.h index 04ce36b6..97114794 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Cost_policy.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Cost_policy.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_COST_POLICY_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Dummy_valid_contraction.h b/src/Contraction/include/gudhi/Contraction/policies/Dummy_valid_contraction.h index a5567454..27a4dc7a 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Dummy_valid_contraction.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Dummy_valid_contraction.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_DUMMY_VALID_CONTRACTION_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Edge_length_cost.h b/src/Contraction/include/gudhi/Contraction/policies/Edge_length_cost.h index 1b7a825b..97589385 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Edge_length_cost.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Edge_length_cost.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_EDGE_LENGTH_COST_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/First_vertex_placement.h b/src/Contraction/include/gudhi/Contraction/policies/First_vertex_placement.h index 0b9f8775..005b80e0 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/First_vertex_placement.h +++ b/src/Contraction/include/gudhi/Contraction/policies/First_vertex_placement.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_FIRST_VERTEX_PLACEMENT_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h b/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h index 8c869830..2e7ea481 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_LINK_CONDITION_VALID_CONTRACTION_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Middle_placement.h b/src/Contraction/include/gudhi/Contraction/policies/Middle_placement.h index 0ba23a35..7dcf708b 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Middle_placement.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Middle_placement.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_MIDDLE_PLACEMENT_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Placement_policy.h b/src/Contraction/include/gudhi/Contraction/policies/Placement_policy.h index 19509fad..5f97d6a7 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Placement_policy.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Placement_policy.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_PLACEMENT_POLICY_H_ diff --git a/src/Contraction/include/gudhi/Contraction/policies/Valid_contraction_policy.h b/src/Contraction/include/gudhi/Contraction/policies/Valid_contraction_policy.h index 8a91f0b5..413c5bd6 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Valid_contraction_policy.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Valid_contraction_policy.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONTRACTION_POLICIES_VALID_CONTRACTION_POLICY_H_ diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index fcd06996..5cd024bd 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef EDGE_CONTRACTION_H_ diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index 13086161..e1f3b3c2 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_CONTRACTOR_H_ diff --git a/src/GudhUI/gui/MainWindow.cpp b/src/GudhUI/gui/MainWindow.cpp index b11b80e9..05bd42c8 100644 --- a/src/GudhUI/gui/MainWindow.cpp +++ b/src/GudhUI/gui/MainWindow.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include "MainWindow.h" diff --git a/src/GudhUI/gui/MainWindow.h b/src/GudhUI/gui/MainWindow.h index 6076c2ee..d9281e57 100644 --- a/src/GudhUI/gui/MainWindow.h +++ b/src/GudhUI/gui/MainWindow.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUI_MAINWINDOW_H_ diff --git a/src/GudhUI/gui/Menu_edge_contraction.cpp b/src/GudhUI/gui/Menu_edge_contraction.cpp index 041bdf9e..5a517cef 100644 --- a/src/GudhUI/gui/Menu_edge_contraction.cpp +++ b/src/GudhUI/gui/Menu_edge_contraction.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUI_MENU_EDGE_CONTRACTION_CPP_ diff --git a/src/GudhUI/gui/Menu_edge_contraction.h b/src/GudhUI/gui/Menu_edge_contraction.h index 0ef7b267..7cbf60ee 100644 --- a/src/GudhUI/gui/Menu_edge_contraction.h +++ b/src/GudhUI/gui/Menu_edge_contraction.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUI_MENU_EDGE_CONTRACTION_H_ diff --git a/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp b/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp index b1ad15c8..69ed6c56 100644 --- a/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp +++ b/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include "Menu_k_nearest_neighbors.h" diff --git a/src/GudhUI/gui/Menu_k_nearest_neighbors.h b/src/GudhUI/gui/Menu_k_nearest_neighbors.h index 56b5b63d..e29ffc0b 100644 --- a/src/GudhUI/gui/Menu_k_nearest_neighbors.h +++ b/src/GudhUI/gui/Menu_k_nearest_neighbors.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUI_MENU_K_NEAREST_NEIGHBORS_H_ diff --git a/src/GudhUI/gui/Menu_persistence.cpp b/src/GudhUI/gui/Menu_persistence.cpp index ec990559..58556cd1 100644 --- a/src/GudhUI/gui/Menu_persistence.cpp +++ b/src/GudhUI/gui/Menu_persistence.cpp @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/GudhUI/gui/Menu_persistence.h b/src/GudhUI/gui/Menu_persistence.h index 32f0c5ca..a4d94636 100644 --- a/src/GudhUI/gui/Menu_persistence.h +++ b/src/GudhUI/gui/Menu_persistence.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUI_MENU_PERSISTENCE_H_ diff --git a/src/GudhUI/gui/Menu_uniform_neighbors.cpp b/src/GudhUI/gui/Menu_uniform_neighbors.cpp index 7f392b6c..f27a7e12 100644 --- a/src/GudhUI/gui/Menu_uniform_neighbors.cpp +++ b/src/GudhUI/gui/Menu_uniform_neighbors.cpp @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include "Menu_uniform_neighbors.h" diff --git a/src/GudhUI/gui/Menu_uniform_neighbors.h b/src/GudhUI/gui/Menu_uniform_neighbors.h index 88a3823b..131c8e29 100644 --- a/src/GudhUI/gui/Menu_uniform_neighbors.h +++ b/src/GudhUI/gui/Menu_uniform_neighbors.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUI_MENU_UNIFORM_NEIGHBORS_H_ diff --git a/src/GudhUI/gui/gudhui.cpp b/src/GudhUI/gui/gudhui.cpp index 2a100fd5..0a7296f6 100644 --- a/src/GudhUI/gui/gudhui.cpp +++ b/src/GudhUI/gui/gudhui.cpp @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include "MainWindow.h" diff --git a/src/GudhUI/model/Complex_typedefs.h b/src/GudhUI/model/Complex_typedefs.h index 347db1e3..92578111 100644 --- a/src/GudhUI/model/Complex_typedefs.h +++ b/src/GudhUI/model/Complex_typedefs.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef MODEL_COMPLEX_TYPEDEFS_H_ diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index 1d5cc087..b17a5c8f 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef MODEL_MODEL_H_ diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h index 49c87b3c..cd9b009f 100644 --- a/src/GudhUI/utils/Bar_code_persistence.h +++ b/src/GudhUI/utils/Bar_code_persistence.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include // isfinite diff --git a/src/GudhUI/utils/Critical_points.h b/src/GudhUI/utils/Critical_points.h index fbd690f8..32fcf32e 100644 --- a/src/GudhUI/utils/Critical_points.h +++ b/src/GudhUI/utils/Critical_points.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_CRITICAL_POINTS_H_ diff --git a/src/GudhUI/utils/Edge_collapsor.h b/src/GudhUI/utils/Edge_collapsor.h index b3cc7df7..89e032f0 100644 --- a/src/GudhUI/utils/Edge_collapsor.h +++ b/src/GudhUI/utils/Edge_collapsor.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_EDGE_COLLAPSOR_H_ diff --git a/src/GudhUI/utils/Edge_contractor.h b/src/GudhUI/utils/Edge_contractor.h index 090baabe..0707b186 100644 --- a/src/GudhUI/utils/Edge_contractor.h +++ b/src/GudhUI/utils/Edge_contractor.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_EDGE_CONTRACTOR_H_ diff --git a/src/GudhUI/utils/Furthest_point_epsilon_net.h b/src/GudhUI/utils/Furthest_point_epsilon_net.h index dbb6661c..6eb71071 100644 --- a/src/GudhUI/utils/Furthest_point_epsilon_net.h +++ b/src/GudhUI/utils/Furthest_point_epsilon_net.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_FURTHEST_POINT_EPSILON_NET_H_ diff --git a/src/GudhUI/utils/Is_manifold.h b/src/GudhUI/utils/Is_manifold.h index 732df607..276f4332 100644 --- a/src/GudhUI/utils/Is_manifold.h +++ b/src/GudhUI/utils/Is_manifold.h @@ -9,19 +9,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/GudhUI/utils/K_nearest_builder.h b/src/GudhUI/utils/K_nearest_builder.h index 14851d96..34483e58 100644 --- a/src/GudhUI/utils/K_nearest_builder.h +++ b/src/GudhUI/utils/K_nearest_builder.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_K_NEAREST_BUILDER_H_ diff --git a/src/GudhUI/utils/Lloyd_builder.h b/src/GudhUI/utils/Lloyd_builder.h index 67595d33..c042564f 100644 --- a/src/GudhUI/utils/Lloyd_builder.h +++ b/src/GudhUI/utils/Lloyd_builder.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_LLOYD_BUILDER_H_ diff --git a/src/GudhUI/utils/MClock.h b/src/GudhUI/utils/MClock.h index 992f6fa5..54b28211 100644 --- a/src/GudhUI/utils/MClock.h +++ b/src/GudhUI/utils/MClock.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_MCLOCK_H_ diff --git a/src/GudhUI/utils/Persistence_compute.h b/src/GudhUI/utils/Persistence_compute.h index c8afded9..874f31cf 100644 --- a/src/GudhUI/utils/Persistence_compute.h +++ b/src/GudhUI/utils/Persistence_compute.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/GudhUI/utils/Rips_builder.h b/src/GudhUI/utils/Rips_builder.h index ed62c1c0..aba1a8e4 100644 --- a/src/GudhUI/utils/Rips_builder.h +++ b/src/GudhUI/utils/Rips_builder.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_RIPS_BUILDER_H_ diff --git a/src/GudhUI/utils/UI_utils.h b/src/GudhUI/utils/UI_utils.h index 67a02869..571a3cf2 100644 --- a/src/GudhUI/utils/UI_utils.h +++ b/src/GudhUI/utils/UI_utils.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_UI_UTILS_H_ diff --git a/src/GudhUI/utils/Vertex_collapsor.h b/src/GudhUI/utils/Vertex_collapsor.h index fca57f7d..030e4bb0 100644 --- a/src/GudhUI/utils/Vertex_collapsor.h +++ b/src/GudhUI/utils/Vertex_collapsor.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UTILS_VERTEX_COLLAPSOR_H_ diff --git a/src/GudhUI/view/Color.h b/src/GudhUI/view/Color.h index 808dc2d8..791cca51 100644 --- a/src/GudhUI/view/Color.h +++ b/src/GudhUI/view/Color.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef VIEW_COLOR_H_ diff --git a/src/GudhUI/view/FirstCoordProjector.h b/src/GudhUI/view/FirstCoordProjector.h index 3f8a6fd9..673485e3 100644 --- a/src/GudhUI/view/FirstCoordProjector.h +++ b/src/GudhUI/view/FirstCoordProjector.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef VIEW_FIRSTCOORDPROJECTOR_H_ diff --git a/src/GudhUI/view/Projector3D.h b/src/GudhUI/view/Projector3D.h index a1421f51..574756fd 100644 --- a/src/GudhUI/view/Projector3D.h +++ b/src/GudhUI/view/Projector3D.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef VIEW_PROJECTOR3D_H_ diff --git a/src/GudhUI/view/View_parameter.h b/src/GudhUI/view/View_parameter.h index 578a0268..dfd3aa41 100644 --- a/src/GudhUI/view/View_parameter.h +++ b/src/GudhUI/view/View_parameter.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef VIEW_VIEW_PARAMETER_H_ diff --git a/src/GudhUI/view/Viewer.cpp b/src/GudhUI/view/Viewer.cpp index 42e35d6c..6b17c833 100644 --- a/src/GudhUI/view/Viewer.cpp +++ b/src/GudhUI/view/Viewer.cpp @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include "Viewer.h" diff --git a/src/GudhUI/view/Viewer.h b/src/GudhUI/view/Viewer.h index 414044ef..ec165031 100644 --- a/src/GudhUI/view/Viewer.h +++ b/src/GudhUI/view/Viewer.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef VIEW_VIEWER_H_ diff --git a/src/GudhUI/view/Viewer_instructor.cpp b/src/GudhUI/view/Viewer_instructor.cpp index a9dc4525..9a0f1cd3 100644 --- a/src/GudhUI/view/Viewer_instructor.cpp +++ b/src/GudhUI/view/Viewer_instructor.cpp @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/GudhUI/view/Viewer_instructor.h b/src/GudhUI/view/Viewer_instructor.h index 4b06acb8..58cbcd31 100644 --- a/src/GudhUI/view/Viewer_instructor.h +++ b/src/GudhUI/view/Viewer_instructor.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef VIEW_VIEWER_INSTRUCTOR_H_ diff --git a/src/Hasse_complex/include/gudhi/Hasse_complex.h b/src/Hasse_complex/include/gudhi/Hasse_complex.h index efcaea55..209fd0b9 100644 --- a/src/Hasse_complex/include/gudhi/Hasse_complex.h +++ b/src/Hasse_complex/include/gudhi/Hasse_complex.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef HASSE_COMPLEX_H_ diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h index e72d63dd..d709baec 100644 --- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h +++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_ diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp index 9889b198..b3a79233 100644 --- a/src/Nerve_GIC/example/CoordGIC.cpp +++ b/src/Nerve_GIC/example/CoordGIC.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp index 1f5de999..2f0b5f2b 100644 --- a/src/Nerve_GIC/example/FuncGIC.cpp +++ b/src/Nerve_GIC/example/FuncGIC.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h index 11bb4e85..d98deeac 100644 --- a/src/Nerve_GIC/include/gudhi/GIC.h +++ b/src/Nerve_GIC/include/gudhi/GIC.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GIC_H_ diff --git a/src/Nerve_GIC/test/test_GIC.cpp b/src/Nerve_GIC/test/test_GIC.cpp index 0db2cce2..06b3f832 100644 --- a/src/Nerve_GIC/test/test_GIC.cpp +++ b/src/Nerve_GIC/test/test_GIC.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py index 701e7a52..e3101549 100755 --- a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py +++ b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py @@ -5,26 +5,14 @@ import numpy as np from collections import defaultdict import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. 
- - Author(s): Mathieu Carriere +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Mathieu Carriere Copyright (C) 2017 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Mathieu Carriere" diff --git a/src/Nerve_GIC/utilities/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp index 667129e0..ef8e2d7e 100644 --- a/src/Nerve_GIC/utilities/Nerve.cpp +++ b/src/Nerve_GIC/utilities/Nerve.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Nerve_GIC/utilities/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp index 33deca40..fabe35c9 100644 --- a/src/Nerve_GIC/utilities/VoronoiGIC.cpp +++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/concept/Real_valued_topological_data.h b/src/Persistence_representations/concept/Real_valued_topological_data.h index 22ef6d72..9b29a9bd 100644 --- a/src/Persistence_representations/concept/Real_valued_topological_data.h +++ b/src/Persistence_representations/concept/Real_valued_topological_data.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_REAL_VALUED_TOPOLOGICAL_DATA_H_ diff --git a/src/Persistence_representations/concept/Topological_data_with_averages.h b/src/Persistence_representations/concept/Topological_data_with_averages.h index aa64467f..b1b5ca80 100644 --- a/src/Persistence_representations/concept/Topological_data_with_averages.h +++ b/src/Persistence_representations/concept/Topological_data_with_averages.h @@ -7,18 +7,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_TOPOLOGICAL_DATA_WITH_AVERAGES_H_ diff --git a/src/Persistence_representations/concept/Topological_data_with_distances.h b/src/Persistence_representations/concept/Topological_data_with_distances.h index c8eb2b34..87c3d158 100644 --- a/src/Persistence_representations/concept/Topological_data_with_distances.h +++ b/src/Persistence_representations/concept/Topological_data_with_distances.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_TOPOLOGICAL_DATA_WITH_DISTANCES_H_ diff --git a/src/Persistence_representations/concept/Topological_data_with_scalar_product.h b/src/Persistence_representations/concept/Topological_data_with_scalar_product.h index 0f08b8c6..063f37ed 100644 --- a/src/Persistence_representations/concept/Topological_data_with_scalar_product.h +++ b/src/Persistence_representations/concept/Topological_data_with_scalar_product.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_TOPOLOGICAL_DATA_WITH_SCALAR_PRODUCT_H_ diff --git a/src/Persistence_representations/concept/Vectorized_topological_data.h b/src/Persistence_representations/concept/Vectorized_topological_data.h index 365105d6..dd1224d2 100644 --- a/src/Persistence_representations/concept/Vectorized_topological_data.h +++ b/src/Persistence_representations/concept/Vectorized_topological_data.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_VECTORIZED_TOPOLOGICAL_DATA_H_ diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index 668904c9..111e532b 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_GUDHI_STAT_H_ diff --git a/src/Persistence_representations/example/persistence_heat_maps.cpp b/src/Persistence_representations/example/persistence_heat_maps.cpp index 45208b68..a7e64bb1 100644 --- a/src/Persistence_representations/example/persistence_heat_maps.cpp +++ b/src/Persistence_representations/example/persistence_heat_maps.cpp @@ -9,18 +9,8 @@ * Modifications: * - 2018/04 MC: Add persistence heat maps computation * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/example/persistence_intervals.cpp b/src/Persistence_representations/example/persistence_intervals.cpp index b5dcf25c..c94f7fe0 100644 --- a/src/Persistence_representations/example/persistence_intervals.cpp +++ b/src/Persistence_representations/example/persistence_intervals.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/example/persistence_landscape.cpp b/src/Persistence_representations/example/persistence_landscape.cpp index 27542cf7..70aff546 100644 --- a/src/Persistence_representations/example/persistence_landscape.cpp +++ b/src/Persistence_representations/example/persistence_landscape.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid.cpp index 0f471a67..7df34ef9 100644 --- a/src/Persistence_representations/example/persistence_landscape_on_grid.cpp +++ b/src/Persistence_representations/example/persistence_landscape_on_grid.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/example/persistence_vectors.cpp b/src/Persistence_representations/example/persistence_vectors.cpp index 072e530d..d0cf2fc8 100644 --- a/src/Persistence_representations/example/persistence_vectors.cpp +++ b/src/Persistence_representations/example/persistence_vectors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index 089172a0..6c01c3af 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA (France) * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/include/gudhi/PSSK.h b/src/Persistence_representations/include/gudhi/PSSK.h index e1174455..995d0ca2 100644 --- a/src/Persistence_representations/include/gudhi/PSSK.h +++ b/src/Persistence_representations/include/gudhi/PSSK.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PSSK_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h index a8458bda..12cb04c2 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h +++ b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h @@ -9,18 +9,8 @@ * * Copyright (C) 2019 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENCE_HEAT_MAPS_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals.h b/src/Persistence_representations/include/gudhi/Persistence_intervals.h index 76eac7d7..47953596 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_intervals.h +++ b/src/Persistence_representations/include/gudhi/Persistence_intervals.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENCE_INTERVALS_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h b/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h index f48d1a3b..9f605d35 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h +++ b/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENCE_INTERVALS_WITH_DISTANCES_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape.h b/src/Persistence_representations/include/gudhi/Persistence_landscape.h index 9cab0166..f949372b 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_landscape.h +++ b/src/Persistence_representations/include/gudhi/Persistence_landscape.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENCE_LANDSCAPE_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h index fd8a181c..68bce336 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h +++ b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h @@ -1,25 +1,12 @@ -/** This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - **/ + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ #ifndef PERSISTENCE_LANDSCAPE_ON_GRID_H_ #define PERSISTENCE_LANDSCAPE_ON_GRID_H_ diff --git a/src/Persistence_representations/include/gudhi/Persistence_vectors.h b/src/Persistence_representations/include/gudhi/Persistence_vectors.h index 9c04be1d..a8f07b3b 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_vectors.h +++ b/src/Persistence_representations/include/gudhi/Persistence_vectors.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENCE_VECTORS_H_ diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index 18165c5f..fbe12422 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SLICED_WASSERSTEIN_H_ diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 6fed019a..488d4529 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef COMMON_PERSISTENCE_REPRESENTATIONS_H_ diff --git a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h index 4a2b9d68..db21c714 100644 --- a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h +++ b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef READ_PERSISTENCE_FROM_FILE_H_ diff --git a/src/Persistence_representations/test/kernels.cpp b/src/Persistence_representations/test/kernels.cpp index b8d02d4c..eb27747c 100644 --- a/src/Persistence_representations/test/kernels.cpp +++ b/src/Persistence_representations/test/kernels.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/persistence_heat_maps_test.cpp b/src/Persistence_representations/test/persistence_heat_maps_test.cpp index 5fad8051..57e1b4b3 100644 --- a/src/Persistence_representations/test/persistence_heat_maps_test.cpp +++ b/src/Persistence_representations/test/persistence_heat_maps_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/persistence_intervals_test.cpp b/src/Persistence_representations/test/persistence_intervals_test.cpp index a89db9e3..513e042d 100644 --- a/src/Persistence_representations/test/persistence_intervals_test.cpp +++ b/src/Persistence_representations/test/persistence_intervals_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp b/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp index 6ba9a470..3b334533 100644 --- a/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp +++ b/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp b/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp index 7eca413b..36e935ac 100644 --- a/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp +++ b/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/persistence_lanscapes_test.cpp b/src/Persistence_representations/test/persistence_lanscapes_test.cpp index 27ad0987..5b368462 100644 --- a/src/Persistence_representations/test/persistence_lanscapes_test.cpp +++ b/src/Persistence_representations/test/persistence_lanscapes_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/read_persistence_from_file_test.cpp b/src/Persistence_representations/test/read_persistence_from_file_test.cpp index c3e8cb4e..ac0438d7 100644 --- a/src/Persistence_representations/test/read_persistence_from_file_test.cpp +++ b/src/Persistence_representations/test/read_persistence_from_file_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Persistence_representations/test/vector_representation_test.cpp b/src/Persistence_representations/test/vector_representation_test.cpp index a6b9314e..9c038727 100644 --- a/src/Persistence_representations/test/vector_representation_test.cpp +++ b/src/Persistence_representations/test/vector_representation_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp index 2cbd812b..4edaffdc 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp index 14d0db8f..e557e82d 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp index 12fceedc..a700724c 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp index 21c553b9..f82a39b0 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp @@ -1,24 +1,12 @@ -/* This file is part of the Gudhi Library. The Gudhi library -* (Geometric Understanding in Higher Dimensions) is a generic C++ -* library for computational topology. -* -* Author(s): Pawel Dlotko -* -* Copyright (C) 2016 Inria -* -* This program is free software: you can redistribute it and/or modify -* it under the terms of the GNU General Public License as published by -* the Free Software Foundation, either version 3 of the License, or -* (at your option) any later version. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -* GNU General Public License for more details. -* -* You should have received a copy of the GNU General Public License -* along with this program. If not, see . -*/ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Pawel Dlotko + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp index 99b0bd17..66bf9416 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp index a4b6e458..ac6ec212 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp index 5960a89f..6a3cc2a9 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp index 04f33915..40ec56ce 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp index e4402589..d351e3b6 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp index 3be3de8f..aa051497 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp index a6953b98..ca67c74f 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp index 4f052f42..e457c6f4 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp index f283971b..b6b35fa1 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp index 1cacbcd0..d171d809 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp index f92d5782..656958cc 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp index 4048f508..06c61bbf 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp index 253fa273..73d83de2 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp index 11fe2886..313d09f5 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp index 59aad2f3..9ae2e670 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp index f32a92a1..5c114f27 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp index 47102087..14c84d51 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp index 6cf2739d..b732cb82 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp index 9417be6b..91643e2a 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp index 46e229bc..0964eca3 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp index 39e438d2..079bd76e 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp index 45199838..663555a6 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp index 0db7dbec..a2e8b226 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp index 8e99251b..842c36c4 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp index 364284e5..5060ca8b 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp index 550e47c5..b449be3e 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp index 3b00d7a9..45757002 100644 --- a/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp +++ b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/concept/CoefficientField.h b/src/Persistent_cohomology/concept/CoefficientField.h index 9d066cca..916f49e2 100644 --- a/src/Persistent_cohomology/concept/CoefficientField.h +++ b/src/Persistent_cohomology/concept/CoefficientField.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Concept describing the requirements for a class to represent * a field of coefficients to compute persistent homology. diff --git a/src/Persistent_cohomology/concept/FilteredComplex.h b/src/Persistent_cohomology/concept/FilteredComplex.h index 7eb01b01..26ac7ac8 100644 --- a/src/Persistent_cohomology/concept/FilteredComplex.h +++ b/src/Persistent_cohomology/concept/FilteredComplex.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief The concept FilteredComplex describes the requirements * for a type to implement a filtered cell complex, from which diff --git a/src/Persistent_cohomology/concept/PersistentHomology.h b/src/Persistent_cohomology/concept/PersistentHomology.h index f9a78763..373832af 100644 --- a/src/Persistent_cohomology/concept/PersistentHomology.h +++ b/src/Persistent_cohomology/concept/PersistentHomology.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Concept describing the requirements for a class to compute * persistent homology. */ diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h index 5fb9d4d2..a16591ce 100644 --- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h +++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_PERSISTENT_COHOMOLOGY_INTRO_PERSISTENT_COHOMOLOGY_H_ diff --git a/src/Persistent_cohomology/example/custom_persistence_sort.cpp b/src/Persistent_cohomology/example/custom_persistence_sort.cpp index 35366144..b2aed37c 100644 --- a/src/Persistent_cohomology/example/custom_persistence_sort.cpp +++ b/src/Persistent_cohomology/example/custom_persistence_sort.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/example/persistence_from_file.cpp b/src/Persistent_cohomology/example/persistence_from_file.cpp index 0a05c193..d169cc63 100644 --- a/src/Persistent_cohomology/example/persistence_from_file.cpp +++ b/src/Persistent_cohomology/example/persistence_from_file.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp index ffccfd86..f95dfeeb 100644 --- a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp +++ b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp index a2256060..b8c8b1f9 100644 --- a/src/Persistent_cohomology/example/plain_homology.cpp +++ b/src/Persistent_cohomology/example/plain_homology.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp index d6a5bdad..9eb5ccfc 100644 --- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp +++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp index 796cfa3a..02db05ec 100644 --- a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp +++ b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp index 71fc0802..37fa5e93 100644 --- a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp +++ b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria, Marc Glisse * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index c57174cb..e62ccbc8 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENT_COHOMOLOGY_H_ diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h index e98b4bb4..0d6c0f82 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENT_COHOMOLOGY_FIELD_ZP_H_ diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h index 2bae8654..716d91cd 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENT_COHOMOLOGY_MULTI_FIELD_H_ diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h index de6c0750..9d7edfe6 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PERSISTENT_COHOMOLOGY_PERSISTENT_COHOMOLOGY_COLUMN_H_ diff --git a/src/Rips_complex/concept/SimplicialComplexForRips.h b/src/Rips_complex/concept/SimplicialComplexForRips.h index 36ab1b0c..21771dcb 100644 --- a/src/Rips_complex/concept/SimplicialComplexForRips.h +++ b/src/Rips_complex/concept/SimplicialComplexForRips.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_RIPS_COMPLEX_SIMPLICIAL_COMPLEX_FOR_RIPS_H_ diff --git a/src/Rips_complex/doc/Intro_rips_complex.h b/src/Rips_complex/doc/Intro_rips_complex.h index 97d66fbd..6e5103ac 100644 --- a/src/Rips_complex/doc/Intro_rips_complex.h +++ b/src/Rips_complex/doc/Intro_rips_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_RIPS_COMPLEX_INTRO_RIPS_COMPLEX_H_ diff --git a/src/Rips_complex/include/gudhi/Rips_complex.h b/src/Rips_complex/include/gudhi/Rips_complex.h index ee100867..958abbe2 100644 --- a/src/Rips_complex/include/gudhi/Rips_complex.h +++ b/src/Rips_complex/include/gudhi/Rips_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef RIPS_COMPLEX_H_ diff --git a/src/Rips_complex/include/gudhi/Sparse_rips_complex.h b/src/Rips_complex/include/gudhi/Sparse_rips_complex.h index 8df6e387..081a0233 100644 --- a/src/Rips_complex/include/gudhi/Sparse_rips_complex.h +++ b/src/Rips_complex/include/gudhi/Sparse_rips_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SPARSE_RIPS_COMPLEX_H_ diff --git a/src/Rips_complex/test/test_rips_complex.cpp b/src/Rips_complex/test/test_rips_complex.cpp index b8b444c9..cc8745cf 100644 --- a/src/Rips_complex/test/test_rips_complex.cpp +++ b/src/Rips_complex/test/test_rips_complex.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp index 287e8915..f634a2ea 100644 --- a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp +++ b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp index c73152cf..98bc6eba 100644 --- a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp +++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Rips_complex/utilities/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp index 9410b9c2..a6357847 100644 --- a/src/Rips_complex/utilities/rips_persistence.cpp +++ b/src/Rips_complex/utilities/rips_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Rips_complex/utilities/sparse_rips_persistence.cpp b/src/Rips_complex/utilities/sparse_rips_persistence.cpp index 3840d9f7..a7db4ec6 100644 --- a/src/Rips_complex/utilities/sparse_rips_persistence.cpp +++ b/src/Rips_complex/utilities/sparse_rips_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/concept/FiltrationValue.h b/src/Simplex_tree/concept/FiltrationValue.h index f4dcf985..6cf314fa 100644 --- a/src/Simplex_tree/concept/FiltrationValue.h +++ b/src/Simplex_tree/concept/FiltrationValue.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Value type for a filtration function on a cell complex. * diff --git a/src/Simplex_tree/concept/IndexingTag.h b/src/Simplex_tree/concept/IndexingTag.h index 37e7e294..5ab50f1c 100644 --- a/src/Simplex_tree/concept/IndexingTag.h +++ b/src/Simplex_tree/concept/IndexingTag.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Concept describing an indexing scheme (see FilteredComplex) * for applying diff --git a/src/Simplex_tree/concept/SimplexKey.h b/src/Simplex_tree/concept/SimplexKey.h index c03f7da1..838fc498 100644 --- a/src/Simplex_tree/concept/SimplexKey.h +++ b/src/Simplex_tree/concept/SimplexKey.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Key type used as simplex identifier. 
* diff --git a/src/Simplex_tree/concept/SimplexTreeOptions.h b/src/Simplex_tree/concept/SimplexTreeOptions.h index 6638da26..cee1a890 100644 --- a/src/Simplex_tree/concept/SimplexTreeOptions.h +++ b/src/Simplex_tree/concept/SimplexTreeOptions.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Marc Glisse - * - * Copyright (C) 2015 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Marc Glisse + * + * Copyright (C) 2015 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Concept of the template parameter for the class `Gudhi::Simplex_tree`. * diff --git a/src/Simplex_tree/concept/VertexHandle.h b/src/Simplex_tree/concept/VertexHandle.h index 9d0642c3..6dd3741b 100644 --- a/src/Simplex_tree/concept/VertexHandle.h +++ b/src/Simplex_tree/concept/VertexHandle.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Clément Maria - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Maria + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ /** \brief Handle type for the vertices of a cell complex. 
* diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h index db399489..b01e3e92 100644 --- a/src/Simplex_tree/doc/Intro_simplex_tree.h +++ b/src/Simplex_tree/doc/Intro_simplex_tree.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_SIMPLEX_TREE_INTRO_SIMPLEX_TREE_H_ diff --git a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp index 6bab8adb..fb1a3a4c 100644 --- a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp +++ b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp index 290a9d9b..8803dbb2 100644 --- a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp +++ b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp index f39de31f..34bfd77c 100644 --- a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp +++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/example/mini_simplex_tree.cpp b/src/Simplex_tree/example/mini_simplex_tree.cpp index e7c7177f..6370b508 100644 --- a/src/Simplex_tree/example/mini_simplex_tree.cpp +++ b/src/Simplex_tree/example/mini_simplex_tree.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp index d71b5608..6a0a7fc0 100644 --- a/src/Simplex_tree/example/simple_simplex_tree.cpp +++ b/src/Simplex_tree/example/simple_simplex_tree.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp index 6d70f3d1..eb0282f2 100644 --- a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp +++ b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 343ed472..76b789c4 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SIMPLEX_TREE_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h index 02c8bb64..7b6dea0f 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SIMPLEX_TREE_SIMPLEX_TREE_ITERATORS_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h index 3a75ec72..26bf0569 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SIMPLEX_TREE_SIMPLEX_TREE_NODE_EXPLICIT_STORAGE_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h index ab2ca707..d2b7d8d9 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SIMPLEX_TREE_SIMPLEX_TREE_SIBLINGS_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h b/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h index ec4461f3..4df7833c 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SIMPLEX_TREE_INDEXING_TAG_H_ diff --git a/src/Skeleton_blocker/concept/SkeletonBlockerDS.h b/src/Skeleton_blocker/concept/SkeletonBlockerDS.h index fd806ff1..52db1766 100644 --- a/src/Skeleton_blocker/concept/SkeletonBlockerDS.h +++ b/src/Skeleton_blocker/concept/SkeletonBlockerDS.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_SKELETON_BLOCKER_SKELETONBLOCKERDS_H_ diff --git a/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h b/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h index d8521343..5987f6fb 100644 --- a/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h +++ b/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_SKELETON_BLOCKER_SKELETONBLOCKERGEOMETRICDS_H_ diff --git a/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp b/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp index f288e39c..486827eb 100644 --- a/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp +++ b/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp b/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp index 4d008450..7f301047 100644 --- a/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp +++ b/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp b/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp index 2ec72128..e634b656 100644 --- a/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp +++ b/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h index e8b6fde8..169cd3b3 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h index 6c6a8638..533cc777 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_COMPLEX_VISITOR_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h index feab7b3f..c9ebd9ad 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_LINK_SUPERIOR_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h index 56009daf..90793843 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_OFF_IO_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h index 22c1668e..3d3d8425 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_SIMPLE_GEOMETRIC_TRAITS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h index 144f1fd0..a931ec98 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_SIMPLE_TRAITS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h index d7193157..115a3a84 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_SIMPLEX_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h index dbfb4042..a9e50d11 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SKELETON_BLOCKER_SUB_COMPLEX_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h index f80ca4fe..d2aa59d3 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_INTERNAL_TOP_FACES_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h index 7a5d38eb..a43fa034 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_INTERNAL_TRIE_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h index 95c5f7ef..66371d0e 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_BLOCKERS_ITERATORS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h index 5c725aae..63b963dd 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_EDGES_ITERATORS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_iterators.h index 8054e64f..7b43e05f 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_iterators.h @@ -1,24 +1,12 @@ - /* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): David Salinas - * - * Copyright (C) 2014 Inria - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ * Author(s): David Salinas + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ #ifndef SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_ITERATORS_H_ #define SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_ITERATORS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h index e2024652..1968f43b 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_SIMPLICES_ITERATORS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h index a834fe1d..ca4d46f6 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_TRIANGLES_ITERATORS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h index 3a638ae6..81c6e82b 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_ITERATORS_SKELETON_BLOCKERS_VERTICES_ITERATORS_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h index addd8104..a8d2420d 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_COMPLEX_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h index 39b88ceb..c7946516 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. 
If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_GEOMETRIC_COMPLEX_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h index 428d4e9b..38fd32fd 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_LINK_COMPLEX_H_ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h index d5adb39d..f8121fcc 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SKELETON_BLOCKER_SIMPLIFIABLE_COMPLEX_H_ diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp index 9760c74d..8265d763 100644 --- a/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp +++ b/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include #include diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp index f2d3bb27..8cad97a1 100644 --- a/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp +++ b/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp index 1f263c98..b714753d 100644 --- a/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp +++ b/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Spatial_searching/doc/Intro_spatial_searching.h b/src/Spatial_searching/doc/Intro_spatial_searching.h index f387ab2f..5cc458e4 100644 --- a/src/Spatial_searching/doc/Intro_spatial_searching.h +++ b/src/Spatial_searching/doc/Intro_spatial_searching.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_SPATIAL_SEARCHING_INTRO_SPATIAL_SEARCHING_H_ diff --git a/src/Spatial_searching/include/gudhi/Kd_tree_search.h b/src/Spatial_searching/include/gudhi/Kd_tree_search.h index ad1054e5..92f3f11b 100644 --- a/src/Spatial_searching/include/gudhi/Kd_tree_search.h +++ b/src/Spatial_searching/include/gudhi/Kd_tree_search.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef KD_TREE_SEARCH_H_ diff --git a/src/Spatial_searching/test/test_Kd_tree_search.cpp b/src/Spatial_searching/test/test_Kd_tree_search.cpp index 981a5850..37d7c073 100644 --- a/src/Spatial_searching/test/test_Kd_tree_search.cpp +++ b/src/Spatial_searching/test/test_Kd_tree_search.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Subsampling/doc/Intro_subsampling.h b/src/Subsampling/doc/Intro_subsampling.h index d88f6bf6..927e2b72 100644 --- a/src/Subsampling/doc/Intro_subsampling.h +++ b/src/Subsampling/doc/Intro_subsampling.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_SUBSAMPLING_INTRO_SUBSAMPLING_H_ diff --git a/src/Subsampling/include/gudhi/choose_n_farthest_points.h b/src/Subsampling/include/gudhi/choose_n_farthest_points.h index ab1c4c73..f99df0fa 100644 --- a/src/Subsampling/include/gudhi/choose_n_farthest_points.h +++ b/src/Subsampling/include/gudhi/choose_n_farthest_points.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CHOOSE_N_FARTHEST_POINTS_H_ diff --git a/src/Subsampling/include/gudhi/pick_n_random_points.h b/src/Subsampling/include/gudhi/pick_n_random_points.h index 64821e5d..f7734238 100644 --- a/src/Subsampling/include/gudhi/pick_n_random_points.h +++ b/src/Subsampling/include/gudhi/pick_n_random_points.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef PICK_N_RANDOM_POINTS_H_ diff --git a/src/Subsampling/include/gudhi/sparsify_point_set.h b/src/Subsampling/include/gudhi/sparsify_point_set.h index db10e0b1..d39d3906 100644 --- a/src/Subsampling/include/gudhi/sparsify_point_set.h +++ b/src/Subsampling/include/gudhi/sparsify_point_set.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SPARSIFY_POINT_SET_H_ diff --git a/src/Subsampling/test/test_choose_n_farthest_points.cpp b/src/Subsampling/test/test_choose_n_farthest_points.cpp index 0e0eb29c..7e3dfb21 100644 --- a/src/Subsampling/test/test_choose_n_farthest_points.cpp +++ b/src/Subsampling/test/test_choose_n_farthest_points.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ // #ifdef _DEBUG diff --git a/src/Subsampling/test/test_pick_n_random_points.cpp b/src/Subsampling/test/test_pick_n_random_points.cpp index 4baf4a5d..49138ab4 100644 --- a/src/Subsampling/test/test_pick_n_random_points.cpp +++ b/src/Subsampling/test/test_pick_n_random_points.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ // #ifdef _DEBUG diff --git a/src/Subsampling/test/test_sparsify_point_set.cpp b/src/Subsampling/test/test_sparsify_point_set.cpp index f414dda3..b2a55663 100644 --- a/src/Subsampling/test/test_sparsify_point_set.cpp +++ b/src/Subsampling/test/test_sparsify_point_set.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Tangential_complex/benchmark/RIB_exporter.h b/src/Tangential_complex/benchmark/RIB_exporter.h index 59ca138a..7e8a8ed9 100644 --- a/src/Tangential_complex/benchmark/RIB_exporter.h +++ b/src/Tangential_complex/benchmark/RIB_exporter.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GUDHI_TC_RIB_EXPORTER_H diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h index 4db5687f..afa67288 100644 --- a/src/Tangential_complex/benchmark/XML_exporter.h +++ b/src/Tangential_complex/benchmark/XML_exporter.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h index 501f4a8b..3bd84aa8 100644 --- a/src/Tangential_complex/doc/Intro_tangential_complex.h +++ b/src/Tangential_complex/doc/Intro_tangential_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h index 4a78127c..7fdd68ee 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef TANGENTIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h index f79186b0..8f008236 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h index e1af1ea6..6de698ab 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef TANGENTIAL_COMPLEX_CONFIG_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h index 2dd46118..14e71797 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef TANGENTIAL_COMPLEX_UTILITIES_H_ diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp index 103b8b30..94038c29 100644 --- a/src/Tangential_complex/test/test_tangential_complex.cpp +++ b/src/Tangential_complex/test/test_tangential_complex.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #define BOOST_TEST_DYN_LINK diff --git a/src/Toplex_map/benchmark/benchmark_tm.cpp b/src/Toplex_map/benchmark/benchmark_tm.cpp index eedb442b..f132b783 100644 --- a/src/Toplex_map/benchmark/benchmark_tm.cpp +++ b/src/Toplex_map/benchmark/benchmark_tm.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Toplex_map/doc/Intro_Toplex_map.h b/src/Toplex_map/doc/Intro_Toplex_map.h index a925dc2b..58c22f64 100644 --- a/src/Toplex_map/doc/Intro_Toplex_map.h +++ b/src/Toplex_map/doc/Intro_Toplex_map.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DOC_TOPLEX_MAP_INTRO_TOPLEX_MAP_H_ diff --git a/src/Toplex_map/example/simple_toplex_map.cpp b/src/Toplex_map/example/simple_toplex_map.cpp index e1c12ed6..27ce0fbe 100644 --- a/src/Toplex_map/example/simple_toplex_map.cpp +++ b/src/Toplex_map/example/simple_toplex_map.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h index d7bccdff..c328e43b 100644 --- a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef LAZY_TOPLEX_MAP_H diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index 4dc2331f..7deebef7 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef TOPLEX_MAP_H diff --git a/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp b/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp index a050cc92..2cca9c46 100644 --- a/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp index 2bd27936..c4c37bb3 100644 --- a/src/Toplex_map/test/toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 INRIA * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Witness_complex/concept/SimplicialComplexForWitness.h b/src/Witness_complex/concept/SimplicialComplexForWitness.h index 8b85f4e4..609b59ae 100644 --- a/src/Witness_complex/concept/SimplicialComplexForWitness.h +++ b/src/Witness_complex/concept/SimplicialComplexForWitness.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONCEPT_WITNESS_COMPLEX_SIMPLICIAL_COMPLEX_FOR_WITNESS_H_ diff --git a/src/Witness_complex/example/generators.h b/src/Witness_complex/example/generators.h index 4b755daa..214cd059 100644 --- a/src/Witness_complex/example/generators.h +++ b/src/Witness_complex/example/generators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GENERATORS_H_ diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h index 8cb8662b..56b3e808 100644 --- a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h +++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef ACTIVE_WITNESS_ACTIVE_WITNESS_H_ diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h index 10d2ec52..6aa9c0dd 100644 --- a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h +++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef ACTIVE_WITNESS_ACTIVE_WITNESS_ITERATOR_H_ diff --git a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h index ea97cd3f..d2bf00ce 100644 --- a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef EUCLIDEAN_STRONG_WITNESS_COMPLEX_H_ diff --git a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h index 1dacefa5..a4430301 100644 --- a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef EUCLIDEAN_WITNESS_COMPLEX_H_ diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h index 03d6d2e7..5861ec62 100644 --- a/src/Witness_complex/include/gudhi/Strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef STRONG_WITNESS_COMPLEX_H_ diff --git a/src/Witness_complex/include/gudhi/Witness_complex.h b/src/Witness_complex/include/gudhi/Witness_complex.h index 1f61f8f2..375a79ac 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex.h +++ b/src/Witness_complex/include/gudhi/Witness_complex.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef WITNESS_COMPLEX_H_ diff --git a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h index c7b732b9..ae7ad0f3 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h +++ b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef WITNESS_COMPLEX_ALL_FACES_IN_H_ diff --git a/src/Witness_complex/utilities/strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp index f386e992..a06bb1fa 100644 --- a/src/Witness_complex/utilities/strong_witness_persistence.cpp +++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/Witness_complex/utilities/weak_witness_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp index ea00cfe7..35d09f95 100644 --- a/src/Witness_complex/utilities/weak_witness_persistence.cpp +++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp b/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp index 825d6cb5..a03d1757 100644 --- a/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp +++ b/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/common/doc/file_formats.h b/src/common/doc/file_formats.h index 235296d3..4af5d45c 100644 --- a/src/common/doc/file_formats.h +++ b/src/common/doc/file_formats.h @@ -1,24 +1,12 @@ -/* This file is part of the Gudhi Library. The Gudhi library -* (Geometric Understanding in Higher Dimensions) is a generic C++ -* library for computational topology. -* -* Author(s): Clément Jamin -* -* Copyright (C) 2017 Inria -* -* This program is free software: you can redistribute it and/or modify -* it under the terms of the GNU General Public License as published by -* the Free Software Foundation, either version 3 of the License, or -* (at your option) any later version. -* -* This program is distributed in the hope that it will be useful, -* but WITHOUT ANY WARRANTY; without even the implied warranty of -* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -* GNU General Public License for more details. -* -* You should have received a copy of the GNU General Public License -* along with this program. If not, see . -*/ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Clément Jamin + * + * Copyright (C) 2017 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ #ifndef DOC_COMMON_FILE_FORMAT_H_ #define DOC_COMMON_FILE_FORMAT_H_ diff --git a/src/common/include/gudhi/Clock.h b/src/common/include/gudhi/Clock.h index cdf18cb2..00ab2f27 100644 --- a/src/common/include/gudhi/Clock.h +++ b/src/common/include/gudhi/Clock.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CLOCK_H_ diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h index 3f5cb04f..826cbc3e 100644 --- a/src/common/include/gudhi/Debug_utils.h +++ b/src/common/include/gudhi/Debug_utils.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DEBUG_UTILS_H_ #define DEBUG_UTILS_H_ diff --git a/src/common/include/gudhi/Null_output_iterator.h b/src/common/include/gudhi/Null_output_iterator.h index c700af5f..81309080 100644 --- a/src/common/include/gudhi/Null_output_iterator.h +++ b/src/common/include/gudhi/Null_output_iterator.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef NULL_OUTPUT_ITERATOR_H_ diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index fc951fe7..aaff95b8 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ diff --git a/src/common/include/gudhi/Point.h b/src/common/include/gudhi/Point.h index 345a8465..e85277e9 100644 --- a/src/common/include/gudhi/Point.h +++ b/src/common/include/gudhi/Point.h @@ -1,24 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef POINT_H_ diff --git a/src/common/include/gudhi/Points_3D_off_io.h b/src/common/include/gudhi/Points_3D_off_io.h index 704f73a7..2112cc05 100644 --- a/src/common/include/gudhi/Points_3D_off_io.h +++ b/src/common/include/gudhi/Points_3D_off_io.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef POINTS_3D_OFF_IO_H_ #define POINTS_3D_OFF_IO_H_ diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index 38029658..a0fca77e 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef POINTS_OFF_IO_H_ #define POINTS_OFF_IO_H_ diff --git a/src/common/include/gudhi/Simple_object_pool.h b/src/common/include/gudhi/Simple_object_pool.h index 47283521..164849e1 100644 --- a/src/common/include/gudhi/Simple_object_pool.h +++ b/src/common/include/gudhi/Simple_object_pool.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef SIMPLE_OBJECT_POOL_H_ diff --git a/src/common/include/gudhi/Unitary_tests_utils.h b/src/common/include/gudhi/Unitary_tests_utils.h index 22f00212..5ab20af8 100644 --- a/src/common/include/gudhi/Unitary_tests_utils.h +++ b/src/common/include/gudhi/Unitary_tests_utils.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef UNITARY_TESTS_UTILS_H_ #define UNITARY_TESTS_UTILS_H_ diff --git a/src/common/include/gudhi/allocator.h b/src/common/include/gudhi/allocator.h index 3de16a49..e828f441 100644 --- a/src/common/include/gudhi/allocator.h +++ b/src/common/include/gudhi/allocator.h @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef ALLOCATOR_H_ diff --git a/src/common/include/gudhi/console_color.h b/src/common/include/gudhi/console_color.h index a493e0d0..7681ae66 100644 --- a/src/common/include/gudhi/console_color.h +++ b/src/common/include/gudhi/console_color.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef CONSOLE_COLOR_H_ diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h index 5ef12f2e..94cf9ccc 100644 --- a/src/common/include/gudhi/distance_functions.h +++ b/src/common/include/gudhi/distance_functions.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef DISTANCE_FUNCTIONS_H_ diff --git a/src/common/include/gudhi/graph_simplicial_complex.h b/src/common/include/gudhi/graph_simplicial_complex.h index 0d81ca71..b8508697 100644 --- a/src/common/include/gudhi/graph_simplicial_complex.h +++ b/src/common/include/gudhi/graph_simplicial_complex.h @@ -1,23 +1,11 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef GRAPH_SIMPLICIAL_COMPLEX_H_ diff --git a/src/common/include/gudhi/random_point_generators.h b/src/common/include/gudhi/random_point_generators.h index f8107c8b..dbaf0ab1 100644 --- a/src/common/include/gudhi/random_point_generators.h +++ b/src/common/include/gudhi/random_point_generators.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef RANDOM_POINT_GENERATORS_H_ diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h index 0ee7649d..1365b560 100644 --- a/src/common/include/gudhi/reader_utils.h +++ b/src/common/include/gudhi/reader_utils.h @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef READER_UTILS_H_ diff --git a/src/common/include/gudhi/writing_persistence_to_file.h b/src/common/include/gudhi/writing_persistence_to_file.h index 34448576..c0d4929e 100644 --- a/src/common/include/gudhi/writing_persistence_to_file.h +++ b/src/common/include/gudhi/writing_persistence_to_file.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Swansea University, UK * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef WRITING_PERSISTENCE_TO_FILE_H_ diff --git a/src/common/test/test_distance_matrix_reader.cpp b/src/common/test/test_distance_matrix_reader.cpp index 6fee86e2..c25b2f53 100644 --- a/src/common/test/test_distance_matrix_reader.cpp +++ b/src/common/test/test_distance_matrix_reader.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/common/test/test_persistence_intervals_reader.cpp b/src/common/test/test_persistence_intervals_reader.cpp index b7ece9bd..0d0f515e 100644 --- a/src/common/test/test_persistence_intervals_reader.cpp +++ b/src/common/test/test_persistence_intervals_reader.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/common/test/test_points_off_reader.cpp b/src/common/test/test_points_off_reader.cpp index e4b76ed7..49fa1ec2 100644 --- a/src/common/test/test_points_off_reader.cpp +++ b/src/common/test/test_points_off_reader.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2015 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/common/utilities/off_file_from_shape_generator.cpp b/src/common/utilities/off_file_from_shape_generator.cpp index 5e3da7f7..eb31e8bc 100644 --- a/src/common/utilities/off_file_from_shape_generator.cpp +++ b/src/common/utilities/off_file_from_shape_generator.cpp @@ -6,18 +6,8 @@ * * Copyright (C) 2014 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #include diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx index 4f772e31..3861ae65 100644 --- a/src/cython/cython/alpha_complex.pyx +++ b/src/cython/cython/alpha_complex.pyx @@ -5,26 +5,14 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/bottleneck_distance.pyx b/src/cython/cython/bottleneck_distance.pyx index 76ef81f4..1af027be 100644 --- a/src/cython/cython/bottleneck_distance.pyx +++ b/src/cython/cython/bottleneck_distance.pyx @@ -3,26 +3,14 @@ from libcpp.vector cimport vector from libcpp.utility cimport pair import os -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/cubical_complex.pyx b/src/cython/cython/cubical_complex.pyx index 53d79b92..8f6c84eb 100644 --- a/src/cython/cython/cubical_complex.pyx +++ b/src/cython/cython/cubical_complex.pyx @@ -7,26 +7,14 @@ import os from numpy import array as np_array -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2019 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/euclidean_strong_witness_complex.pyx b/src/cython/cython/euclidean_strong_witness_complex.pyx index 62b7cf71..101b06c9 100644 --- a/src/cython/cython/euclidean_strong_witness_complex.pyx +++ b/src/cython/cython/euclidean_strong_witness_complex.pyx @@ -2,26 +2,14 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. 
- - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/euclidean_witness_complex.pyx b/src/cython/cython/euclidean_witness_complex.pyx index c10ca73d..ff27f157 100644 --- a/src/cython/cython/euclidean_witness_complex.pyx +++ b/src/cython/cython/euclidean_witness_complex.pyx @@ -2,26 +2,14 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/nerve_gic.pyx b/src/cython/cython/nerve_gic.pyx index 5f01b379..6eeaf5fe 100644 --- a/src/cython/cython/nerve_gic.pyx +++ b/src/cython/cython/nerve_gic.pyx @@ -5,26 +5,14 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/off_reader.pyx b/src/cython/cython/off_reader.pyx index b939013f..f1e97532 100644 --- a/src/cython/cython/off_reader.pyx +++ b/src/cython/cython/off_reader.pyx @@ -3,26 +3,14 @@ from libcpp.vector cimport vector from libcpp.string cimport string import os -"""This file is part of the Gudhi Library. 
The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/periodic_cubical_complex.pyx b/src/cython/cython/periodic_cubical_complex.pyx index 3866f53b..3b50163e 100644 --- a/src/cython/cython/periodic_cubical_complex.pyx +++ b/src/cython/cython/periodic_cubical_complex.pyx @@ -7,26 +7,14 @@ import os from numpy import array as np_array -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2019 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py index 7bb69840..638d4d35 100644 --- a/src/cython/cython/persistence_graphical_tools.py +++ b/src/cython/cython/persistence_graphical_tools.py @@ -2,26 +2,14 @@ from os import path from math import isfinite import numpy as np -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau, Bertrand Michel +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau, Bertrand Michel Copyright (C) 2019 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau, Bertrand Michel" diff --git a/src/cython/cython/reader_utils.pyx b/src/cython/cython/reader_utils.pyx index 6dde5286..87239b29 100644 --- a/src/cython/cython/reader_utils.pyx +++ b/src/cython/cython/reader_utils.pyx @@ -7,26 +7,14 @@ from libcpp.pair cimport pair from os import path from numpy import array as np_array -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2019 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/rips_complex.pyx b/src/cython/cython/rips_complex.pyx index 7c83241c..a6f4c0a4 100644 --- a/src/cython/cython/rips_complex.pyx +++ b/src/cython/cython/rips_complex.pyx @@ -5,26 +5,14 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. 
- - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx index 43bc11c9..2947a766 100644 --- a/src/cython/cython/simplex_tree.pyx +++ b/src/cython/cython/simplex_tree.pyx @@ -6,26 +6,14 @@ from libcpp.string cimport string from numpy import array as np_array -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2019 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/strong_witness_complex.pyx b/src/cython/cython/strong_witness_complex.pyx index 4b7bff34..afb22ef3 100644 --- a/src/cython/cython/strong_witness_complex.pyx +++ b/src/cython/cython/strong_witness_complex.pyx @@ -2,26 +2,14 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx index e9d61a37..21419168 100644 --- a/src/cython/cython/subsampling.pyx +++ b/src/cython/cython/subsampling.pyx @@ -4,26 +4,14 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library. 
The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/tangential_complex.pyx b/src/cython/cython/tangential_complex.pyx index 293ef8cb..f5d88898 100644 --- a/src/cython/cython/tangential_complex.pyx +++ b/src/cython/cython/tangential_complex.pyx @@ -5,26 +5,14 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/cython/witness_complex.pyx b/src/cython/cython/witness_complex.pyx index b1cce83f..82aa4bae 100644 --- a/src/cython/cython/witness_complex.pyx +++ b/src/cython/cython/witness_complex.pyx @@ -2,26 +2,14 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py index 4abe22d4..0957175a 100755 --- a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/alpha_complex_from_points_example.py b/src/cython/example/alpha_complex_from_points_example.py index ad73c744..4338ed87 100755 --- a/src/cython/example/alpha_complex_from_points_example.py +++ b/src/cython/example/alpha_complex_from_points_example.py @@ -2,26 +2,14 @@ from gudhi import AlphaComplex, SimplexTree -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py index b51fa7a8..b5d6c0a7 100755 --- a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py +++ b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py @@ -4,26 +4,14 @@ import gudhi import argparse import math -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/bottleneck_basic_example.py b/src/cython/example/bottleneck_basic_example.py index 287956e7..4adc0785 100755 --- a/src/cython/example/bottleneck_basic_example.py +++ b/src/cython/example/bottleneck_basic_example.py @@ -2,26 +2,14 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Francois Godi, Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Francois Godi, Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
+ Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Francois Godi, Vincent Rouvreau" diff --git a/src/cython/example/coordinate_graph_induced_complex.py b/src/cython/example/coordinate_graph_induced_complex.py index 9e93109a..d066b20a 100755 --- a/src/cython/example/coordinate_graph_induced_complex.py +++ b/src/cython/example/coordinate_graph_induced_complex.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py index 3b29781f..604d52c9 100755 --- a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
+ Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py index db34962d..cd949dc4 100755 --- a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/functional_graph_induced_complex.py b/src/cython/example/functional_graph_induced_complex.py index 6ad7c2ec..fcc4373d 100755 --- a/src/cython/example/functional_graph_induced_complex.py +++ b/src/cython/example/functional_graph_induced_complex.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py index ac3d146c..a031b0d0 100755 --- a/src/cython/example/gudhi_graphical_tools_example.py +++ b/src/cython/example/gudhi_graphical_tools_example.py @@ -2,26 +2,14 @@ import gudhi -"""This file is part of the Gudhi Library. 
The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/nerve_of_a_covering.py b/src/cython/example/nerve_of_a_covering.py index c5577cb1..97042865 100755 --- a/src/cython/example/nerve_of_a_covering.py +++ b/src/cython/example/nerve_of_a_covering.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py index 5f968bf1..0c9bf242 100755 --- a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py +++ b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/random_cubical_complex_persistence_example.py b/src/cython/example/random_cubical_complex_persistence_example.py index 80ff2452..efbfcdf8 100755 --- a/src/cython/example/random_cubical_complex_persistence_example.py +++ b/src/cython/example/random_cubical_complex_persistence_example.py @@ -7,26 +7,14 @@ import argparse import operator -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py index 0c9dfc43..5621f4a1 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py @@ -4,26 +4,14 @@ import gudhi import sys import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2017 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. 
- - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py index 4d2ed577..d298de9a 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py index d15d5eb0..39537e41 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
+ Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/rips_complex_from_points_example.py b/src/cython/example/rips_complex_from_points_example.py index ffa9d91f..26df89ac 100755 --- a/src/cython/example/rips_complex_from_points_example.py +++ b/src/cython/example/rips_complex_from_points_example.py @@ -2,26 +2,14 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/rips_persistence_diagram.py b/src/cython/example/rips_persistence_diagram.py index 7a6a9f46..9e6d7a3f 100755 --- a/src/cython/example/rips_persistence_diagram.py +++ b/src/cython/example/rips_persistence_diagram.py @@ -2,26 +2,14 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Marc Glisse +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Marc Glisse Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Marc Glisse" diff --git a/src/cython/example/simplex_tree_example.py b/src/cython/example/simplex_tree_example.py index 28679015..51b8bfc7 100755 --- a/src/cython/example/simplex_tree_example.py +++ b/src/cython/example/simplex_tree_example.py @@ -2,26 +2,14 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/sparse_rips_persistence_diagram.py b/src/cython/example/sparse_rips_persistence_diagram.py index d58c244c..9dc26f08 100755 --- a/src/cython/example/sparse_rips_persistence_diagram.py +++ b/src/cython/example/sparse_rips_persistence_diagram.py @@ -2,26 +2,14 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Marc Glisse +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Marc Glisse Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Marc Glisse" diff --git a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py index 536517d1..49fd7026 100755 --- a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py +++ b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. 
- - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/voronoi_graph_induced_complex.py b/src/cython/example/voronoi_graph_induced_complex.py index 8266a0e4..623601d6 100755 --- a/src/cython/example/voronoi_graph_induced_complex.py +++ b/src/cython/example/voronoi_graph_induced_complex.py @@ -3,26 +3,14 @@ import gudhi import argparse -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/example/witness_complex_from_nearest_landmark_table.py b/src/cython/example/witness_complex_from_nearest_landmark_table.py index 1b79d9b2..9a358fb1 100755 --- a/src/cython/example/witness_complex_from_nearest_landmark_table.py +++ b/src/cython/example/witness_complex_from_nearest_landmark_table.py @@ -2,26 +2,14 @@ from gudhi import StrongWitnessComplex, SimplexTree -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
+ Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in index 0d4b966b..723a31ad 100644 --- a/src/cython/gudhi.pyx.in +++ b/src/cython/gudhi.pyx.in @@ -1,23 +1,11 @@ -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/include/Alpha_complex_interface.h b/src/cython/include/Alpha_complex_interface.h index faa059d1..1199b741 100644 --- a/src/cython/include/Alpha_complex_interface.h +++ b/src/cython/include/Alpha_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Bottleneck_distance_interface.h b/src/cython/include/Bottleneck_distance_interface.h index 5ad9d77d..22c9a97a 100644 --- a/src/cython/include/Bottleneck_distance_interface.h +++ b/src/cython/include/Bottleneck_distance_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ diff --git a/src/cython/include/Cubical_complex_interface.h b/src/cython/include/Cubical_complex_interface.h index 85b717b3..7d32914c 100644 --- a/src/cython/include/Cubical_complex_interface.h +++ b/src/cython/include/Cubical_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Euclidean_strong_witness_complex_interface.h b/src/cython/include/Euclidean_strong_witness_complex_interface.h index d86355d6..90bd54ac 100644 --- a/src/cython/include/Euclidean_strong_witness_complex_interface.h +++ b/src/cython/include/Euclidean_strong_witness_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Euclidean_witness_complex_interface.h b/src/cython/include/Euclidean_witness_complex_interface.h index dc303533..0c01a741 100644 --- a/src/cython/include/Euclidean_witness_complex_interface.h +++ b/src/cython/include/Euclidean_witness_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Nerve_gic_interface.h b/src/cython/include/Nerve_gic_interface.h index aa71e2a6..729b39fb 100644 --- a/src/cython/include/Nerve_gic_interface.h +++ b/src/cython/include/Nerve_gic_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2018 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_NERVE_GIC_INTERFACE_H_ diff --git a/src/cython/include/Off_reader_interface.h b/src/cython/include/Off_reader_interface.h index f6b14f38..4b3643be 100644 --- a/src/cython/include/Off_reader_interface.h +++ b/src/cython/include/Off_reader_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_OFF_READER_INTERFACE_H_ diff --git a/src/cython/include/Persistent_cohomology_interface.h b/src/cython/include/Persistent_cohomology_interface.h index 8cf71a4e..64e2ddc8 100644 --- a/src/cython/include/Persistent_cohomology_interface.h +++ b/src/cython/include/Persistent_cohomology_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ diff --git a/src/cython/include/Reader_utils_interface.h b/src/cython/include/Reader_utils_interface.h index 5f7527d9..5bddf9ce 100644 --- a/src/cython/include/Reader_utils_interface.h +++ b/src/cython/include/Reader_utils_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2017 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_READER_UTILS_INTERFACE_H_ diff --git a/src/cython/include/Rips_complex_interface.h b/src/cython/include/Rips_complex_interface.h index 40aff299..f818a2ed 100644 --- a/src/cython/include/Rips_complex_interface.h +++ b/src/cython/include/Rips_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_RIPS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Simplex_tree_interface.h b/src/cython/include/Simplex_tree_interface.h index ca98517d..c15a44a5 100644 --- a/src/cython/include/Simplex_tree_interface.h +++ b/src/cython/include/Simplex_tree_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_SIMPLEX_TREE_INTERFACE_H_ diff --git a/src/cython/include/Strong_witness_complex_interface.h b/src/cython/include/Strong_witness_complex_interface.h index 3c72c916..4c333da8 100644 --- a/src/cython/include/Strong_witness_complex_interface.h +++ b/src/cython/include/Strong_witness_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Subsampling_interface.h b/src/cython/include/Subsampling_interface.h index f990da0c..bc390485 100644 --- a/src/cython/include/Subsampling_interface.h +++ b/src/cython/include/Subsampling_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_SUBSAMPLING_INTERFACE_H_ diff --git a/src/cython/include/Tangential_complex_interface.h b/src/cython/include/Tangential_complex_interface.h index c4ddbdbe..7c3f2789 100644 --- a/src/cython/include/Tangential_complex_interface.h +++ b/src/cython/include/Tangential_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
+ * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Witness_complex_interface.h b/src/cython/include/Witness_complex_interface.h index 01b372e7..609277d6 100644 --- a/src/cython/include/Witness_complex_interface.h +++ b/src/cython/include/Witness_complex_interface.h @@ -6,18 +6,8 @@ * * Copyright (C) 2016 Inria * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . + * Modification(s): + * - YYYY/MM Author: Description of the modification */ #ifndef INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in index c66905ac..70c85852 100644 --- a/src/cython/setup.py.in +++ b/src/cython/setup.py.in @@ -2,26 +2,14 @@ from distutils.core import setup, Extension from Cython.Build import cythonize from numpy import get_include as numpy_get_include -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2019 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "GUDHI Editorial Board" diff --git a/src/cython/test/test_alpha_complex.py b/src/cython/test/test_alpha_complex.py index e97f2530..aac4c22a 100755 --- a/src/cython/test/test_alpha_complex.py +++ b/src/cython/test/test_alpha_complex.py @@ -1,25 +1,13 @@ from gudhi import AlphaComplex, SimplexTree -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_bottleneck_distance.py b/src/cython/test/test_bottleneck_distance.py index 4eb5848f..5be1da5a 100755 --- a/src/cython/test/test_bottleneck_distance.py +++ b/src/cython/test/test_bottleneck_distance.py @@ -1,25 +1,13 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_cover_complex.py b/src/cython/test/test_cover_complex.py index 58935264..7e99946d 100755 --- a/src/cython/test/test_cover_complex.py +++ b/src/cython/test/test_cover_complex.py @@ -1,25 +1,13 @@ from gudhi import CoverComplex -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2018 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
+ Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py index 92e591e9..e06b3c07 100755 --- a/src/cython/test/test_cubical_complex.py +++ b/src/cython/test/test_cubical_complex.py @@ -1,25 +1,13 @@ from gudhi import CubicalComplex -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_euclidean_witness_complex.py b/src/cython/test/test_euclidean_witness_complex.py index 2f77210a..04c73409 100755 --- a/src/cython/test/test_euclidean_witness_complex.py +++ b/src/cython/test/test_euclidean_witness_complex.py @@ -1,25 +1,13 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_reader_utils.py b/src/cython/test/test_reader_utils.py index 36e927b0..2166bb05 100755 --- a/src/cython/test/test_reader_utils.py +++ b/src/cython/test/test_reader_utils.py @@ -1,26 +1,14 @@ import gudhi import numpy as np -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2017 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_rips_complex.py b/src/cython/test/test_rips_complex.py index 05dfcaf7..4443fac5 100755 --- a/src/cython/test/test_rips_complex.py +++ b/src/cython/test/test_rips_complex.py @@ -1,26 +1,14 @@ from gudhi import RipsComplex from math import sqrt -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py index cb701c9a..8310566b 100755 --- a/src/cython/test/test_simplex_tree.py +++ b/src/cython/test/test_simplex_tree.py @@ -1,25 +1,13 @@ from gudhi import SimplexTree -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. 
- - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_subsampling.py b/src/cython/test/test_subsampling.py index 96906a6f..eecb290a 100755 --- a/src/cython/test/test_subsampling.py +++ b/src/cython/test/test_subsampling.py @@ -1,25 +1,13 @@ import gudhi -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_tangential_complex.py b/src/cython/test/test_tangential_complex.py index 5c62f278..ce05b05f 100755 --- a/src/cython/test/test_tangential_complex.py +++ b/src/cython/test/test_tangential_complex.py @@ -1,25 +1,13 @@ from gudhi import TangentialComplex, SimplexTree -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" diff --git a/src/cython/test/test_witness_complex.py b/src/cython/test/test_witness_complex.py index bcbc521b..abc3b3b9 100755 --- a/src/cython/test/test_witness_complex.py +++ b/src/cython/test/test_witness_complex.py @@ -1,25 +1,13 @@ from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. 
- - Author(s): Vincent Rouvreau +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau Copyright (C) 2016 Inria - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" -- cgit v1.2.3 From aaf2b79181c1b0f935d2b2110f0e7f48fb98b35b Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Thu, 13 Jun 2019 17:56:22 +0200 Subject: Update src/Persistent_cohomology/example/plain_homology.cpp Co-Authored-By: Marc Glisse --- src/Persistent_cohomology/example/plain_homology.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp index be2dc4cc..1c70ba5d 100644 --- a/src/Persistent_cohomology/example/plain_homology.cpp +++ b/src/Persistent_cohomology/example/plain_homology.cpp @@ -75,7 +75,8 @@ int main() { st.initialize_filtration(); // Class for homology computation - // We want persistent homology to be computed for the maximal dimension in the complex (persistence_dim_max = true) + // By default, since the complex has dimension 1, only 0-dimensional homology would be computed. + // Here we also want persistent homology to be computed for the maximal dimension in the complex (persistence_dim_max = true) Persistent_cohomology pcoh(st, true); // Initialize the coefficient field Z/2Z for homology -- cgit v1.2.3 From 64e6c96afa4d1a95b8d6ee74ea910c1cb80ebbd9 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 14 Jun 2019 14:45:22 +0200 Subject: Remove gudhi_patches for CGAL and CGAL 4.11.0 is now the minimal CGAL version required. 
Modification of documentation accordingly --- CMakeGUDHIVersion.txt | 4 +- src/Alpha_complex/example/CMakeLists.txt | 4 +- src/Alpha_complex/test/CMakeLists.txt | 5 +- src/Alpha_complex/utilities/CMakeLists.txt | 5 +- src/Bottleneck_distance/benchmark/CMakeLists.txt | 4 +- src/Bottleneck_distance/example/CMakeLists.txt | 4 +- src/Bottleneck_distance/test/CMakeLists.txt | 4 +- src/Bottleneck_distance/utilities/CMakeLists.txt | 4 +- src/Doxyfile.in | 1 - src/GudhUI/CMakeLists.txt | 4 +- src/Nerve_GIC/example/CMakeLists.txt | 4 +- src/Nerve_GIC/test/CMakeLists.txt | 4 +- src/Nerve_GIC/utilities/CMakeLists.txt | 4 +- .../test/CMakeLists.txt | 16 +- .../utilities/persistence_intervals/CMakeLists.txt | 4 +- src/Persistent_cohomology/example/CMakeLists.txt | 21 +- src/Spatial_searching/example/CMakeLists.txt | 4 +- src/Spatial_searching/test/CMakeLists.txt | 2 +- src/Subsampling/example/CMakeLists.txt | 4 +- src/Subsampling/test/CMakeLists.txt | 4 +- src/Tangential_complex/benchmark/CMakeLists.txt | 4 +- src/Tangential_complex/example/CMakeLists.txt | 4 +- src/Tangential_complex/test/CMakeLists.txt | 4 +- src/Witness_complex/example/CMakeLists.txt | 4 +- src/Witness_complex/test/CMakeLists.txt | 4 +- src/Witness_complex/utilities/CMakeLists.txt | 4 +- .../modules/GUDHI_third_party_libraries.cmake | 32 +- src/cmake/modules/GUDHI_user_version_target.cmake | 9 +- src/common/doc/footer.html | 2 +- src/common/doc/main_page.md | 48 +- src/common/example/CMakeLists.txt | 4 +- .../Bottleneck_distance_CGAL_patches.txt | 3 - .../include/gudhi_patches/CGAL/Convex_hull.h | 56 - .../gudhi_patches/CGAL/Delaunay_triangulation.h | 933 ------------ src/common/include/gudhi_patches/CGAL/Epeck_d.h | 53 - src/common/include/gudhi_patches/CGAL/Epick_d.h | 71 - .../CGAL/IO/Triangulation_off_ostream.h | 320 ---- src/common/include/gudhi_patches/CGAL/Kd_tree.h | 582 ------- .../include/gudhi_patches/CGAL/Kd_tree_node.h | 586 ------- .../CGAL/NewKernel_d/Cartesian_LA_base.h | 177 --- .../CGAL/NewKernel_d/Cartesian_LA_functors.h | 344 ----- .../CGAL/NewKernel_d/Cartesian_base.h | 40 - .../CGAL/NewKernel_d/Cartesian_change_FT.h | 117 -- .../CGAL/NewKernel_d/Cartesian_complete.h | 33 - .../CGAL/NewKernel_d/Cartesian_filter_K.h | 79 - .../CGAL/NewKernel_d/Cartesian_filter_NT.h | 93 -- .../CGAL/NewKernel_d/Cartesian_per_dimension.h | 33 - .../CGAL/NewKernel_d/Cartesian_static_filters.h | 95 -- .../gudhi_patches/CGAL/NewKernel_d/Coaffine.h | 330 ---- .../CGAL/NewKernel_d/Define_kernel_types.h | 50 - .../CGAL/NewKernel_d/Dimension_base.h | 49 - .../CGAL/NewKernel_d/Filtered_predicate2.h | 137 -- .../CGAL/NewKernel_d/KernelD_converter.h | 199 --- .../CGAL/NewKernel_d/Kernel_2_interface.h | 104 -- .../CGAL/NewKernel_d/Kernel_3_interface.h | 102 -- .../CGAL/NewKernel_d/Kernel_d_interface.h | 298 ---- .../CGAL/NewKernel_d/Kernel_object_converter.h | 134 -- .../gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h | 175 --- .../CGAL/NewKernel_d/LA_eigen/constructors.h | 162 -- .../CGAL/NewKernel_d/Lazy_cartesian.h | 188 --- .../CGAL/NewKernel_d/Types/Aff_transformation.h | 59 - .../CGAL/NewKernel_d/Types/Hyperplane.h | 159 -- .../gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h | 88 -- .../gudhi_patches/CGAL/NewKernel_d/Types/Line.h | 66 - .../gudhi_patches/CGAL/NewKernel_d/Types/Ray.h | 66 - .../gudhi_patches/CGAL/NewKernel_d/Types/Segment.h | 121 -- .../gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h | 132 -- .../CGAL/NewKernel_d/Types/Weighted_point.h | 205 --- .../gudhi_patches/CGAL/NewKernel_d/Vector/array.h | 165 -- 
.../gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h | 213 --- ...f_iterator_to_points_from_iterator_to_vectors.h | 76 - ...determinant_of_iterator_to_points_from_points.h | 211 --- ...terminant_of_iterator_to_vectors_from_vectors.h | 201 --- .../Vector/determinant_of_points_from_vectors.h | 164 -- .../Vector/determinant_of_vectors_small_dim.h | 58 - .../determinant_of_vectors_small_dim_internal.h | 164 -- .../gudhi_patches/CGAL/NewKernel_d/Vector/mix.h | 46 - .../gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h | 145 -- .../gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h | 181 --- .../gudhi_patches/CGAL/NewKernel_d/Vector/vector.h | 167 -- .../CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h | 305 ---- .../CGAL/NewKernel_d/Wrapper/Hyperplane_d.h | 131 -- .../CGAL/NewKernel_d/Wrapper/Point_d.h | 284 ---- .../CGAL/NewKernel_d/Wrapper/Ref_count_obj.h | 120 -- .../CGAL/NewKernel_d/Wrapper/Segment_d.h | 133 -- .../CGAL/NewKernel_d/Wrapper/Sphere_d.h | 130 -- .../CGAL/NewKernel_d/Wrapper/Vector_d.h | 266 ---- .../CGAL/NewKernel_d/Wrapper/Weighted_point_d.h | 129 -- .../CGAL/NewKernel_d/function_objects_cartesian.h | 1355 ----------------- .../CGAL/NewKernel_d/functor_properties.h | 40 - .../gudhi_patches/CGAL/NewKernel_d/functor_tags.h | 363 ----- .../gudhi_patches/CGAL/NewKernel_d/static_int.h | 61 - .../gudhi_patches/CGAL/NewKernel_d/store_kernel.h | 104 -- .../include/gudhi_patches/CGAL/NewKernel_d/utils.h | 306 ---- .../CGAL/Orthogonal_incremental_neighbor_search.h | 620 -------- .../gudhi_patches/CGAL/Regular_triangulation.h | 1169 -------------- .../CGAL/Regular_triangulation_traits_adapter.h | 288 ---- .../CGAL/TDS_full_cell_default_storage_policy.h | 99 -- .../CGAL/TDS_full_cell_mirror_storage_policy.h | 71 - .../include/gudhi_patches/CGAL/Triangulation.h | 1424 ----------------- .../CGAL/Triangulation_data_structure.h | 1603 -------------------- .../CGAL/Triangulation_ds_full_cell.h | 311 ---- .../gudhi_patches/CGAL/Triangulation_ds_vertex.h | 154 -- .../gudhi_patches/CGAL/Triangulation_face.h | 111 -- .../gudhi_patches/CGAL/Triangulation_full_cell.h | 148 -- .../gudhi_patches/CGAL/Triangulation_vertex.h | 128 -- .../include/gudhi_patches/CGAL/argument_swaps.h | 88 -- .../gudhi_patches/CGAL/determinant_of_vectors.h | 117 -- .../CGAL/internal/Combination_enumerator.h | 148 -- .../CGAL/internal/Static_or_dynamic_array.h | 116 -- .../CGAL/internal/Triangulation/Dummy_TDS.h | 49 - .../Triangulation/Triangulation_ds_iterators.h | 154 -- .../CGAL/internal/Triangulation/utilities.h | 154 -- .../gudhi_patches/CGAL/iterator_from_indices.h | 75 - .../gudhi_patches/CGAL/transforming_iterator.h | 123 -- .../CGAL/transforming_pair_iterator.h | 127 -- src/common/include/gudhi_patches/CGAL/typeset.h | 117 -- .../Tangential_complex_CGAL_patches.txt | 82 - src/common/utilities/CMakeLists.txt | 4 +- src/cython/CMakeLists.txt | 30 +- src/cython/doc/alpha_complex_sum.inc | 34 +- src/cython/doc/bottleneck_distance_sum.inc | 22 +- src/cython/doc/conf.py | 2 +- src/cython/doc/cubical_complex_sum.inc | 2 +- src/cython/doc/nerve_gic_complex_sum.inc | 26 +- src/cython/doc/persistence_graphical_tools_sum.inc | 2 +- src/cython/doc/persistent_cohomology_sum.inc | 2 +- src/cython/doc/rips_complex_sum.inc | 2 +- src/cython/doc/simplex_tree_sum.inc | 2 +- src/cython/doc/tangential_complex_sum.inc | 22 +- src/cython/doc/witness_complex_sum.inc | 29 +- src/cython/gudhi.pyx.in | 4 +- 132 files changed, 184 insertions(+), 19657 deletions(-) delete mode 100644 src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt delete 
mode 100644 src/common/include/gudhi_patches/CGAL/Convex_hull.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Epeck_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Epick_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Kd_tree.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Kd_tree_node.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Define_kernel_types.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Dimension_base.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Filtered_predicate2.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h delete mode 100644 
src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h delete mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Regular_triangulation.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h delete mode 100644 src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h delete mode 100644 src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_face.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h delete mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h delete mode 100644 src/common/include/gudhi_patches/CGAL/argument_swaps.h delete mode 100644 src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h delete mode 100644 src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h delete mode 100644 src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h delete mode 100644 src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h delete mode 100644 
src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h delete mode 100644 src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h delete mode 100644 src/common/include/gudhi_patches/CGAL/iterator_from_indices.h delete mode 100644 src/common/include/gudhi_patches/CGAL/transforming_iterator.h delete mode 100644 src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h delete mode 100644 src/common/include/gudhi_patches/CGAL/typeset.h delete mode 100644 src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index ebaddd47..eb2a0666 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,5 +1,5 @@ -set (GUDHI_MAJOR_VERSION 2) -set (GUDHI_MINOR_VERSION 3) +set (GUDHI_MAJOR_VERSION 3) +set (GUDHI_MINOR_VERSION 0) set (GUDHI_PATCH_VERSION 0) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index c62a220c..b069b443 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Alpha_complex_examples) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Alpha_complex_example_from_points Alpha_complex_from_points.cpp ) target_link_libraries(Alpha_complex_example_from_points ${CGAL_LIBRARY}) add_executable ( Alpha_complex_example_from_off Alpha_complex_from_off.cpp ) @@ -26,9 +26,7 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) add_test(Alpha_complex_example_from_off_32_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt) endif() -endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Alpha_complex_example_weighted_3d_from_points Weighted_alpha_complex_3d_from_points.cpp ) target_link_libraries(Alpha_complex_example_weighted_3d_from_points ${CGAL_LIBRARY}) if (TBB_FOUND) diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index 7c6bf9aa..ad5b6314 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -1,7 +1,7 @@ project(Alpha_complex_tests) include(GUDHI_test_coverage) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) @@ -14,9 +14,6 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) gudhi_add_coverage_test(Alpha_complex_test_unit) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) - -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Alpha_complex_3d_test_unit Alpha_complex_3d_unit_test.cpp ) target_link_libraries(Alpha_complex_3d_test_unit ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) add_executable ( Weighted_alpha_complex_3d_test_unit Weighted_alpha_complex_3d_unit_test.cpp ) diff --git a/src/Alpha_complex/utilities/CMakeLists.txt b/src/Alpha_complex/utilities/CMakeLists.txt index e76edc5f..5295f3cd 100644 --- a/src/Alpha_complex/utilities/CMakeLists.txt +++ b/src/Alpha_complex/utilities/CMakeLists.txt @@ -1,6 +1,6 @@ project(Alpha_complex_utilities) -if (NOT CGAL_WITH_EIGEN3_VERSION 
VERSION_LESS 4.7.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable (alpha_complex_persistence alpha_complex_persistence.cpp) target_link_libraries(alpha_complex_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) @@ -12,9 +12,6 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) install(TARGETS alpha_complex_persistence DESTINATION bin) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) - -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp) target_link_libraries(alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) if (TBB_FOUND) diff --git a/src/Bottleneck_distance/benchmark/CMakeLists.txt b/src/Bottleneck_distance/benchmark/CMakeLists.txt index 3105a1d5..77cb013d 100644 --- a/src/Bottleneck_distance/benchmark/CMakeLists.txt +++ b/src/Bottleneck_distance/benchmark/CMakeLists.txt @@ -1,8 +1,8 @@ project(Bottleneck_distance_benchmark) -if (NOT CGAL_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable ( bottleneck_chrono bottleneck_chrono.cpp ) if (TBB_FOUND) target_link_libraries(bottleneck_chrono ${TBB_LIBRARIES}) endif(TBB_FOUND) -endif(NOT CGAL_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Bottleneck_distance/example/CMakeLists.txt b/src/Bottleneck_distance/example/CMakeLists.txt index c6f10127..8987ac39 100644 --- a/src/Bottleneck_distance/example/CMakeLists.txt +++ b/src/Bottleneck_distance/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Bottleneck_distance_examples) -if (NOT CGAL_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable (bottleneck_basic_example bottleneck_basic_example.cpp) add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp) target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_PROGRAM_OPTIONS_LIBRARY}) @@ -18,4 +18,4 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) install(TARGETS bottleneck_basic_example DESTINATION bin) install(TARGETS alpha_rips_persistence_bottleneck_distance DESTINATION bin) -endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Bottleneck_distance/test/CMakeLists.txt b/src/Bottleneck_distance/test/CMakeLists.txt index bb739280..ec2d045f 100644 --- a/src/Bottleneck_distance/test/CMakeLists.txt +++ b/src/Bottleneck_distance/test/CMakeLists.txt @@ -1,6 +1,6 @@ project(Bottleneck_distance_tests) -if (NOT CGAL_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) include(GUDHI_test_coverage) add_executable ( Bottleneck_distance_test_unit bottleneck_unit_test.cpp ) @@ -11,4 +11,4 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) gudhi_add_coverage_test(Bottleneck_distance_test_unit) -endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Bottleneck_distance/utilities/CMakeLists.txt b/src/Bottleneck_distance/utilities/CMakeLists.txt index 2f35885c..86d74cf5 100644 --- a/src/Bottleneck_distance/utilities/CMakeLists.txt +++ b/src/Bottleneck_distance/utilities/CMakeLists.txt @@ -1,6 +1,6 @@ project(Bottleneck_distance_utilities) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable (bottleneck_distance bottleneck_distance.cpp) if (TBB_FOUND) target_link_libraries(bottleneck_distance ${TBB_LIBRARIES}) @@ -12,4 +12,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION 
VERSION_LESS 4.8.1) install(TARGETS bottleneck_distance DESTINATION bin) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index f80d4505..bb53d1ae 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -785,7 +785,6 @@ EXCLUDE = data/ \ GudhUI/ \ cmake/ \ src/cython/ \ - include/gudhi_patches/ \ README.md # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt index 0945e758..ab29097b 100644 --- a/src/GudhUI/CMakeLists.txt +++ b/src/GudhUI/CMakeLists.txt @@ -9,7 +9,7 @@ if (OPENGL_FOUND) find_package(QGLViewer QUIET) if ( QGLVIEWER_FOUND) - if ( CGAL_FOUND AND NOT CGAL_VERSION VERSION_EQUAL 4.8.0) + if ( CGAL_FOUND AND NOT CGAL_VERSION VERSION_LESS 4.11.0) set(CMAKE_AUTOMOC ON) set(CMAKE_AUTOUIC ON) set(CMAKE_INCLUDE_CURRENT_DIR ON) @@ -38,7 +38,7 @@ if (OPENGL_FOUND) install(TARGETS GudhUI DESTINATION bin) set(GUDHI_MODULES ${GUDHI_MODULES} "GudhUI" CACHE INTERNAL "GUDHI_MODULES") else() - message("++ GudhUI will not be compiled because CGAL < 4.8.0 or not found") + message("++ GudhUI will not be compiled because CGAL < 4.11.0 or not found") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "GudhUI" CACHE INTERNAL "GUDHI_MISSING_MODULES") endif() else() diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt index fdecf86e..1667472f 100644 --- a/src/Nerve_GIC/example/CMakeLists.txt +++ b/src/Nerve_GIC/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Nerve_GIC_examples) -if (NOT CGAL_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable ( CoordGIC CoordGIC.cpp ) add_executable ( FuncGIC FuncGIC.cpp ) @@ -25,4 +25,4 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) install(TARGETS CoordGIC DESTINATION bin) install(TARGETS FuncGIC DESTINATION bin) -endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Nerve_GIC/test/CMakeLists.txt b/src/Nerve_GIC/test/CMakeLists.txt index 99263ea0..b89c18a2 100644 --- a/src/Nerve_GIC/test/CMakeLists.txt +++ b/src/Nerve_GIC/test/CMakeLists.txt @@ -1,6 +1,6 @@ project(Graph_induced_complex_tests) -if (NOT CGAL_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) include(GUDHI_test_coverage) add_executable ( Nerve_GIC_test_unit test_GIC.cpp ) @@ -13,4 +13,4 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) gudhi_add_coverage_test(Nerve_GIC_test_unit) -endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt index 215f9dfd..65a08d9a 100644 --- a/src/Nerve_GIC/utilities/CMakeLists.txt +++ b/src/Nerve_GIC/utilities/CMakeLists.txt @@ -1,6 +1,6 @@ project(Nerve_GIC_examples) -if (NOT CGAL_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable ( Nerve Nerve.cpp ) add_executable ( VoronoiGIC VoronoiGIC.cpp ) @@ -24,4 +24,4 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) install(TARGETS VoronoiGIC DESTINATION bin) install(FILES KeplerMapperVisuFromTxtFile.py km.py km.py.COPYRIGHT DESTINATION bin) -endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Persistence_representations/test/CMakeLists.txt b/src/Persistence_representations/test/CMakeLists.txt index fb650485..a95880c9 100644 --- a/src/Persistence_representations/test/CMakeLists.txt +++ 
b/src/Persistence_representations/test/CMakeLists.txt @@ -39,14 +39,14 @@ target_link_libraries(kernels_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(kernels_unit) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) - add_executable (Persistence_intervals_with_distances_test_unit persistence_intervals_with_distances_test.cpp ) - target_link_libraries(Persistence_intervals_with_distances_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) - if (TBB_FOUND) - target_link_libraries(Persistence_intervals_with_distances_test_unit ${TBB_LIBRARIES}) - endif(TBB_FOUND) - gudhi_add_coverage_test(Persistence_intervals_with_distances_test_unit) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + add_executable (Persistence_intervals_with_distances_test_unit persistence_intervals_with_distances_test.cpp ) + target_link_libraries(Persistence_intervals_with_distances_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Persistence_intervals_with_distances_test_unit ${TBB_LIBRARIES}) + endif(TBB_FOUND) + gudhi_add_coverage_test(Persistence_intervals_with_distances_test_unit) +endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt index 649b72cb..a025183e 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt +++ b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt @@ -22,7 +22,7 @@ add_test(NAME Persistence_representation_utilities_compute_number_of_dominant_in install(TARGETS compute_number_of_dominant_intervals DESTINATION bin) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( compute_bottleneck_distance compute_bottleneck_distance.cpp ) if (TBB_FOUND) target_link_libraries(compute_bottleneck_distance ${TBB_LIBRARIES}) @@ -34,4 +34,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) "${CMAKE_CURRENT_BINARY_DIR}/../second.pers") install(TARGETS compute_bottleneck_distance DESTINATION bin) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 0f731519..94ec13c5 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -53,17 +53,16 @@ if(GMP_FOUND) endif(GMPXX_FOUND) endif(GMP_FOUND) -if(CGAL_FOUND) - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) - add_executable(custom_persistence_sort custom_persistence_sort.cpp) - target_link_libraries(custom_persistence_sort ${CGAL_LIBRARY}) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - if (TBB_FOUND) - target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES}) - endif(TBB_FOUND) - add_test(NAME Persistent_cohomology_example_custom_persistence_sort COMMAND $) + add_executable(custom_persistence_sort custom_persistence_sort.cpp) + target_link_libraries(custom_persistence_sort ${CGAL_LIBRARY}) - install(TARGETS custom_persistence_sort DESTINATION bin) + if (TBB_FOUND) + target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES}) + endif(TBB_FOUND) + add_test(NAME Persistent_cohomology_example_custom_persistence_sort COMMAND $) - endif (NOT CGAL_WITH_EIGEN3_VERSION 
VERSION_LESS 4.7.0) -endif(CGAL_FOUND) + install(TARGETS custom_persistence_sort DESTINATION bin) + +endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Spatial_searching/example/CMakeLists.txt b/src/Spatial_searching/example/CMakeLists.txt index 0f799987..eeb3e85f 100644 --- a/src/Spatial_searching/example/CMakeLists.txt +++ b/src/Spatial_searching/example/CMakeLists.txt @@ -1,9 +1,9 @@ project(Spatial_searching_examples) -if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable( Spatial_searching_example_spatial_searching example_spatial_searching.cpp ) target_link_libraries(Spatial_searching_example_spatial_searching ${CGAL_LIBRARY}) add_test(NAME Spatial_searching_example_spatial_searching COMMAND $) install(TARGETS Spatial_searching_example_spatial_searching DESTINATION bin) -endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Spatial_searching/test/CMakeLists.txt b/src/Spatial_searching/test/CMakeLists.txt index b60ab1e3..18f7c6b8 100644 --- a/src/Spatial_searching/test/CMakeLists.txt +++ b/src/Spatial_searching/test/CMakeLists.txt @@ -1,6 +1,6 @@ project(Spatial_searching_tests) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) include(GUDHI_test_coverage) add_executable( Spatial_searching_test_Kd_tree_search test_Kd_tree_search.cpp ) diff --git a/src/Subsampling/example/CMakeLists.txt b/src/Subsampling/example/CMakeLists.txt index f26d107f..28aab103 100644 --- a/src/Subsampling/example/CMakeLists.txt +++ b/src/Subsampling/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Subsampling_examples) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable(Subsampling_example_pick_n_random_points example_pick_n_random_points.cpp) add_executable(Subsampling_example_choose_n_farthest_points example_choose_n_farthest_points.cpp) add_executable(Subsampling_example_custom_kernel example_custom_kernel.cpp) @@ -19,4 +19,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) install(TARGETS Subsampling_example_custom_kernel DESTINATION bin) install(TARGETS Subsampling_example_sparsify_point_set DESTINATION bin) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Subsampling/test/CMakeLists.txt b/src/Subsampling/test/CMakeLists.txt index 924f0925..cf54788e 100644 --- a/src/Subsampling/test/CMakeLists.txt +++ b/src/Subsampling/test/CMakeLists.txt @@ -1,6 +1,6 @@ project(Subsampling_tests) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) include(GUDHI_test_coverage) add_executable( Subsampling_test_pick_n_random_points test_pick_n_random_points.cpp ) @@ -15,4 +15,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) gudhi_add_coverage_test(Subsampling_test_pick_n_random_points) gudhi_add_coverage_test(Subsampling_test_choose_n_farthest_points) gudhi_add_coverage_test(Subsampling_test_sparsify_point_set) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt index f136ab27..621b0fd7 100644 --- a/src/Tangential_complex/benchmark/CMakeLists.txt +++ b/src/Tangential_complex/benchmark/CMakeLists.txt @@ -1,9 +1,9 @@ 
project(Tangential_complex_benchmark) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable(Tangential_complex_benchmark benchmark_tc.cpp) target_link_libraries(Tangential_complex_benchmark ${CGAL_LIBRARY}) if (TBB_FOUND) target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES}) endif(TBB_FOUND) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt index af0dac51..cb1486a4 100644 --- a/src/Tangential_complex/example/CMakeLists.txt +++ b/src/Tangential_complex/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Tangential_complex_examples) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable( Tangential_complex_example_basic example_basic.cpp ) target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY}) add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp ) @@ -17,4 +17,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) install(TARGETS Tangential_complex_example_basic DESTINATION bin) install(TARGETS Tangential_complex_example_with_perturb DESTINATION bin) -endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Tangential_complex/test/CMakeLists.txt b/src/Tangential_complex/test/CMakeLists.txt index 902f19af..ae17a286 100644 --- a/src/Tangential_complex/test/CMakeLists.txt +++ b/src/Tangential_complex/test/CMakeLists.txt @@ -1,6 +1,6 @@ project(Tangential_complex_tests) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) include(GUDHI_test_coverage) add_executable( Tangential_complex_test_TC test_tangential_complex.cpp ) @@ -10,4 +10,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) endif() gudhi_add_coverage_test(Tangential_complex_test_TC) -endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt index 3d838c0d..5860f3a3 100644 --- a/src/Witness_complex/example/CMakeLists.txt +++ b/src/Witness_complex/example/CMakeLists.txt @@ -10,7 +10,7 @@ add_test(NAME Witness_complex_example_nearest_landmark_table install(TARGETS Witness_complex_example_nearest_landmark_table DESTINATION bin) # CGAL and Eigen3 are required for Euclidean version of Witness -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable( Witness_complex_example_off example_witness_complex_off.cpp ) add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp ) @@ -31,4 +31,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) install(TARGETS Witness_complex_example_strong_off DESTINATION bin) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Witness_complex/test/CMakeLists.txt b/src/Witness_complex/test/CMakeLists.txt index 58ac60c5..96188e46 100644 --- a/src/Witness_complex/test/CMakeLists.txt +++ b/src/Witness_complex/test/CMakeLists.txt @@ -11,7 +11,7 @@ endif(TBB_FOUND) gudhi_add_coverage_test(Witness_complex_test_simple_witness_complex) # CGAL and Eigen3 are required for Euclidean version of Witness -if (NOT 
CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Witness_complex_test_euclidean_simple_witness_complex test_euclidean_simple_witness_complex.cpp ) target_link_libraries(Witness_complex_test_euclidean_simple_witness_complex ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) if (TBB_FOUND) @@ -19,4 +19,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) endif(TBB_FOUND) gudhi_add_coverage_test(Witness_complex_test_euclidean_simple_witness_complex) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/Witness_complex/utilities/CMakeLists.txt b/src/Witness_complex/utilities/CMakeLists.txt index ce5e29f2..3ee0c2f6 100644 --- a/src/Witness_complex/utilities/CMakeLists.txt +++ b/src/Witness_complex/utilities/CMakeLists.txt @@ -1,7 +1,7 @@ project(Witness_complex_utilities) # CGAL and Eigen3 are required for Euclidean version of Witness -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Witness_complex_strong_witness_persistence strong_witness_persistence.cpp ) target_link_libraries(Witness_complex_strong_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY}) @@ -24,4 +24,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) install(TARGETS Witness_complex_strong_witness_persistence DESTINATION bin) install(TARGETS Witness_complex_weak_witness_persistence DESTINATION bin) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 57ea7d14..477900ae 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -23,42 +23,14 @@ endif() # but it implies to use cmake version 3.1 at least. find_package(CGAL QUIET) -# Only CGAL versions > 4.4 supports what Gudhi uses from CGAL -if (CGAL_VERSION VERSION_LESS 4.4.0) +# Only CGAL versions > 4.11 supports what Gudhi uses from CGAL +if (CGAL_FOUND AND CGAL_VERSION VERSION_LESS 4.11.0) message("++ CGAL version ${CGAL_VERSION} is considered too old to be used by Gudhi.") unset(CGAL_FOUND) endif() if(CGAL_FOUND) message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) - - if (NOT CGAL_VERSION VERSION_LESS 4.8.0) - # HACK to detect CGAL version 4.8.0 - # CGAL version 4.8, 4.8.1 and 4.8.2 are identified as version 4.8.1000) - # cf. 
https://github.com/CGAL/cgal/issues/1559 - # Limit the HACK between CGAL versions 4.8 and 4.9 because of file read - if (NOT CGAL_VERSION VERSION_GREATER 4.9.0) - foreach(CGAL_INCLUDE_DIR ${CGAL_INCLUDE_DIRS}) - if (EXISTS "${CGAL_INCLUDE_DIR}/CGAL/version.h") - FILE(READ "${CGAL_INCLUDE_DIR}/CGAL/version.h" contents) - STRING(REGEX REPLACE "\n" ";" contents "${contents}") - foreach(Line ${contents}) - if("${Line}" STREQUAL "#define CGAL_VERSION 4.8") - set(CGAL_VERSION 4.8.0) - message (">>>>> HACK CGAL version to ${CGAL_VERSION}") - endif("${Line}" STREQUAL "#define CGAL_VERSION 4.8") - endforeach(Line ${contents}) - endif (EXISTS "${CGAL_INCLUDE_DIR}/CGAL/version.h") - endforeach(CGAL_INCLUDE_DIR ${CGAL_INCLUDE_DIRS}) - endif(NOT CGAL_VERSION VERSION_GREATER 4.9.0) - - if (CGAL_VERSION VERSION_LESS 4.11.0) - # For dev version - include_directories(BEFORE "src/common/include/gudhi_patches") - # For user version - include_directories(BEFORE "include/gudhi_patches") - endif () - endif() endif() option(WITH_GUDHI_USE_TBB "Build with Intel TBB parallelization" ON) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index 2ed48c48..91eee6b5 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -29,7 +29,7 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/README.md ${GUDHI_USER_VERSION_DIR}/README.md) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy ${CMAKE_SOURCE_DIR}/COPYING ${GUDHI_USER_VERSION_DIR}/COPYING) + copy ${CMAKE_SOURCE_DIR}/LICENSE ${GUDHI_USER_VERSION_DIR}/LICENSE) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/src/CMakeLists.txt ${GUDHI_USER_VERSION_DIR}/CMakeLists.txt) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E @@ -51,11 +51,8 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI) set(GUDHI_DIRECTORIES "doc;example;concept;utilities") -if (CGAL_VERSION VERSION_LESS 4.11.0) - set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches") -else () - set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi") -endif () + +set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi") foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST}) foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES}) diff --git a/src/common/doc/footer.html b/src/common/doc/footer.html index a557922b..4168c6bc 100644 --- a/src/common/doc/footer.html +++ b/src/common/doc/footer.html @@ -6,7 +6,7 @@ $projectname  Version $projectnumber  - $projectbrief - - Copyright : GPL v3 + - Copyright : MIT diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index 98169f82..18bf0ce2 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -19,7 +19,7 @@ Author: Pawel Dlotko
Introduced in: GUDHI 1.3.0
- Copyright: GPL v3
+ Copyright: MIT
@@ -49,10 +49,9 @@ Author: Vincent Rouvreau
Introduced in: GUDHI 1.3.0
- Copyright: GPL v3
+ Copyright: MIT [(GPL v3)](../../licensing/)
Requires: \ref eigen3 and
- \ref cgal ≥ 4.7.0 for Alpha_complex
- \ref cgal ≥ 4.11.0 for Alpha_complex_3d + \ref cgal ≥ 4.11.0 @@ -76,7 +75,8 @@ Author: Vincent Rouvreau
Introduced in: GUDHI 2.2.0
- Copyright: GPL v3
+ Copyright: MIT [(GPL v3)](../../licensing/)
+ Includes: [Miniball](https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html)
@@ -102,7 +102,7 @@ Author: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse
Introduced in: GUDHI 2.0.0
- Copyright: GPL v3
+ Copyright: MIT
@@ -126,8 +126,8 @@ Author: Siargey Kachanovich
Introduced in: GUDHI 1.3.0
- Copyright: GPL v3
- Euclidean version requires: \ref cgal ≥ 4.6.0 and \ref eigen3 + Copyright: MIT ([GPL v3](../../licensing/) for Euclidean version)
+ Euclidean version requires: \ref cgal ≥ 4.11.0 and \ref eigen3 @@ -152,8 +152,8 @@ Author: Mathieu Carrière
Introduced in: GUDHI 2.1.0
- Copyright: GPL v3
- Requires: \ref cgal ≥ 4.8.1 + Copyright: MIT [(GPL v3)](../../licensing/)
+ Requires: \ref cgal ≥ 4.11.0 @@ -181,7 +181,7 @@ Author: Clément Maria
Introduced in: GUDHI 1.0.0
- Copyright: GPL v3
+ Copyright: MIT
@@ -209,7 +209,7 @@ Author: David Salinas
Introduced in: GUDHI 1.1.0
- Copyright: GPL v3
+ Copyright: MIT
@@ -234,7 +234,7 @@ Author: François Godi
Introduced in: GUDHI 2.1.0
- Copyright: GPL v3
+ Copyright: MIT
@@ -253,17 +253,17 @@ \image html "sphere_contraction_representation.png" - - Author: David Salinas
- Introduced in: GUDHI 1.1.0
- Copyright: GPL v3
- The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice, the size of this data-structure is even much lower than the total number of simplices. + + Author: David Salinas
+ Introduced in: GUDHI 1.1.0
+ Copyright: MIT [(GPL v3)](../../licensing/)
+ @@ -294,7 +294,7 @@ Author: Clément Maria
Introduced in: GUDHI 1.0.0
- Copyright: GPL v3
+ Copyright: MIT
@@ -323,8 +323,8 @@ Author: Clément Jamin
Introduced in: GUDHI 2.0.0
- Copyright: GPL v3
- Requires: \ref cgal ≥ 4.8.1 and \ref eigen3 + Copyright: MIT [(GPL v3)](../../licensing/)
+ Requires: \ref cgal ≥ 4.11.0 and \ref eigen3 @@ -354,8 +354,8 @@ Author: François Godi
Introduced in: GUDHI 2.0.0
- Copyright: GPL v3
- Requires: \ref cgal ≥ 4.8.1 + Copyright: MIT [(GPL v3)](../../licensing/)
+ Requires: \ref cgal ≥ 4.11.0 @@ -380,7 +380,7 @@ Author: Pawel Dlotko
Introduced in: GUDHI 2.1.0
- Copyright: GPL v3
+ Copyright: MIT
diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 04015cdc..b9263cc9 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -25,12 +25,12 @@ if(CGAL_FOUND) install(TARGETS cgal_3D_off_reader DESTINATION bin) # need CGAL 4.7 and Eigen3 - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) + if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp ) target_link_libraries(cgal_off_reader ${CGAL_LIBRARY}) add_test(NAME Common_example_vector_cgal_off_reader COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off") install(TARGETS cgal_off_reader DESTINATION bin) - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) + endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) endif() diff --git a/src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt b/src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt deleted file mode 100644 index a588d113..00000000 --- a/src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt +++ /dev/null @@ -1,3 +0,0 @@ -CGAL/Kd_tree.h -CGAL/Kd_tree_node.h -CGAL/Orthogonal_incremental_neighbor_search.h diff --git a/src/common/include/gudhi_patches/CGAL/Convex_hull.h b/src/common/include/gudhi_patches/CGAL/Convex_hull.h deleted file mode 100644 index a8f91bf8..00000000 --- a/src/common/include/gudhi_patches/CGAL/Convex_hull.h +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -/* RANDOM DESIGN IDEAS: -- Use a policy tag to choose for incremental with inserts only or - incremental with removals and inserts. - In the first case: use Triangulation for storage. - In the second case: use Delaunay ! - In this second case, we must keeps the points that are inserted in the hull, - as they may become part of the boundary later on, when some points are removed. -- Constructor with range argument uses quickhull. -*/ - -#ifndef CGAL_CONVEX_HULL_H -#define CGAL_CONVEX_HULL_H - -namespace CGAL { - -template < class CHTraits, class TDS_ = Default > -class Convex_hull -{ - typedef typename Maximal_dimension::type - Maximal_dimension_; - typedef typename Default::Get, - Triangulation_full_cell > - >::type TDS; - typedef Convex_hull Self; - - typedef typename CHTraits::Coaffine_orientation_d - Coaffine_orientation_d; - typedef typename CHTraits::Orientation_d Orientation_d; - -public: -}; - -} //namespace CGAL - -#endif // CGAL_CONVEX_HULL_H diff --git a/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h b/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h deleted file mode 100644 index 071cd184..00000000 --- a/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h +++ /dev/null @@ -1,933 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). 
-// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_DELAUNAY_COMPLEX_H -#define CGAL_DELAUNAY_COMPLEX_H - -#include -#include -#include -#include - -#include - -#include - -namespace CGAL { - -template< typename DCTraits, typename _TDS = Default > -class Delaunay_triangulation -: public Triangulation, - Triangulation_full_cell > - >::type > -{ - typedef typename DCTraits::Dimension Maximal_dimension_; - typedef typename Default::Get<_TDS, Triangulation_data_structure< - Maximal_dimension_, - Triangulation_vertex, - Triangulation_full_cell > - >::type TDS; - typedef Triangulation Base; - typedef Delaunay_triangulation Self; - - typedef typename DCTraits::Side_of_oriented_sphere_d - Side_of_oriented_sphere_d; - typedef typename DCTraits::Orientation_d Orientation_d; - -public: // PUBLIC NESTED TYPES - - typedef DCTraits Geom_traits; - typedef typename Base::Triangulation_ds Triangulation_ds; - - typedef typename Base::Vertex Vertex; - typedef typename Base::Full_cell Full_cell; - typedef typename Base::Facet Facet; - typedef typename Base::Face Face; - - typedef typename Base::Maximal_dimension Maximal_dimension; - typedef typename DCTraits::Point_d Point; - typedef typename DCTraits::Point_d Point_d; - - typedef typename Base::Vertex_handle Vertex_handle; - typedef typename Base::Vertex_iterator Vertex_iterator; - typedef typename Base::Vertex_const_handle Vertex_const_handle; - typedef typename Base::Vertex_const_iterator Vertex_const_iterator; - - typedef typename Base::Full_cell_handle Full_cell_handle; - typedef typename Base::Full_cell_iterator Full_cell_iterator; - typedef typename Base::Full_cell_const_handle Full_cell_const_handle; - typedef typename Base::Full_cell_const_iterator Full_cell_const_iterator; - typedef typename Base::Finite_full_cell_const_iterator - Finite_full_cell_const_iterator; - - typedef typename Base::size_type size_type; - typedef typename Base::difference_type difference_type; - - typedef typename Base::Locate_type Locate_type; - - //Tag to distinguish triangulations with weighted_points - typedef Tag_false Weighted_tag; - -protected: // DATA MEMBERS - - -public: - - using typename Base::Rotor; - using Base::maximal_dimension; - using Base::are_incident_full_cells_valid; - using Base::coaffine_orientation_predicate; - using Base::reset_flat_orientation; - using Base::current_dimension; - //using Base::star; - //using Base::incident_full_cells; - using Base::geom_traits; - using Base::index_of_covertex; - //using Base::index_of_second_covertex; - using Base::infinite_vertex; - using Base::rotate_rotor; - using Base::insert_in_hole; - using Base::insert_outside_convex_hull_1; - using Base::is_infinite; - using Base::locate; - using Base::points_begin; - using Base::set_neighbors; - using Base::new_full_cell; - using Base::number_of_vertices; - using Base::orientation; - using Base::tds; - using Base::reorient_full_cells; - using 
Base::full_cell; - using Base::full_cells_begin; - using Base::full_cells_end; - using Base::finite_full_cells_begin; - using Base::finite_full_cells_end; - using Base::vertices_begin; - using Base::vertices_end; - // using Base:: - -private: - //*** Side_of_oriented_subsphere_d *** - typedef typename Base::Flat_orientation_d Flat_orientation_d; - typedef typename Base::Construct_flat_orientation_d Construct_flat_orientation_d; - typedef typename DCTraits::In_flat_side_of_oriented_sphere_d In_flat_side_of_oriented_sphere_d; - // Wrapper - struct Side_of_oriented_subsphere_d - { - boost::optional* fop; - Construct_flat_orientation_d cfo; - In_flat_side_of_oriented_sphere_d ifsoos; - - Side_of_oriented_subsphere_d( - boost::optional& x, - Construct_flat_orientation_d const&y, - In_flat_side_of_oriented_sphere_d const&z) - : fop(&x), cfo(y), ifsoos(z) {} - - template - CGAL::Orientation operator()(Iter a, Iter b, const Point & p)const - { - if(!*fop) - *fop=cfo(a,b); - return ifsoos(fop->get(),a,b,p); - } - }; -public: - -// - - - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS - - Delaunay_triangulation(int dim, const Geom_traits &k = Geom_traits()) - : Base(dim, k) - { - } - - // With this constructor, - // the user can specify a Flat_orientation_d object to be used for - // orienting simplices of a specific dimension - // (= preset_flat_orientation_.first) - // It it used by the dark triangulations created by DT::remove - Delaunay_triangulation( - int dim, - const std::pair &preset_flat_orientation, - const Geom_traits &k = Geom_traits()) - : Base(dim, preset_flat_orientation, k) - { - } - - ~Delaunay_triangulation() {} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS - - // Not Documented - Side_of_oriented_subsphere_d side_of_oriented_subsphere_predicate() const - { - return Side_of_oriented_subsphere_d ( - flat_orientation_, - geom_traits().construct_flat_orientation_d_object(), - geom_traits().in_flat_side_of_oriented_sphere_d_object() - ); - } - - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS - - Full_cell_handle remove(Vertex_handle); - Full_cell_handle remove(const Point & p, Full_cell_handle hint = Full_cell_handle()) - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, hint); - if( Base::ON_VERTEX == lt ) - { - return remove(s->vertex(f.index(0))); - } - return Full_cell_handle(); - } - - template< typename ForwardIterator > - void remove(ForwardIterator start, ForwardIterator end) - { - while( start != end ) - remove(*start++); - } - - // Not documented - void remove_decrease_dimension(Vertex_handle); - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS - - template< typename ForwardIterator > - size_type insert(ForwardIterator start, ForwardIterator end) - { - size_type n = number_of_vertices(); - std::vector points(start, end); - spatial_sort(points.begin(), points.end(), geom_traits()); - Full_cell_handle hint; - for( typename std::vector::const_iterator p = points.begin(); p != points.end(); ++p ) - { - hint = insert(*p, hint)->full_cell(); - } - return number_of_vertices() - n; - } - Vertex_handle insert(const Point &, Locate_type, const Face &, const Facet &, Full_cell_handle); - Vertex_handle insert(const Point & p, Full_cell_handle start = Full_cell_handle()) - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, start); - return insert(p, 
lt, f, ft, s); - } - Vertex_handle insert(const Point & p, Vertex_handle hint) - { - CGAL_assertion( Vertex_handle() != hint ); - return insert(p, hint->full_cell()); - } - Vertex_handle insert_outside_affine_hull(const Point &); - Vertex_handle insert_in_conflicting_cell(const Point &, Full_cell_handle); - -// - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES - - bool is_in_conflict(const Point &, Full_cell_const_handle) const; - template< class OrientationPredicate > - Oriented_side perturbed_side_of_positive_sphere(const Point &, - Full_cell_const_handle, const OrientationPredicate &) const; - - template< typename OutputIterator > - Facet compute_conflict_zone(const Point &, Full_cell_handle, OutputIterator) const; - - template < typename OrientationPredicate, typename SideOfOrientedSpherePredicate > - class Conflict_predicate - { - const Self & dc_; - const Point & p_; - OrientationPredicate ori_; - SideOfOrientedSpherePredicate side_of_s_; - int cur_dim_; - public: - Conflict_predicate( - const Self & dc, - const Point & p, - const OrientationPredicate & ori, - const SideOfOrientedSpherePredicate & side) - : dc_(dc), p_(p), ori_(ori), side_of_s_(side), cur_dim_(dc.current_dimension()) {} - - inline - bool operator()(Full_cell_const_handle s) const - { - bool ok; - if( ! dc_.is_infinite(s) ) - { - Oriented_side side = side_of_s_(dc_.points_begin(s), dc_.points_begin(s) + cur_dim_ + 1, p_); - if( ON_POSITIVE_SIDE == side ) - ok = true; - else if( ON_NEGATIVE_SIDE == side ) - ok = false; - else - ok = ON_POSITIVE_SIDE == dc_.perturbed_side_of_positive_sphere(p_, s, ori_); - } - else - { - typedef typename Full_cell::Vertex_handle_const_iterator VHCI; - typedef Substitute_point_in_vertex_iterator F; - F spivi(dc_.infinite_vertex(), &p_); - - Orientation o = ori_( - boost::make_transform_iterator(s->vertices_begin(), spivi), - boost::make_transform_iterator(s->vertices_begin() + cur_dim_ + 1, - spivi)); - - if( POSITIVE == o ) - ok = true; - else if( o == NEGATIVE ) - ok = false; - else - ok = (*this)(s->neighbor( s->index( dc_.infinite_vertex() ) )); - } - return ok; - } - }; - - template < typename ConflictPredicate > - class Conflict_traversal_predicate - { - const Self & dc_; - const ConflictPredicate & pred_; - public: - Conflict_traversal_predicate(const Self & dc, const ConflictPredicate & pred) - : dc_(dc), pred_(pred) - {} - inline - bool operator()(const Facet & f) const - { - return pred_(dc_.full_cell(f)->neighbor(dc_.index_of_covertex(f))); - } - }; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY - - bool is_valid(bool verbose = false, int level = 0) const; - -private: - // Some internal types to shorten notation - typedef typename Base::Coaffine_orientation_d Coaffine_orientation_d; - using Base::flat_orientation_; - typedef Conflict_predicate - Conflict_pred_in_subspace; - typedef Conflict_predicate - Conflict_pred_in_fullspace; - typedef Conflict_traversal_predicate - Conflict_traversal_pred_in_subspace; - typedef Conflict_traversal_predicate - Conflict_traversal_pred_in_fullspace; -}; - -// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = -// FUNCTIONS THAT ARE MEMBER METHODS: - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS - -template< typename DCTraits, typename TDS > -typename Delaunay_triangulation::Full_cell_handle -Delaunay_triangulation -::remove( Vertex_handle v ) -{ - CGAL_precondition( ! 
is_infinite(v) ); - CGAL_expensive_precondition( is_vertex(v) ); - - // THE CASE cur_dim == 0 - if( 0 == current_dimension() ) - { - remove_decrease_dimension(v); - return Full_cell_handle(); - } - else if( 1 == current_dimension() ) - { // THE CASE cur_dim == 1 - if( 2 == number_of_vertices() ) - { - remove_decrease_dimension(v); - return Full_cell_handle(); - } - Full_cell_handle left = v->full_cell(); - if( 0 == left->index(v) ) - left = left->neighbor(1); - CGAL_assertion( 1 == left->index(v) ); - Full_cell_handle right = left->neighbor(0); - - tds().associate_vertex_with_full_cell(left, 1, right->vertex(1)); - set_neighbors(left, 0, right->neighbor(0), right->mirror_index(0)); - - tds().delete_vertex(v); - tds().delete_full_cell(right); - return left; - } - - // THE CASE cur_dim >= 2 - // Gather the finite vertices sharing an edge with |v| - typedef typename Base::template Full_cell_set Simplices; - Simplices simps; - std::back_insert_iterator out(simps); - tds().incident_full_cells(v, out); - typedef std::set Vertex_set; - Vertex_set verts; - Vertex_handle vh; - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - for( int i = 0; i <= current_dimension(); ++i ) - { - vh = (*it)->vertex(i); - if( is_infinite(vh) ) - continue; - if( vh == v ) - continue; - verts.insert(vh); - } - - // After gathering finite neighboring vertices, create their Dark Delaunay triangulation - typedef Triangulation_vertex Dark_vertex_base; - typedef Triangulation_full_cell > Dark_full_cell_base; - typedef Triangulation_data_structure Dark_tds; - typedef Delaunay_triangulation Dark_triangulation; - typedef typename Dark_triangulation::Face Dark_face; - typedef typename Dark_triangulation::Facet Dark_facet; - typedef typename Dark_triangulation::Vertex_handle Dark_v_handle; - typedef typename Dark_triangulation::Full_cell_handle Dark_s_handle; - - // If flat_orientation_ is defined, we give it the Dark triangulation - // so that the orientation it uses for "current_dimension()"-simplices is - // coherent with the global triangulation - Dark_triangulation dark_side( - maximal_dimension(), - flat_orientation_ ? - std::pair(current_dimension(), flat_orientation_.get_ptr()) - : std::pair((std::numeric_limits::max)(), (Flat_orientation_d*) NULL) ); - - Dark_s_handle dark_s; - Dark_v_handle dark_v; - typedef std::map Vertex_map; - Vertex_map light_to_dark; - typename Vertex_set::iterator vit = verts.begin(); - while( vit != verts.end() ) - { - dark_v = dark_side.insert((*vit)->point(), dark_s); - dark_s = dark_v->full_cell(); - dark_v->data() = *vit; - light_to_dark[*vit] = dark_v; - ++vit; - } - - if( dark_side.current_dimension() != current_dimension() ) - { - CGAL_assertion( dark_side.current_dimension() + 1 == current_dimension() ); - // Here, the finite neighbors of |v| span a affine subspace of - // dimension one less than the current dimension. Two cases are possible: - if( (size_type)(verts.size() + 1) == number_of_vertices() ) - { - remove_decrease_dimension(v); - return Full_cell_handle(); - } - else - { // |v| is strictly outside the convex hull of the rest of the points. This is an - // easy case: first, modify the finite full_cells, then, delete the infinite ones. - // We don't even need the Dark triangulation. 
- Simplices infinite_simps; - { - Simplices finite_simps; - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - if( is_infinite(*it) ) - infinite_simps.push_back(*it); - else - finite_simps.push_back(*it); - simps.swap(finite_simps); - } // now, simps only contains finite simplices - // First, modify the finite full_cells: - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - { - int v_idx = (*it)->index(v); - tds().associate_vertex_with_full_cell(*it, v_idx, infinite_vertex()); - } - // Make the handles to infinite full cells searchable - infinite_simps.make_searchable(); - // Then, modify the neighboring relation - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - { - for( int i = 0; i <= current_dimension(); ++i ) - { - if (is_infinite((*it)->vertex(i))) - continue; - (*it)->vertex(i)->set_full_cell(*it); - Full_cell_handle n = (*it)->neighbor(i); - // Was |n| a finite full cell prior to removing |v| ? - if( ! infinite_simps.contains(n) ) - continue; - int n_idx = n->index(v); - set_neighbors(*it, i, n->neighbor(n_idx), n->neighbor(n_idx)->index(n)); - } - } - Full_cell_handle ret_s; - // Then, we delete the infinite full_cells - for( typename Simplices::iterator it = infinite_simps.begin(); it != infinite_simps.end(); ++it ) - tds().delete_full_cell(*it); - tds().delete_vertex(v); - return simps.front(); - } - } - else // From here on, dark_side.current_dimension() == current_dimension() - { - dark_side.infinite_vertex()->data() = infinite_vertex(); - light_to_dark[infinite_vertex()] = dark_side.infinite_vertex(); - } - - // Now, compute the conflict zone of v->point() in - // the dark side. This is precisely the set of full_cells - // that we have to glue back into the light side. - Dark_face dark_f(dark_side.maximal_dimension()); - Dark_facet dark_ft; - typename Dark_triangulation::Locate_type lt; - dark_s = dark_side.locate(v->point(), lt, dark_f, dark_ft); - CGAL_assertion( lt != Dark_triangulation::ON_VERTEX - && lt != Dark_triangulation::OUTSIDE_AFFINE_HULL ); - - // |ret_s| is the full_cell that we return - Dark_s_handle dark_ret_s = dark_s; - Full_cell_handle ret_s; - - typedef typename Base::template Full_cell_set Dark_full_cells; - Dark_full_cells conflict_zone; - std::back_insert_iterator dark_out(conflict_zone); - - dark_ft = dark_side.compute_conflict_zone(v->point(), dark_s, dark_out); - // Make the dark simplices in the conflict zone searchable - conflict_zone.make_searchable(); - - // THE FOLLOWING SHOULD MAYBE GO IN TDS. - // Here is the plan: - // 1. Pick any Facet from boundary of the light zone - // 2. Find corresponding Facet on boundary of dark zone - // 3. stitch. - - // 1. Build a facet on the boudary of the light zone: - Full_cell_handle light_s = *simps.begin(); - Facet light_ft(light_s, light_s->index(v)); - - // 2. 
Find corresponding Dark_facet on boundary of the dark zone - Dark_full_cells dark_incident_s; - for( int i = 0; i <= current_dimension(); ++i ) - { - if( index_of_covertex(light_ft) == i ) - continue; - Dark_v_handle dark_v = light_to_dark[full_cell(light_ft)->vertex(i)]; - dark_incident_s.clear(); - dark_out = std::back_inserter(dark_incident_s); - dark_side.tds().incident_full_cells(dark_v, dark_out); - for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it ) - { - (*it)->data().count_ += 1; - } - } - - for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it ) - { - if( current_dimension() != (*it)->data().count_ ) - continue; - if( ! conflict_zone.contains(*it) ) - continue; - // We found a full_cell incident to the dark facet corresponding to the light facet |light_ft| - int ft_idx = 0; - while( light_s->has_vertex( (*it)->vertex(ft_idx)->data() ) ) - ++ft_idx; - dark_ft = Dark_facet(*it, ft_idx); - break; - } - // Pre-3. Now, we are ready to traverse both boundary and do the stiching. - - // But first, we create the new full_cells in the light triangulation, - // with as much adjacency information as possible. - - // Create new full_cells with vertices - for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) - { - Full_cell_handle new_s = new_full_cell(); - (*it)->data().light_copy_ = new_s; - for( int i = 0; i <= current_dimension(); ++i ) - tds().associate_vertex_with_full_cell(new_s, i, (*it)->vertex(i)->data()); - if( dark_ret_s == *it ) - ret_s = new_s; - } - - // Setup adjacencies inside the hole - for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) - { - Full_cell_handle new_s = (*it)->data().light_copy_; - for( int i = 0; i <= current_dimension(); ++i ) - if( conflict_zone.contains((*it)->neighbor(i)) ) - tds().set_neighbors(new_s, i, (*it)->neighbor(i)->data().light_copy_, (*it)->mirror_index(i)); - } - - // 3. Stitch - simps.make_searchable(); - typedef std::queue > Queue; - Queue q; - q.push(std::make_pair(light_ft, dark_ft)); - dark_s = dark_side.full_cell(dark_ft); - int dark_i = dark_side.index_of_covertex(dark_ft); - // mark dark_ft as visited: - // TODO try by marking with Dark_v_handle (vertex) - dark_s->neighbor(dark_i)->set_neighbor(dark_s->mirror_index(dark_i), Dark_s_handle()); - while( ! 
q.empty() ) - { - std::pair p = q.front(); - q.pop(); - light_ft = p.first; - dark_ft = p.second; - light_s = full_cell(light_ft); - int light_i = index_of_covertex(light_ft); - dark_s = dark_side.full_cell(dark_ft); - int dark_i = dark_side.index_of_covertex(dark_ft); - Full_cell_handle light_n = light_s->neighbor(light_i); - set_neighbors(dark_s->data().light_copy_, dark_i, light_n, light_s->mirror_index(light_i)); - for( int di = 0; di <= current_dimension(); ++di ) - { - if( di == dark_i ) - continue; - int li = light_s->index(dark_s->vertex(di)->data()); - Rotor light_r(light_s, li, light_i); - typename Dark_triangulation::Rotor dark_r(dark_s, di, dark_i); - - while (simps.contains(cpp11::get<0>(light_r)->neighbor(cpp11::get<1>(light_r)))) - light_r = rotate_rotor(light_r); - - while (conflict_zone.contains(cpp11::get<0>(dark_r)->neighbor(cpp11::get<1>(dark_r)))) - dark_r = dark_side.rotate_rotor(dark_r); - - Dark_s_handle dark_ns = cpp11::get<0>(dark_r); - int dark_ni = cpp11::get<1>(dark_r); - Full_cell_handle light_ns = cpp11::get<0>(light_r); - int light_ni = cpp11::get<1>(light_r); - // mark dark_r as visited: - // TODO try by marking with Dark_v_handle (vertex) - Dark_s_handle outside = dark_ns->neighbor(dark_ni); - Dark_v_handle mirror = dark_ns->mirror_vertex(dark_ni, current_dimension()); - int dn = outside->index(mirror); - if( Dark_s_handle() == outside->neighbor(dn) ) - continue; - outside->set_neighbor(dn, Dark_s_handle()); - q.push(std::make_pair(Facet(light_ns, light_ni), Dark_facet(dark_ns, dark_ni))); - } - } - tds().delete_full_cells(simps.begin(), simps.end()); - tds().delete_vertex(v); - return ret_s; -} - -template< typename DCTraits, typename TDS > -void -Delaunay_triangulation -::remove_decrease_dimension(Vertex_handle v) -{ - CGAL_precondition( current_dimension() >= 0 ); - tds().remove_decrease_dimension(v, infinite_vertex()); - // reset the predicates: - reset_flat_orientation(); - if( 1 <= current_dimension() ) - { - Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); - int inf_v_index = inf_v_cell->index(infinite_vertex()); - Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); - Orientation o = orientation(s); - CGAL_assertion( ZERO != o ); - if( NEGATIVE == o ) - reorient_full_cells(); - } -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS - -template< typename DCTraits, typename TDS > -typename Delaunay_triangulation::Vertex_handle -Delaunay_triangulation -::insert(const Point & p, Locate_type lt, const Face & f, const Facet &, Full_cell_handle s) -{ - switch( lt ) - { - case Base::OUTSIDE_AFFINE_HULL: - return insert_outside_affine_hull(p); - break; - case Base::ON_VERTEX: - { - Vertex_handle v = s->vertex(f.index(0)); - v->set_point(p); - return v; - break; - } - default: - if( 1 == current_dimension() ) - { - if( Base::OUTSIDE_CONVEX_HULL == lt ) - { - return insert_outside_convex_hull_1(p, s); - } - Vertex_handle v = tds().insert_in_full_cell(s); - v->set_point(p); - return v; - } - else - return insert_in_conflicting_cell(p, s); - break; - } -} - -/* -[Undocumented function] - -Inserts the point `p` in the Delaunay triangulation. Returns a handle to the -(possibly newly created) vertex at that position. -\pre The point `p` -must lie outside the affine hull of the Delaunay triangulation. This implies that -`dt`.`current_dimension()` must be less than `dt`.`maximal_dimension()`. 
-*/ -template< typename DCTraits, typename TDS > -typename Delaunay_triangulation::Vertex_handle -Delaunay_triangulation -::insert_outside_affine_hull(const Point & p) -{ - // we don't use Base::insert_outside_affine_hull(...) because here, we - // also need to reset the side_of_oriented_subsphere functor. - CGAL_precondition( current_dimension() < maximal_dimension() ); - Vertex_handle v = tds().insert_increase_dimension(infinite_vertex()); - // reset the predicates: - reset_flat_orientation(); - v->set_point(p); - if( current_dimension() >= 1 ) - { - Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); - int inf_v_index = inf_v_cell->index(infinite_vertex()); - Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); - Orientation o = orientation(s); - CGAL_assertion( ZERO != o ); - if( NEGATIVE == o ) - reorient_full_cells(); - - // We just inserted the second finite point and the right infinite - // cell is like : (inf_v, v), but we want it to be (v, inf_v) to be - // consistent with the rest of the cells - if (current_dimension() == 1) - { - // Is "inf_v_cell" the right infinite cell? - // Then inf_v_index should be 1 - if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 - && inf_v_index == 0) - { - inf_v_cell->swap_vertices( - current_dimension() - 1, current_dimension()); - } - // Otherwise, let's find the right infinite cell - else - { - inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2); - inf_v_index = inf_v_cell->index(infinite_vertex()); - // Is "inf_v_cell" the right infinite cell? - // Then inf_v_index should be 1 - if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 - && inf_v_index == 0) - { - inf_v_cell->swap_vertices( - current_dimension() - 1, current_dimension()); - } - } - } - } - return v; -} - -/*! -[Undocumented function] - -Inserts the point `p` in the Delaunay triangulation. Returns a handle to the -(possibly newly created) vertex at that position. -\pre The point `p` must be in conflict with the full cell `c`. -*/ -template< typename DCTraits, typename TDS > -typename Delaunay_triangulation::Vertex_handle -Delaunay_triangulation -::insert_in_conflicting_cell(const Point & p, Full_cell_handle s) -{ - CGAL_precondition(is_in_conflict(p, s)); - - // for storing conflicting full_cells. - typedef std::vector Full_cell_h_vector; - CGAL_STATIC_THREAD_LOCAL_VARIABLE(Full_cell_h_vector,cs,0); - cs.clear(); - - std::back_insert_iterator out(cs); - Facet ft = compute_conflict_zone(p, s, out); - return insert_in_hole(p, cs.begin(), cs.end(), ft); -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES - -// NOT DOCUMENTED -template< typename DCTraits, typename TDS > -template< typename OrientationPred > -Oriented_side -Delaunay_triangulation -::perturbed_side_of_positive_sphere(const Point & p, Full_cell_const_handle s, - const OrientationPred & ori) const -{ - CGAL_precondition_msg( ! 
is_infinite(s), "full cell must be finite"); - CGAL_expensive_precondition( POSITIVE == orientation(s) ); - typedef std::vector Points; - Points points(current_dimension() + 2); - int i(0); - for( ; i <= current_dimension(); ++i ) - points[i] = &(s->vertex(i)->point()); - points[i] = &p; - std::sort(points.begin(), points.end(), - internal::Triangulation::Compare_points_for_perturbation(*this)); - typename Points::const_reverse_iterator cut_pt = points.rbegin(); - Points test_points; - while( cut_pt != points.rend() ) - { - if( &p == *cut_pt ) - // because the full_cell "s" is assumed to be positively oriented - return ON_NEGATIVE_SIDE; // we consider |p| to lie outside the sphere - test_points.clear(); - typename Base::Point_const_iterator spit = points_begin(s); - int adjust_sign = -1; - for( i = 0; i < current_dimension(); ++i ) - { - if( &(*spit) == *cut_pt ) - { - ++spit; - adjust_sign = (((current_dimension() + i) % 2) == 0) ? -1 : +1; - } - test_points.push_back(&(*spit)); - ++spit; - } - test_points.push_back(&p); - - typedef typename CGAL::Iterator_project, - const Point &, const Point *> Point_pointer_iterator; - - Orientation ori_value = ori( - Point_pointer_iterator(test_points.begin()), - Point_pointer_iterator(test_points.end())); - - if( ZERO != ori_value ) - return Oriented_side( - adjust_sign * ori_value ); - - ++cut_pt; - } - CGAL_assertion(false); // we should never reach here - return ON_NEGATIVE_SIDE; -} - -template< typename DCTraits, typename TDS > -bool -Delaunay_triangulation -::is_in_conflict(const Point & p, Full_cell_const_handle s) const -{ - CGAL_precondition( 2 <= current_dimension() ); - if( current_dimension() < maximal_dimension() ) - { - Conflict_pred_in_subspace c(*this, p, coaffine_orientation_predicate(), side_of_oriented_subsphere_predicate()); - return c(s); - } - else - { - Orientation_d ori = geom_traits().orientation_d_object(); - Side_of_oriented_sphere_d side = geom_traits().side_of_oriented_sphere_d_object(); - Conflict_pred_in_fullspace c(*this, p, ori, side); - return c(s); - } -} - -template< typename DCTraits, typename TDS > -template< typename OutputIterator > -typename Delaunay_triangulation::Facet -Delaunay_triangulation -::compute_conflict_zone(const Point & p, Full_cell_handle s, OutputIterator out) const -{ - CGAL_precondition( 2 <= current_dimension() ); - if( current_dimension() < maximal_dimension() ) - { - Conflict_pred_in_subspace c(*this, p, coaffine_orientation_predicate(), side_of_oriented_subsphere_predicate()); - Conflict_traversal_pred_in_subspace tp(*this, c); - return tds().gather_full_cells(s, tp, out); - } - else - { - Orientation_d ori = geom_traits().orientation_d_object(); - Side_of_oriented_sphere_d side = geom_traits().side_of_oriented_sphere_d_object(); - Conflict_pred_in_fullspace c(*this, p, ori, side); - Conflict_traversal_pred_in_fullspace tp(*this, c); - return tds().gather_full_cells(s, tp, out); - } -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY - -template< typename DCTraits, typename TDS > -bool -Delaunay_triangulation -::is_valid(bool verbose, int level) const -{ - if (!Base::is_valid(verbose, level)) - return false; - - int dim = current_dimension(); - if (dim == maximal_dimension()) - { - for (Finite_full_cell_const_iterator cit = this->finite_full_cells_begin() ; - cit != this->finite_full_cells_end() ; ++cit ) - { - Full_cell_const_handle ch = cit.base(); - for(int i = 0; i < dim+1 ; ++i ) - { - // If the i-th neighbor is not an infinite cell - Vertex_handle 
opposite_vh = - ch->neighbor(i)->vertex(ch->neighbor(i)->index(ch)); - if (!is_infinite(opposite_vh)) - { - Side_of_oriented_sphere_d side = - geom_traits().side_of_oriented_sphere_d_object(); - if (side(Point_const_iterator(ch->vertices_begin()), - Point_const_iterator(ch->vertices_end()), - opposite_vh->point()) == ON_BOUNDED_SIDE) - { - if (verbose) - CGAL_warning_msg(false, "Non-empty sphere"); - return false; - } - } - } - } - } - return true; -} - - -} //namespace CGAL - -#endif // CGAL_DELAUNAY_COMPLEX_H diff --git a/src/common/include/gudhi_patches/CGAL/Epeck_d.h b/src/common/include/gudhi_patches/CGAL/Epeck_d.h deleted file mode 100644 index 52bce84c..00000000 --- a/src/common/include/gudhi_patches/CGAL/Epeck_d.h +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_EPECK_D_H -#define CGAL_EPECK_D_H -#include -#include -#include -#include - - -namespace CGAL { -#define CGAL_BASE \ - Cartesian_base_d::Type, Dim> -template -struct Epeck_d_help1 -: CGAL_BASE -{ - CGAL_CONSTEXPR Epeck_d_help1(){} - CGAL_CONSTEXPR Epeck_d_help1(int d):CGAL_BASE(d){} -}; -#undef CGAL_BASE -#define CGAL_BASE \ - Kernel_d_interface< \ - Cartesian_wrap< \ - Epeck_d_help1, \ - Epeck_d > > -template -struct Epeck_d -: CGAL_BASE -{ - CGAL_CONSTEXPR Epeck_d(){} - CGAL_CONSTEXPR Epeck_d(int d):CGAL_BASE(d){} -}; -#undef CGAL_BASE -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/Epick_d.h b/src/common/include/gudhi_patches/CGAL/Epick_d.h deleted file mode 100644 index 64438539..00000000 --- a/src/common/include/gudhi_patches/CGAL/Epick_d.h +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_EPICK_D_H -#define CGAL_EPICK_D_H -#include -#include -#include -#include -#include -#include -#include -#include - - -namespace CGAL { -#define CGAL_BASE \ - Cartesian_filter_K< \ - Cartesian_base_d, \ - Cartesian_base_d, \ - Cartesian_base_d::Type, Dim> \ - > -template -struct Epick_d_help1 -: CGAL_BASE -{ - CGAL_CONSTEXPR Epick_d_help1(){} - CGAL_CONSTEXPR Epick_d_help1(int d):CGAL_BASE(d){} -}; -#undef CGAL_BASE -#define CGAL_BASE \ - Cartesian_static_filters,Epick_d_help2 > -template -struct Epick_d_help2 -: CGAL_BASE -{ - CGAL_CONSTEXPR Epick_d_help2(){} - CGAL_CONSTEXPR Epick_d_help2(int d):CGAL_BASE(d){} -}; -#undef CGAL_BASE -#define CGAL_BASE \ - Kernel_d_interface< \ - Cartesian_wrap< \ - Epick_d_help2, \ - Epick_d > > -template -struct Epick_d -: CGAL_BASE -{ - CGAL_CONSTEXPR Epick_d(){} - CGAL_CONSTEXPR Epick_d(int d):CGAL_BASE(d){} -}; -#undef CGAL_BASE -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h b/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h deleted file mode 100644 index 701f0820..00000000 --- a/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h +++ /dev/null @@ -1,320 +0,0 @@ -// Copyright (c) 2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL: $ -// $Id: $ -// -// Author(s) : Clement Jamin - - -#ifndef CGAL_TRIANGULATION_IO_H -#define CGAL_TRIANGULATION_IO_H - -#include -#include -#include -#include - -namespace CGAL { - -namespace Triangulation_IO -{ -// TODO: test if the stream is binary or text? -template -int -output_point(std::ostream & os, const Traits &traits, const P & p) -{ - typedef typename Traits::Compute_coordinate_d Ccd; - const Ccd ccd = traits.compute_coordinate_d_object(); - const int dim = traits.point_dimension_d_object()(p); - if (dim > 0) - { - os << ccd(p, 0); - for (int i = 1 ; i < dim ; ++i) - os << " " << CGAL::to_double(ccd(p, i)); - } - return dim; -} - -// TODO: test if the stream is binary or text? -template -int -output_weighted_point(std::ostream & os, const Traits &traits, const P & p, - bool output_weight = true) -{ - typedef typename Traits::Compute_coordinate_d Ccd; - typename Traits::Construct_point_d cp = - traits.construct_point_d_object(); - typename Traits::Compute_weight_d pt_weight = traits.compute_weight_d_object(); - const Ccd ccd = traits.compute_coordinate_d_object(); - const int dim = traits.point_dimension_d_object()(p); - if (dim > 0) - { - output_point(os, traits, p); - if (output_weight) - os << " " << pt_weight(p); - } - return dim; -} - -// TODO: test if the stream is binary or text? 
-template -void -output_full_cell(std::ostream & os, const Traits &traits, const FCH & fch, - bool output_weights = false) -{ - typename FCH::value_type::Vertex_handle_iterator vit = fch->vertices_begin(); - for( ; vit != fch->vertices_end(); ++vit ) - { - int dim; - if (output_weights) - dim = output_weighted_point(os, traits, (*vit)->point()); - else - dim = output_point(os, traits, (*vit)->point()); - if (dim > 0) - os << std::endl; - } -} - -// TODO: test if the stream is binary or text? -/*template -void -input_point(std::istream & is, const Traits &traits, P & p) -{ - typedef typename Traits::FT FT; - std::vector coords; - - std::string line; - for(;;) - { - if (!std::getline(is, line)) - return is; - if (line != "") - break; - } - std::stringstream line_sstr(line); - FT temp; - while (line_sstr >> temp) - coords.push_back(temp); - - p = traits.construct_point_d_object()(coords.begin(), coords.end()); -}*/ - -} // namespace Triangulation_IO - -/////////////////////////////////////////////////////////////// -// TODO: replace these operator>> by an "input_point" function -/////////////////////////////////////////////////////////////// - -// TODO: test if the stream is binary or text? -template -std::istream & -operator>>(std::istream &is, typename Wrap::Point_d & p) -{ - typedef typename Wrap::Point_d P; - typedef typename K::FT FT; - std::vector coords; - - std::string line; - for(;;) - { - if (!std::getline(is, line)) - return is; - if (line != "") - break; - } - std::stringstream line_sstr(line); - FT temp; - while (line_sstr >> temp) - coords.push_back(temp); - - p = P(coords.begin(), coords.end()); - return is; -} - -// TODO: test if the stream is binary or text? -template -std::istream & -operator>>(std::istream &is, typename Wrap::Weighted_point_d & wp) -{ - typedef typename Wrap::Point_d P; - typedef typename Wrap::Weighted_point_d WP; - typedef typename K::FT FT; - - std::string line; - for(;;) - { - if (!std::getline(is, line)) - return is; - if (line != "") - break; - } - std::stringstream line_sstr(line); - FT temp; - std::vector coords; - while (line_sstr >> temp) - coords.push_back(temp); - - typename std::vector::iterator last = coords.end() - 1; - P p = P(coords.begin(), last); - wp = WP(p, *last); - - return is; -} - -// TODO: test if the stream is binary or text? 
-template -std::istream & -operator>>(std::istream &is, typename Wrap::Vector_d & v) -{ - typedef typename Wrap::Vector_d V; - typedef typename K::FT FT; - std::vector coords; - - std::string line; - for (;;) - { - if (!std::getline(is, line)) - return is; - if (line != "") - break; - } - std::stringstream line_sstr(line); - FT temp; - while (line_sstr >> temp) - coords.push_back(temp); - - v = V(coords.begin(), coords.end()); - return is; -} - -template < class GT, class TDS > -std::ostream & -export_triangulation_to_off(std::ostream & os, - const Triangulation & tr, - bool in_3D_export_surface_only = false) -{ - typedef Triangulation Tr; - typedef typename Tr::Vertex_const_handle Vertex_handle; - typedef typename Tr::Finite_vertex_const_iterator Finite_vertex_iterator; - typedef typename Tr::Finite_full_cell_const_iterator Finite_full_cell_iterator; - typedef typename Tr::Full_cell_const_iterator Full_cell_iterator; - typedef typename Tr::Full_cell Full_cell; - typedef typename Full_cell::Vertex_handle_const_iterator Full_cell_vertex_iterator; - - if (tr.maximal_dimension() < 2 || tr.maximal_dimension() > 3) - { - std::cerr << "Warning: export_tds_to_off => dimension should be 2 or 3."; - os << "Warning: export_tds_to_off => dimension should be 2 or 3."; - return os; - } - - size_t n = tr.number_of_vertices(); - - std::stringstream output; - - // write the vertices - std::map index_of_vertex; - int i = 0; - for(Finite_vertex_iterator it = tr.finite_vertices_begin(); - it != tr.finite_vertices_end(); ++it, ++i) - { - Triangulation_IO::output_point(output, tr.geom_traits(), it->point()); - if (tr.maximal_dimension() == 2) - output << " 0"; - output << std::endl; - index_of_vertex[it.base()] = i; - } - CGAL_assertion( i == n ); - - size_t number_of_triangles = 0; - if (tr.maximal_dimension() == 2) - { - for (Finite_full_cell_iterator fch = tr.finite_full_cells_begin() ; - fch != tr.finite_full_cells_end() ; ++fch) - { - output << "3 "; - for (Full_cell_vertex_iterator vit = fch->vertices_begin() ; - vit != fch->vertices_end() ; ++vit) - { - output << index_of_vertex[*vit] << " "; - } - output << std::endl; - ++number_of_triangles; - } - } - else if (tr.maximal_dimension() == 3) - { - if (in_3D_export_surface_only) - { - // Parse boundary facets - for (Full_cell_iterator fch = tr.full_cells_begin() ; - fch != tr.full_cells_end() ; ++fch) - { - if (tr.is_infinite(fch)) - { - output << "3 "; - for (Full_cell_vertex_iterator vit = fch->vertices_begin() ; - vit != fch->vertices_end() ; ++vit) - { - if (!tr.is_infinite(*vit)) - output << index_of_vertex[*vit] << " "; - } - output << std::endl; - ++number_of_triangles; - } - } - } - else - { - // Parse finite cells - for (Finite_full_cell_iterator fch = tr.finite_full_cells_begin() ; - fch != tr.finite_full_cells_end() ; ++fch) - { - output << "3 " - << index_of_vertex[fch->vertex(0)] << " " - << index_of_vertex[fch->vertex(1)] << " " - << index_of_vertex[fch->vertex(2)] - << std::endl; - output << "3 " - << index_of_vertex[fch->vertex(0)] << " " - << index_of_vertex[fch->vertex(2)] << " " - << index_of_vertex[fch->vertex(3)] - << std::endl; - output << "3 " - << index_of_vertex[fch->vertex(1)] << " " - << index_of_vertex[fch->vertex(2)] << " " - << index_of_vertex[fch->vertex(3)] - << std::endl; - output << "3 " - << index_of_vertex[fch->vertex(0)] << " " - << index_of_vertex[fch->vertex(1)] << " " - << index_of_vertex[fch->vertex(3)] - << std::endl; - number_of_triangles += 4; - } - } - } - - os << "OFF \n" - << n << " " - << number_of_triangles 
<< " 0\n" - << output.str(); - - return os; -} - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_IO_H diff --git a/src/common/include/gudhi_patches/CGAL/Kd_tree.h b/src/common/include/gudhi_patches/CGAL/Kd_tree.h deleted file mode 100644 index f085b0da..00000000 --- a/src/common/include/gudhi_patches/CGAL/Kd_tree.h +++ /dev/null @@ -1,582 +0,0 @@ -// Copyright (c) 2002,2011,2014 Utrecht University (The Netherlands), Max-Planck-Institute Saarbruecken (Germany). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Hans Tangelder (), -// : Waqar Khan - -#ifndef CGAL_KD_TREE_H -#define CGAL_KD_TREE_H - -#include "Kd_tree_node.h" - -#include -#include -#include - -#include -#include -#include - - -#include -#include -#include - -#ifdef CGAL_HAS_THREADS -#include -#endif - -namespace CGAL { - -//template , class UseExtendedNode = Tag_true > -template , class UseExtendedNode = Tag_true > -class Kd_tree { - -public: - typedef SearchTraits Traits; - typedef Splitter_ Splitter; - typedef typename SearchTraits::Point_d Point_d; - typedef typename Splitter::Container Point_container; - - typedef typename SearchTraits::FT FT; - typedef Kd_tree_node Node; - typedef Kd_tree_leaf_node Leaf_node; - typedef Kd_tree_internal_node Internal_node; - typedef Kd_tree Tree; - typedef Kd_tree Self; - - typedef Node* Node_handle; - typedef const Node* Node_const_handle; - typedef Leaf_node* Leaf_node_handle; - typedef const Leaf_node* Leaf_node_const_handle; - typedef Internal_node* Internal_node_handle; - typedef const Internal_node* Internal_node_const_handle; - typedef typename std::vector::const_iterator Point_d_iterator; - typedef typename std::vector::const_iterator Point_d_const_iterator; - typedef typename Splitter::Separator Separator; - typedef typename std::vector::const_iterator iterator; - typedef typename std::vector::const_iterator const_iterator; - - typedef typename std::vector::size_type size_type; - - typedef typename internal::Get_dimension_tag::Dimension D; - -private: - SearchTraits traits_; - Splitter split; - - - // wokaround for https://svn.boost.org/trac/boost/ticket/9332 -#if (_MSC_VER == 1800) && (BOOST_VERSION == 105500) - std::deque internal_nodes; - std::deque leaf_nodes; -#else - boost::container::deque internal_nodes; - boost::container::deque leaf_nodes; -#endif - - Node_handle tree_root; - - Kd_tree_rectangle* bbox; - std::vector pts; - - // Instead of storing the points in arrays in the Kd_tree_node - // we put all the data in a vector in the Kd_tree. - // and we only store an iterator range in the Kd_tree_node. 
- // - std::vector data; - - - #ifdef CGAL_HAS_THREADS - mutable CGAL_MUTEX building_mutex;//mutex used to protect const calls inducing build() - #endif - bool built_; - bool removed_; - - // protected copy constructor - Kd_tree(const Tree& tree) - : traits_(tree.traits_),built_(tree.built_) - {}; - - - // Instead of the recursive construction of the tree in the class Kd_tree_node - // we do this in the tree class. The advantage is that we then can optimize - // the allocation of the nodes. - - // The leaf node - Node_handle - create_leaf_node(Point_container& c) - { - Leaf_node node(true , static_cast(c.size())); - std::ptrdiff_t tmp = c.begin() - data.begin(); - node.data = pts.begin() + tmp; - - leaf_nodes.push_back(node); - Leaf_node_handle nh = &leaf_nodes.back(); - - - return nh; - } - - - // The internal node - - Node_handle - create_internal_node(Point_container& c, const Tag_true&) - { - return create_internal_node_use_extension(c); - } - - Node_handle - create_internal_node(Point_container& c, const Tag_false&) - { - return create_internal_node(c); - } - - - - // TODO: Similar to the leaf_init function above, a part of the code should be - // moved to the class Kd_tree_node. - // It is not proper yet, but the goal was to see if there is - // a potential performance gain through the Compact_container - Node_handle - create_internal_node_use_extension(Point_container& c) - { - Internal_node node(false); - internal_nodes.push_back(node); - Internal_node_handle nh = &internal_nodes.back(); - - Separator sep; - Point_container c_low(c.dimension(),traits_); - split(sep, c, c_low); - nh->set_separator(sep); - - int cd = nh->cutting_dimension(); - if(!c_low.empty()){ - nh->lower_low_val = c_low.tight_bounding_box().min_coord(cd); - nh->lower_high_val = c_low.tight_bounding_box().max_coord(cd); - } - else{ - nh->lower_low_val = nh->cutting_value(); - nh->lower_high_val = nh->cutting_value(); - } - if(!c.empty()){ - nh->upper_low_val = c.tight_bounding_box().min_coord(cd); - nh->upper_high_val = c.tight_bounding_box().max_coord(cd); - } - else{ - nh->upper_low_val = nh->cutting_value(); - nh->upper_high_val = nh->cutting_value(); - } - - CGAL_assertion(nh->cutting_value() >= nh->lower_low_val); - CGAL_assertion(nh->cutting_value() <= nh->upper_high_val); - - if (c_low.size() > split.bucket_size()){ - nh->lower_ch = create_internal_node_use_extension(c_low); - }else{ - nh->lower_ch = create_leaf_node(c_low); - } - if (c.size() > split.bucket_size()){ - nh->upper_ch = create_internal_node_use_extension(c); - }else{ - nh->upper_ch = create_leaf_node(c); - } - - - - - return nh; - } - - - // Note also that I duplicated the code to get rid of the if's for - // the boolean use_extension which was constant over the construction - Node_handle - create_internal_node(Point_container& c) - { - Internal_node node(false); - internal_nodes.push_back(node); - Internal_node_handle nh = &internal_nodes.back(); - Separator sep; - - Point_container c_low(c.dimension(),traits_); - split(sep, c, c_low); - nh->set_separator(sep); - - if (c_low.size() > split.bucket_size()){ - nh->lower_ch = create_internal_node(c_low); - }else{ - nh->lower_ch = create_leaf_node(c_low); - } - if (c.size() > split.bucket_size()){ - nh->upper_ch = create_internal_node(c); - }else{ - nh->upper_ch = create_leaf_node(c); - } - - - - return nh; - } - - - -public: - - Kd_tree(Splitter s = Splitter(),const SearchTraits traits=SearchTraits()) - : traits_(traits),split(s), built_(false), removed_(false) - {} - - template -
Kd_tree(InputIterator first, InputIterator beyond, - Splitter s = Splitter(),const SearchTraits traits=SearchTraits()) - : traits_(traits),split(s), built_(false), removed_(false) - { - pts.insert(pts.end(), first, beyond); - } - - bool empty() const { - return pts.empty(); - } - - void - build() - { - // This function is not ready to be called when a tree already exists, one - // must call invalidate_built() first. - CGAL_assertion(!is_built()); - CGAL_assertion(!removed_); - const Point_d& p = *pts.begin(); - typename SearchTraits::Construct_cartesian_const_iterator_d ccci=traits_.construct_cartesian_const_iterator_d_object(); - int dim = static_cast(std::distance(ccci(p), ccci(p,0))); - - data.reserve(pts.size()); - for(unsigned int i = 0; i < pts.size(); i++){ - data.push_back(&pts[i]); - } - Point_container c(dim, data.begin(), data.end(),traits_); - bbox = new Kd_tree_rectangle(c.bounding_box()); - if (c.size() <= split.bucket_size()){ - tree_root = create_leaf_node(c); - }else { - tree_root = create_internal_node(c, UseExtendedNode()); - } - - //Reorder vector for spatial locality - std::vector ptstmp; - ptstmp.resize(pts.size()); - for (std::size_t i = 0; i < pts.size(); ++i){ - ptstmp[i] = *data[i]; - } - for(std::size_t i = 0; i < leaf_nodes.size(); ++i){ - std::ptrdiff_t tmp = leaf_nodes[i].begin() - pts.begin(); - leaf_nodes[i].data = ptstmp.begin() + tmp; - } - pts.swap(ptstmp); - - data.clear(); - - built_ = true; - } - -private: - //any call to this function is for the moment not threadsafe - void const_build() const { - #ifdef CGAL_HAS_THREADS - //this ensure that build() will be called once - CGAL_SCOPED_LOCK(building_mutex); - if(!is_built()) - #endif - const_cast(this)->build(); //THIS IS NOT THREADSAFE - } -public: - - bool is_built() const - { - return built_; - } - - void invalidate_built() - { - if(removed_){ - // Walk the tree to collect the remaining points. - // Writing directly to pts would likely work, but better be safe. - std::vector ptstmp; - //ptstmp.resize(root()->num_items()); - root()->tree_items(std::back_inserter(ptstmp)); - pts.swap(ptstmp); - removed_=false; - CGAL_assertion(is_built()); // the rest of the cleanup must happen - } - if(is_built()){ - internal_nodes.clear(); - leaf_nodes.clear(); - data.clear(); - delete bbox; - built_ = false; - } - } - - void clear() - { - invalidate_built(); - pts.clear(); - removed_ = false; - } - - void - insert(const Point_d& p) - { - invalidate_built(); - pts.push_back(p); - } - - template - void - insert(InputIterator first, InputIterator beyond) - { - invalidate_built(); - pts.insert(pts.end(),first, beyond); - } - -private: - struct Equal_by_coordinates { - SearchTraits const* traits; - Point_d const* pp; - bool operator()(Point_d const&q) const { - typename SearchTraits::Construct_cartesian_const_iterator_d ccci=traits->construct_cartesian_const_iterator_d_object(); - return std::equal(ccci(*pp), ccci(*pp,0), ccci(q)); - } - }; - Equal_by_coordinates equal_by_coordinates(Point_d const&p){ - Equal_by_coordinates ret = { &traits(), &p }; - return ret; - } - -public: - void - remove(const Point_d& p) - { - remove(p, equal_by_coordinates(p)); - } - - template - void - remove(const Point_d& p, Equal const& equal_to_p) - { -#if 0 - // This code could have quadratic runtime. - if (!is_built()) { - std::vector::iterator pi = std::find(pts.begin(), pts.end(), p); - // Precondition: the point must be there. 
- CGAL_assertion (pi != pts.end()); - pts.erase(pi); - return; - } -#endif - bool success = remove_(p, 0, false, 0, false, root(), equal_to_p); - CGAL_assertion(success); - - // Do not set the flag if the tree has been cleared. - if(is_built()) - removed_ |= success; - } -private: - template - bool remove_(const Point_d& p, - Internal_node_handle grandparent, bool parent_islower, - Internal_node_handle parent, bool islower, - Node_handle node, Equal const& equal_to_p) { - // Recurse to locate the point - if (!node->is_leaf()) { - Internal_node_handle newparent = static_cast(node); - // FIXME: This should be if(xcutting_dimension()] <= newparent->cutting_value()) { - if (remove_(p, parent, islower, newparent, true, newparent->lower(), equal_to_p)) - return true; - } - //if (traits().construct_cartesian_const_iterator_d_object()(p)[newparent->cutting_dimension()] >= newparent->cutting_value()) - return remove_(p, parent, islower, newparent, false, newparent->upper(), equal_to_p); - - CGAL_assertion(false); // Point was not found - } - - // Actual removal - Leaf_node_handle lnode = static_cast(node); - if (lnode->size() > 1) { - iterator pi = std::find_if(lnode->begin(), lnode->end(), equal_to_p); - // FIXME: we should ensure this never happens - if (pi == lnode->end()) return false; - iterator lasti = lnode->end() - 1; - if (pi != lasti) { - // Hack to get a non-const iterator - std::iter_swap(pts.begin()+(pi-pts.begin()), pts.begin()+(lasti-pts.begin())); - } - lnode->drop_last_point(); - } else if (!equal_to_p(*lnode->begin())) { - // FIXME: we should ensure this never happens - return false; - } else if (grandparent) { - Node_handle brother = islower ? parent->upper() : parent->lower(); - if (parent_islower) - grandparent->set_lower(brother); - else - grandparent->set_upper(brother); - } else if (parent) { - tree_root = islower ? parent->upper() : parent->lower(); - } else { - clear(); - } - return true; - } - -public: - //For efficiency, reserve the size of the points vectors in advance (if the number of points is already known). - void reserve(size_t size) - { - pts.reserve(size); - } - - //Get the capacity of the underlying points vector. - size_t capacity() - { - return pts.capacity(); - } - - - template - OutputIterator - search(OutputIterator it, const FuzzyQueryItem& q) const - { - if(! pts.empty()){ - - if(! is_built()){ - const_build(); - } - Kd_tree_rectangle b(*bbox); - return tree_root->search(it,q,b); - } - return it; - } - - - template - boost::optional - search_any_point(const FuzzyQueryItem& q) const - { - if(! pts.empty()){ - - if(! is_built()){ - const_build(); - } - Kd_tree_rectangle b(*bbox); - return tree_root->search_any_point(q,b); - } - return boost::none; - } - - - ~Kd_tree() { - if(is_built()){ - delete bbox; - } - } - - - const SearchTraits& - traits() const - { - return traits_; - } - - Node_const_handle - root() const - { - if(! is_built()){ - const_build(); - } - return tree_root; - } - - Node_handle - root() - { - if(! is_built()){ - build(); - } - return tree_root; - } - - void - print() const - { - if(! is_built()){ - const_build(); - } - root()->print(); - } - - const Kd_tree_rectangle& - bounding_box() const - { - if(! is_built()){ - const_build(); - } - return *bbox; - } - - const_iterator - begin() const - { - return pts.begin(); - } - - const_iterator - end() const - { - return pts.end(); - } - - size_type - size() const - { - return pts.size(); - } - - // Print statistics of the tree. - std::ostream& - statistics(std::ostream& s) const - { - if(!
is_built()){ - const_build(); - } - s << "Tree statistics:" << std::endl; - s << "Number of items stored: " - << root()->num_items() << std::endl; - s << "Number of nodes: " - << root()->num_nodes() << std::endl; - s << " Tree depth: " << root()->depth() << std::endl; - return s; - } - - -}; - -} // namespace CGAL - -#endif // CGAL_KD_TREE_H diff --git a/src/common/include/gudhi_patches/CGAL/Kd_tree_node.h b/src/common/include/gudhi_patches/CGAL/Kd_tree_node.h deleted file mode 100644 index 909ee260..00000000 --- a/src/common/include/gudhi_patches/CGAL/Kd_tree_node.h +++ /dev/null @@ -1,586 +0,0 @@ -// Copyright (c) 2002,2011 Utrecht University (The Netherlands). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// -// Authors : Hans Tangelder () - -#ifndef CGAL_KD_TREE_NODE_H -#define CGAL_KD_TREE_NODE_H - -#include "CGAL/Splitters.h" - -#include -#include - -namespace CGAL { - - template - class Kd_tree; - - template < class TreeTraits, class Splitter, class UseExtendedNode > - class Kd_tree_node { - - friend class Kd_tree; - - typedef typename Kd_tree::Node_handle Node_handle; - typedef typename Kd_tree::Node_const_handle Node_const_handle; - typedef typename Kd_tree::Internal_node_handle Internal_node_handle; - typedef typename Kd_tree::Internal_node_const_handle Internal_node_const_handle; - typedef typename Kd_tree::Leaf_node_handle Leaf_node_handle; - typedef typename Kd_tree::Leaf_node_const_handle Leaf_node_const_handle; - typedef typename TreeTraits::Point_d Point_d; - - typedef typename TreeTraits::FT FT; - typedef typename Kd_tree::Separator Separator; - typedef typename Kd_tree::Point_d_iterator Point_d_iterator; - typedef typename Kd_tree::iterator iterator; - typedef typename Kd_tree::D D; - - bool leaf; - - public : - Kd_tree_node(bool leaf_) - :leaf(leaf_){} - - bool is_leaf() const{ - return leaf; - } - - std::size_t - num_items() const - { - if (is_leaf()){ - Leaf_node_const_handle node = - static_cast(this); - return node->size(); - } - else { - Internal_node_const_handle node = - static_cast(this); - return node->lower()->num_items() + node->upper()->num_items(); - } - } - - std::size_t - num_nodes() const - { - if (is_leaf()) return 1; - else { - Internal_node_const_handle node = - static_cast(this); - return node->lower()->num_nodes() + node->upper()->num_nodes(); - } - } - - int - depth(const int current_max_depth) const - { - if (is_leaf()){ - return current_max_depth; - } - else { - Internal_node_const_handle node = - static_cast(this); - return - (std::max)( node->lower()->depth(current_max_depth + 1), - node->upper()->depth(current_max_depth + 1)); - } - } - - int - depth() const - { - return depth(1); - } - - template - OutputIterator - tree_items(OutputIterator it) const { - if (is_leaf()) { - Leaf_node_const_handle node = - static_cast(this); - if (node->size()>0) - for (iterator i=node->begin(); i != node->end(); i++) - {*it=*i; ++it;} - } - else { - Internal_node_const_handle node 
= - static_cast(this); - it=node->lower()->tree_items(it); - it=node->upper()->tree_items(it); - } - return it; - } - - - boost::optional - any_tree_item() const { - boost::optional result = boost::none; - if (is_leaf()) { - Leaf_node_const_handle node = - static_cast(this); - if (node->size()>0){ - return boost::make_optional(*(node->begin())); - } - } - else { - Internal_node_const_handle node = - static_cast(this); - result = node->lower()->any_tree_item(); - if(! result){ - result = node->upper()->any_tree_item(); - } - } - return result; - } - - - void - indent(int d) const - { - for(int i = 0; i < d; i++){ - std::cout << " "; - } - } - - - void - print(int d = 0) const - { - if (is_leaf()) { - Leaf_node_const_handle node = - static_cast(this); - indent(d); - std::cout << "leaf" << std::endl; - if (node->size()>0) - for (iterator i=node->begin(); i != node->end(); i++) - {indent(d);std::cout << *i << std::endl;} - } - else { - Internal_node_const_handle node = - static_cast(this); - indent(d); - std::cout << "lower tree" << std::endl; - node->lower()->print(d+1); - indent(d); - std::cout << "separator: dim = " << node->cutting_dimension() << " val = " << node->cutting_value() << std::endl; - indent(d); - std::cout << "upper tree" << std::endl; - node->upper()->print(d+1); - } - } - - - template - OutputIterator - search(OutputIterator it, const FuzzyQueryItem& q, - Kd_tree_rectangle& b) const - { - if (is_leaf()) { - Leaf_node_const_handle node = - static_cast(this); - if (node->size()>0) - for (iterator i=node->begin(); i != node->end(); i++) - if (q.contains(*i)) - {*it++=*i;} - } - else { - Internal_node_const_handle node = - static_cast(this); - // after splitting b denotes the lower part of b - Kd_tree_rectangle b_upper(b); - b.split(b_upper, node->cutting_dimension(), - node->cutting_value()); - - if (q.outer_range_contains(b)) - it=node->lower()->tree_items(it); - else - if (q.inner_range_intersects(b)) - it=node->lower()->search(it,q,b); - if (q.outer_range_contains(b_upper)) - it=node->upper()->tree_items(it); - else - if (q.inner_range_intersects(b_upper)) - it=node->upper()->search(it,q,b_upper); - }; - return it; - } - - - template - boost::optional - search_any_point(const FuzzyQueryItem& q, - Kd_tree_rectangle& b) const - { - boost::optional result = boost::none; - if (is_leaf()) { - Leaf_node_const_handle node = - static_cast(this); - if (node->size()>0) - for (iterator i=node->begin(); i != node->end(); i++) - if (q.contains(*i)) - { result = *i; break; } - } - else { - Internal_node_const_handle node = - static_cast(this); - // after splitting b denotes the lower part of b - Kd_tree_rectangle b_upper(b); - b.split(b_upper, node->cutting_dimension(), - node->cutting_value()); - - if (q.outer_range_contains(b)){ - result = node->lower()->any_tree_item(); - }else{ - if (q.inner_range_intersects(b)){ - result = node->lower()->search_any_point(q,b); - } - } - if(result){ - return result; - } - if (q.outer_range_contains(b_upper)){ - result = node->upper()->any_tree_item(); - }else{ - if (q.inner_range_intersects(b_upper)) - result = node->upper()->search_any_point(q,b_upper); - } - } - return result; - } - - }; - - - template < class TreeTraits, class Splitter, class UseExtendedNode > - class Kd_tree_leaf_node : public Kd_tree_node< TreeTraits, Splitter, UseExtendedNode >{ - - friend class Kd_tree; - - typedef typename Kd_tree::iterator iterator; - typedef Kd_tree_node< TreeTraits, Splitter, UseExtendedNode> Base; - typedef typename TreeTraits::Point_d Point_d; - - 
private: - - // private variables for leaf nodes - boost::int32_t n; // denotes number of items in a leaf node - iterator data; // iterator to data in leaf node - - - public: - - // default constructor - Kd_tree_leaf_node() - {} - - Kd_tree_leaf_node(bool leaf_ ) - : Base(leaf_) - {} - - Kd_tree_leaf_node(bool leaf_,unsigned int n_ ) - : Base(leaf_), n(n_) - {} - - // members for all nodes - - // members for leaf nodes only - inline - unsigned int - size() const - { - return n; - } - - inline - iterator - begin() const - { - return data; - } - - inline - iterator - end() const - { - return data + n; - } - - inline - void - drop_last_point() - { - --n; - } - - }; //leaf node - - - - template < class TreeTraits, class Splitter, class UseExtendedNode> - class Kd_tree_internal_node : public Kd_tree_node< TreeTraits, Splitter, UseExtendedNode >{ - - friend class Kd_tree; - - typedef Kd_tree_node< TreeTraits, Splitter, UseExtendedNode> Base; - typedef typename Kd_tree::Node_handle Node_handle; - typedef typename Kd_tree::Node_const_handle Node_const_handle; - - typedef typename TreeTraits::FT FT; - typedef typename Kd_tree::Separator Separator; - - private: - - // private variables for internal nodes - boost::int32_t cut_dim; - FT cut_val; - Node_handle lower_ch, upper_ch; - - - // private variables for extended internal nodes - FT upper_low_val; - FT upper_high_val; - FT lower_low_val; - FT lower_high_val; - - - public: - - // default constructor - Kd_tree_internal_node() - {} - - Kd_tree_internal_node(bool leaf_) - : Base(leaf_) - {} - - - // members for internal node and extended internal node - - inline - Node_const_handle - lower() const - { - return lower_ch; - } - - inline - Node_const_handle - upper() const - { - return upper_ch; - } - - inline - Node_handle - lower() - { - return lower_ch; - } - - inline - Node_handle - upper() - { - return upper_ch; - } - - inline - void - set_lower(Node_handle nh) - { - lower_ch = nh; - } - - inline - void - set_upper(Node_handle nh) - { - upper_ch = nh; - } - - // inline Separator& separator() {return sep; } - // use instead - inline - void set_separator(Separator& sep){ - cut_dim = sep.cutting_dimension(); - cut_val = sep.cutting_value(); - } - - inline - FT - cutting_value() const - { - return cut_val; - } - - inline - int - cutting_dimension() const - { - return cut_dim; - } - - // members for extended internal node only - inline - FT - upper_low_value() const - { - return upper_low_val; - } - - inline - FT - upper_high_value() const - { - return upper_high_val; - } - - inline - FT - lower_low_value() const - { - return lower_low_val; - } - - inline - FT - lower_high_value() const - { - return lower_high_val; - } - - /*Separator& - separator() - { - return Separator(cutting_dimension,cutting_value); - }*/ - - - };//internal node - - template < class TreeTraits, class Splitter> - class Kd_tree_internal_node : public Kd_tree_node< TreeTraits, Splitter, Tag_false >{ - - friend class Kd_tree; - - typedef Kd_tree_node< TreeTraits, Splitter, Tag_false> Base; - typedef typename Kd_tree::Node_handle Node_handle; - typedef typename Kd_tree::Node_const_handle Node_const_handle; - - typedef typename TreeTraits::FT FT; - typedef typename Kd_tree::Separator Separator; - - private: - - // private variables for internal nodes - boost::uint8_t cut_dim; - FT cut_val; - - Node_handle lower_ch, upper_ch; - - public: - - // default constructor - Kd_tree_internal_node() - {} - - Kd_tree_internal_node(bool leaf_) - : Base(leaf_) - {} - - - // members for internal node 
and extended internal node - - inline - Node_const_handle - lower() const - { - return lower_ch; - } - - inline - Node_const_handle - upper() const - { - return upper_ch; - } - - inline - Node_handle - lower() - { - return lower_ch; - } - - inline - Node_handle - upper() - { - return upper_ch; - } - - inline - void - set_lower(Node_handle nh) - { - lower_ch = nh; - } - - inline - void - set_upper(Node_handle nh) - { - upper_ch = nh; - } - - // inline Separator& separator() {return sep; } - // use instead - - inline - void set_separator(Separator& sep){ - cut_dim = sep.cutting_dimension(); - cut_val = sep.cutting_value(); - } - - inline - FT - cutting_value() const - { - return cut_val; - } - - inline - int - cutting_dimension() const - { - return cut_dim; - } - - /* Separator& - separator() - { - return Separator(cutting_dimension,cutting_value); - }*/ - - - };//internal node - - - -} // namespace CGAL -#endif // CGAL_KDTREE_NODE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h deleted file mode 100644 index c13a9801..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_LA_BASE_H -#define CGAL_KERNEL_D_CARTESIAN_LA_BASE_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#ifdef CGAL_EIGEN3_ENABLED -#include -#else -#error Eigen3 is required -#endif - -namespace CGAL { - -template < typename FT_, typename Dim_, -#if 1 - typename Vec_=Mix_vector, - Vector_vector, - FT_, Dim_>, -#elif 0 - typename Vec_=Array_vector, -#elif 0 - typename Vec_=Vector_vector, -#else - // Dangerous because of alignment. Ok on x86_64 without AVX. - typename Vec_=LA_eigen, -#endif - typename LA_=LA_eigen > - /* Default LA to Vec or to LA_eigen? 
*/ -struct Cartesian_LA_base_d : public Dimension_base -{ - typedef Cartesian_LA_base_d Self; - typedef Cartesian_tag Rep_tag; - typedef Cartesian_tag Kernel_tag; - typedef Dim_ Default_ambient_dimension; - typedef Dim_ Max_ambient_dimension; - typedef Dim_ Dimension; - typedef LA_ LA; - template struct Ambient_dimension { typedef Dim_ type; }; - - typedef Vec_ LA_vector; - typedef typename LA_vector::Vector Point; - typedef typename LA_vector::Vector Vector; - typedef typename LA_vector::Vector Vector_; - typedef typename LA_vector::Construct_vector Constructor; - typedef typename LA_vector::Vector_const_iterator Point_cartesian_const_iterator; - typedef typename LA_vector::Vector_const_iterator Vector_cartesian_const_iterator; - - template struct Type {}; - template struct Type< Point_tag, D> { typedef Vector_ type; }; - template struct Type { typedef Vector_ type; }; - template struct Type< FT_tag, D> { typedef FT_ type; }; - template struct Type< RT_tag, D> { typedef FT_ type; }; - - typedef typeset - ::add::type - // FIXME: These have nothing to do here. - ::add::type - ::add::type - ::add::type - ::add::type - Object_list; - - typedef typeset< Point_cartesian_const_iterator_tag>::type - ::add::type - Iterator_list; - - template > struct Functor { - typedef Null_functor type; - }; - template struct Functor,D> { - typedef CartesianDVectorBase::Construct_LA_vector type; - }; - template struct Functor,D> { - typedef CartesianDVectorBase::Construct_LA_vector type; - }; - template struct Functor,D> { - typedef CartesianDVectorBase::Construct_cartesian_const_iterator type; - }; - template struct Functor,D> { - typedef CartesianDVectorBase::Construct_cartesian_const_iterator type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Sum_of_vectors type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Difference_of_vectors type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Opposite_vector type; - }; - template struct Functor::value - || !LA_vector::template Property::value> > { - typedef CartesianDVectorBase::Midpoint type; - }; - template struct Functor { - typedef CartesianDVectorBase::Compute_cartesian_coordinate type; - }; - template struct Functor { - typedef CartesianDVectorBase::Compute_cartesian_coordinate type; - }; - template struct Functor { - typedef CartesianDVectorBase::PV_dimension type; - }; - template struct Functor { - typedef CartesianDVectorBase::PV_dimension type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Orientation_of_vectors type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Orientation_of_points type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Scalar_product type; - }; - template struct Functor::value> > { - typedef CartesianDVectorBase::Squared_distance_to_origin_stored type; - }; - // Use integral_constant in case of failure, to distinguish from the previous one. 
- template struct Functor::value - || !LA_vector::template Property::value)*2> > { - typedef CartesianDVectorBase::Squared_distance_to_origin_via_dotprod type; - }; - template struct Functor { - typedef CartesianDVectorBase::Identity_functor type; - }; - template struct Functor { - typedef CartesianDVectorBase::Identity_functor type; - }; - - CGAL_CONSTEXPR Cartesian_LA_base_d(){} - CGAL_CONSTEXPR Cartesian_LA_base_d(int d):Dimension_base(d){} -}; - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_CARTESIAN_LA_BASE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h deleted file mode 100644 index 871c463a..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h +++ /dev/null @@ -1,344 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_CARTESIAN_LA_FUNCTORS_H -#define CGAL_CARTESIAN_LA_FUNCTORS_H - -#include -#include -#include -#include -#include -#include -#include - -namespace CGAL { -namespace CartesianDVectorBase { -#ifndef CGAL_CXX11 -namespace internal { -template struct Construct_LA_vector_ { - struct Never_use {}; - void operator()(Never_use)const; -}; -#define CGAL_CODE(Z,N,_) template struct Construct_LA_vector_ > { \ - typedef typename R::Constructor Constructor; \ - typedef typename Get_type::type RT; \ - typedef typename R::Vector_ result_type; \ - result_type operator() \ - (BOOST_PP_ENUM_PARAMS(N,RT const& t)) const { \ - return typename Constructor::Values()(BOOST_PP_ENUM_PARAMS(N,t)); \ - } \ - result_type operator() \ - (BOOST_PP_ENUM_PARAMS(BOOST_PP_INC(N),RT const& t)) const { \ - return typename Constructor::Values_divide()(t##N,BOOST_PP_ENUM_PARAMS(N,t)); \ - } \ - }; -BOOST_PP_REPEAT_FROM_TO(2, 11, CGAL_CODE, _ ) -#undef CGAL_CODE -} -#endif - -template struct Construct_LA_vector -: private Store_kernel -#ifndef CGAL_CXX11 -, public internal::Construct_LA_vector_ -#endif -{ - //CGAL_FUNCTOR_INIT_IGNORE(Construct_LA_vector) - CGAL_FUNCTOR_INIT_STORE(Construct_LA_vector) - typedef R_ R; - typedef typename R::Constructor Constructor; - typedef typename Get_type::type RT; - typedef typename Get_type::type FT; - typedef typename R::Vector_ result_type; - typedef typename R_::Default_ambient_dimension Dimension; - result_type operator()(int d)const{ - CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); - return typename Constructor::Dimension()(d); - } - result_type operator()()const{ - return typename Constructor::Dimension()((std::max)(0,this->kernel().dimension())); - } - result_type operator()(int d, Zero_ const&)const{ - CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); - return typename Constructor::Dimension()(d); - } - result_type operator()(Zero_ const&)const{ - // Makes no sense for an unknown dimension. 
- return typename Constructor::Dimension()(this->kernel().dimension()); - } - result_type operator()(result_type const& v)const{ - return v; - } -#ifdef CGAL_CXX11 - result_type operator()(result_type&& v)const{ - return std::move(v); - } -#endif -#ifdef CGAL_CXX11 - template - typename std::enable_if::value && - boost::is_same, Dimension>::value, - result_type>::type - operator()(U&&...u)const{ - return typename Constructor::Values()(std::forward(u)...); - } - //template::value>::type,class=typename std::enable_if<(sizeof...(U)==static_dim+1)>::type,class=void> - template - typename std::enable_if::value && - boost::is_same, Dimension>::value, - result_type>::type - operator()(U&&...u)const{ - return Apply_to_last_then_rest()(typename Constructor::Values_divide(),std::forward(u)...); - } -#else - using internal::Construct_LA_vector_::operator(); -#endif - template inline - typename boost::enable_if,result_type>::type operator() - (Iter f,Iter g,Cartesian_tag t)const - { - return this->operator()((int)std::distance(f,g),f,g,t); - } - template inline - typename boost::enable_if,result_type>::type operator() - (int d,Iter f,Iter g,Cartesian_tag)const - { - CGAL_assertion(d==std::distance(f,g)); - CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); - return typename Constructor::Iterator()(d,f,g); - } - template inline - typename boost::enable_if,result_type>::type operator() - (Iter f,Iter g,Homogeneous_tag)const - { - --g; - return this->operator()((int)std::distance(f,g),f,g,*g); - } - template inline - typename boost::enable_if,result_type>::type operator() - (int d,Iter f,Iter g,Homogeneous_tag)const - { - --g; - return this->operator()(d,f,g,*g); - } - template inline - typename boost::enable_if,result_type>::type operator() - (Iter f,Iter g)const - { - // Shouldn't it try comparing dist(f,g) to the dimension if it is known? - return this->operator()(f,g,typename R::Rep_tag()); - } - template inline - typename boost::enable_if,result_type>::type operator() - (int d,Iter f,Iter g)const - { - return this->operator()(d,f,g,typename R::Rep_tag()); - } - - // Last homogeneous coordinate given separately - template inline - typename boost::enable_if,result_type>::type operator() - (int d,Iter f,Iter g,NT const&l)const - { - CGAL_assertion(d==std::distance(f,g)); - CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); - // RT? better be safe for now - return typename Constructor::Iterator()(d,CGAL::make_transforming_iterator(f,Divide(l)),CGAL::make_transforming_iterator(g,Divide(l))); - } - template inline - typename boost::enable_if,result_type>::type operator() - (Iter f,Iter g,NT const&l)const - { - return this->operator()((int)std::distance(f,g),f,g,l); - } -}; - -template struct Compute_cartesian_coordinate { - CGAL_FUNCTOR_INIT_IGNORE(Compute_cartesian_coordinate) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename R::Vector_ first_argument_type; - typedef int second_argument_type; - typedef Tag_true Is_exact; -#ifdef CGAL_CXX11 - typedef decltype(std::declval()[0]) result_type; -#else - typedef RT const& result_type; - // RT const& doesn't work with some LA (Eigen2 for instance) so we - // should use plain RT or find a way to detect this. 
-#endif - - result_type operator()(first_argument_type const& v,int i)const{ - return v[i]; - } -}; - -template struct Construct_cartesian_const_iterator { - CGAL_FUNCTOR_INIT_IGNORE(Construct_cartesian_const_iterator) - typedef R_ R; - typedef typename R::Vector_ argument_type; - typedef typename R::LA_vector S_; - typedef typename R::Point_cartesian_const_iterator result_type; - // same as Vector - typedef Tag_true Is_exact; - - result_type operator()(argument_type const& v,Begin_tag)const{ - return S_::vector_begin(v); - } - result_type operator()(argument_type const& v,End_tag)const{ - return S_::vector_end(v); - } -}; - -template struct Midpoint { - CGAL_FUNCTOR_INIT_IGNORE(Midpoint) - typedef R_ R; - typedef typename Get_type::type first_argument_type; - typedef typename Get_type::type second_argument_type; - typedef typename Get_type::type result_type; - - result_type operator()(result_type const& a, result_type const& b)const{ - return (a+b)/2; - } -}; - -template struct Sum_of_vectors { - CGAL_FUNCTOR_INIT_IGNORE(Sum_of_vectors) - typedef R_ R; - typedef typename Get_type::type first_argument_type; - typedef typename Get_type::type second_argument_type; - typedef typename Get_type::type result_type; - - result_type operator()(result_type const& a, result_type const& b)const{ - return a+b; - } -}; - -template struct Difference_of_vectors { - CGAL_FUNCTOR_INIT_IGNORE(Difference_of_vectors) - typedef R_ R; - typedef typename Get_type::type first_argument_type; - typedef typename Get_type::type second_argument_type; - typedef typename Get_type::type result_type; - - result_type operator()(result_type const& a, result_type const& b)const{ - return a-b; - } -}; - -template struct Opposite_vector { - CGAL_FUNCTOR_INIT_IGNORE(Opposite_vector) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_type::type argument_type; - - result_type operator()(result_type const& v)const{ - return -v; - } -}; - -template struct Scalar_product { - CGAL_FUNCTOR_INIT_IGNORE(Scalar_product) - typedef R_ R; - typedef typename R::LA_vector LA; - typedef typename Get_type::type result_type; - typedef typename Get_type::type first_argument_type; - typedef typename Get_type::type second_argument_type; - - result_type operator()(first_argument_type const& a, second_argument_type const& b)const{ - return LA::dot_product(a,b); - } -}; - -template struct Squared_distance_to_origin_stored { - CGAL_FUNCTOR_INIT_IGNORE(Squared_distance_to_origin_stored) - typedef R_ R; - typedef typename R::LA_vector LA; - typedef typename Get_type::type result_type; - typedef typename Get_type::type argument_type; - - result_type operator()(argument_type const& a)const{ - return LA::squared_norm(a); - } -}; - -template struct Squared_distance_to_origin_via_dotprod { - CGAL_FUNCTOR_INIT_IGNORE(Squared_distance_to_origin_via_dotprod) - typedef R_ R; - typedef typename R::LA_vector LA; - typedef typename Get_type::type result_type; - typedef typename Get_type::type argument_type; - - result_type operator()(argument_type const& a)const{ - return LA::dot_product(a,a); - } -}; - -template struct Orientation_of_vectors { - CGAL_FUNCTOR_INIT_IGNORE(Orientation_of_vectors) - typedef R_ R; - typedef typename R::Vector_cartesian_const_iterator first_argument_type; - typedef typename R::Vector_cartesian_const_iterator second_argument_type; - typedef typename Get_type::type result_type; - typedef typename R::LA_vector LA; - - template - result_type operator()(Iter const& f, Iter const& e) const { - return 
LA::determinant_of_iterators_to_vectors(f,e); - } -}; - -template struct Orientation_of_points { - CGAL_FUNCTOR_INIT_IGNORE(Orientation_of_points) - typedef R_ R; - typedef typename R::Point_cartesian_const_iterator first_argument_type; - typedef typename R::Point_cartesian_const_iterator second_argument_type; - typedef typename Get_type::type result_type; - typedef typename R::LA_vector LA; - - template - result_type operator()(Iter const& f, Iter const& e) const { - return LA::determinant_of_iterators_to_points(f,e); - } -}; - -template struct PV_dimension { - CGAL_FUNCTOR_INIT_IGNORE(PV_dimension) - typedef R_ R; - typedef typename R::Vector_ argument_type; - typedef int result_type; - typedef typename R::LA_vector LA; - typedef Tag_true Is_exact; - - template - result_type operator()(T const& v) const { - return LA::size_of_vector(v); - } -}; - -template struct Identity_functor { - CGAL_FUNCTOR_INIT_IGNORE(Identity_functor) - template - T const& operator()(T const&t) const { return t; } -}; - -} -} // namespace CGAL -#endif // CGAL_CARTESIAN_LA_FUNCTORS_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h deleted file mode 100644 index 641bf8ae..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_BASE_H -#define CGAL_KERNEL_D_CARTESIAN_BASE_H - -#include -#include -#include - -namespace CGAL { -#define CGAL_BASE \ - Cartesian_LA_base_d< FT_, Dim_ > -template < typename FT_, typename Dim_, typename Derived_=Default> -struct Cartesian_base_d : public CGAL_BASE -{ - CGAL_CONSTEXPR Cartesian_base_d(){} - CGAL_CONSTEXPR Cartesian_base_d(int d):CGAL_BASE(d){} -}; -#undef CGAL_BASE - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_CARTESIAN_BASE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h deleted file mode 100644 index e09c72d0..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H -#define CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H - -#include -#include -#include -#include - -namespace CGAL { - -template < typename Base_, typename FT_, typename LA_=CGAL::LA_eigen > -struct Cartesian_change_FT_base : public - Base_ -{ - CGAL_CONSTEXPR Cartesian_change_FT_base(){} - CGAL_CONSTEXPR Cartesian_change_FT_base(int d):Base_(d){} - - typedef Cartesian_change_FT_base Self; - typedef Base_ Kernel_base; - typedef LA_ LA; - - template struct Type : Inherit_type {}; - template struct Type { typedef FT_ type; }; - template struct Type { typedef FT_ type; }; - - typedef NT_converter::type,FT_> FT_converter; - typedef transforming_iterator Point_cartesian_const_iterator; - typedef transforming_iterator Vector_cartesian_const_iterator; - //FIXME: use Iterator_list! - /* - template::value_tag,FT_tag>::value> - struct Iterator : Get_type {}; - template struct Iterator { - typedef transforming_iterator::type> type; - }; - */ - - template - struct Construct_cartesian_const_iterator_ { - typedef typename Get_functor::type Functor_base; - Construct_cartesian_const_iterator_(){} - Construct_cartesian_const_iterator_(Self const&r):f(r){} - Functor_base f; - typedef Type_ result_type; - template - result_type operator()(T const& v, Begin_tag)const{ - return make_transforming_iterator(f(v,Begin_tag()),FT_converter()); - } - template - result_type operator()(T const& v, End_tag)const{ - return make_transforming_iterator(f(v,End_tag()),FT_converter()); - } - }; - typedef Construct_cartesian_const_iterator_,Point_cartesian_const_iterator> Construct_point_cartesian_const_iterator; - typedef Construct_cartesian_const_iterator_,Vector_cartesian_const_iterator> Construct_vector_cartesian_const_iterator; - - template - struct Compute_cartesian_coordinate { - typedef typename Get_functor::type Functor_base; - Compute_cartesian_coordinate(){} - Compute_cartesian_coordinate(Self const&r):f(r){} - Functor_base f; - typedef FT_ result_type; - template - result_type operator()(Obj_ const& v,int i)const{ - return FT_converter()(f(v,i)); - } - }; - - template::type> struct Functor : - Inherit_functor { }; - template struct Functor { }; - template struct Functor { }; - template struct Functor { - typedef Compute_cartesian_coordinate type; - }; - template struct Functor { - typedef Compute_cartesian_coordinate type; - }; - template struct Functor,D,Construct_iterator_tag> { - typedef Construct_point_cartesian_const_iterator type; - }; - template struct Functor,D,Construct_iterator_tag> { - typedef Construct_vector_cartesian_const_iterator type; - }; -}; - -template < typename Base_, typename FT_> -struct Cartesian_change_FT : public - Cartesian_change_FT_base -{ - CGAL_CONSTEXPR Cartesian_change_FT(){} - CGAL_CONSTEXPR Cartesian_change_FT(int d):Cartesian_change_FT_base(d){} -}; - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h deleted file mode 100644 index ef8921db..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; 
either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_COMPLETE_H -#define CGAL_KERNEL_D_CARTESIAN_COMPLETE_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#endif // CGAL_KERNEL_D_CARTESIAN_COMPLETE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h deleted file mode 100644 index 179e97bf..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_FILTER_K_H -#define CGAL_KERNEL_D_CARTESIAN_FILTER_K_H - -#include -#include -#include -#include -#include - -namespace CGAL { - -template < typename Base_, typename AK_, typename EK_ > -struct Cartesian_filter_K : public Base_, - private Store_kernel, private Store_kernel2 -{ - CGAL_CONSTEXPR Cartesian_filter_K(){} - CGAL_CONSTEXPR Cartesian_filter_K(int d):Base_(d){} - //FIXME: or do we want an instance of AK and EK belonging to this kernel, - //instead of a reference to external ones? - CGAL_CONSTEXPR Cartesian_filter_K(AK_ const&a,EK_ const&b):Base_(),Store_kernel(a),Store_kernel2(b){} - CGAL_CONSTEXPR Cartesian_filter_K(int d,AK_ const&a,EK_ const&b):Base_(d),Store_kernel(a),Store_kernel2(b){} - typedef Base_ Kernel_base; - typedef AK_ AK; - typedef EK_ EK; - typedef typename Store_kernel::reference_type AK_rt; - AK_rt approximate_kernel()const{return this->kernel();} - typedef typename Store_kernel2::reference2_type EK_rt; - EK_rt exact_kernel()const{return this->kernel2();} - - // MSVC is too dumb to perform the empty base optimization. - typedef boost::mpl::and_< - internal::Do_not_store_kernel, - internal::Do_not_store_kernel, - internal::Do_not_store_kernel > Do_not_store_kernel; - - //TODO: C2A/C2E could be able to convert *this into this->kernel() or this->kernel2(). - typedef KernelD_converter C2A; - typedef KernelD_converter C2E; - - // fix the types - // TODO: only fix some types, based on some criterion? 
- template struct Type : Get_type {}; - - template::type> struct Functor : - Inherit_functor {}; - template struct Functor { - typedef typename Get_functor::type AP; - typedef typename Get_functor::type EP; - typedef Filtered_predicate2 type; - }; -// TODO: -// template struct Functor : -// Kernel_base::template Functor {}; -// TODO: -// detect when Less_cartesian_coordinate doesn't need filtering -}; - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_CARTESIAN_FILTER_K_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h deleted file mode 100644 index c390a55c..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H -#define CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H - -#include -#include -#include - -namespace CGAL { - -template < typename Base_ > -struct Cartesian_filter_NT : public Base_ -{ - CGAL_CONSTEXPR Cartesian_filter_NT(){} - CGAL_CONSTEXPR Cartesian_filter_NT(int d):Base_(d){} - typedef Base_ Kernel_base; - typedef Cartesian_change_FT K1; - typedef typename internal::Exact_field_selector::type>::Type Exact_nt; - typedef Cartesian_change_FT K2; - - template::type> struct Functor : - Inherit_functor {}; - template struct Functor { - struct type { - //TODO: use compression (derive from a compressed_pair?) - typedef typename Get_functor::type P1; P1 p1; - typedef typename Get_functor::type P2; P2 p2; - typedef typename P2::result_type result_type; - type(){} - type(Cartesian_filter_NT const&k):p1(reinterpret_cast(k)),p2(reinterpret_cast(k)){} - //FIXME: if predicate's constructor takes a kernel as argument, how do we translate that? reinterpret_cast is really ugly and possibly unsafe. - -#ifdef CGAL_CXX11 - template result_type operator()(U&&...u)const{ - { - Protect_FPU_rounding p; - try { - typename P1::result_type res=p1(u...); // don't forward as u may be reused - if(is_certain(res)) return get_certain(res); - } catch (Uncertain_conversion_exception) {} - } - return p2(std::forward(u)...); - } -#else - result_type operator()()const{ // does it make sense to have 0 argument? 
- { - Protect_FPU_rounding p; - try { - typename P1::result_type res=p1(); - if(is_certain(res)) return get_certain(res); - } catch (Uncertain_conversion_exception) {} - } - return p2(); - } -#define CGAL_CODE(Z,N,_) template result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t))const{ \ - { \ - Protect_FPU_rounding p; \ - try { \ - typename P1::result_type res=p1(BOOST_PP_ENUM_PARAMS(N,t)); \ - if(is_certain(res)) return get_certain(res); \ - } catch (Uncertain_conversion_exception) {} \ - } \ - return p2(BOOST_PP_ENUM_PARAMS(N,t)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - -#endif - }; - }; -}; - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h deleted file mode 100644 index 179f7319..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_CARTESIAN_PER_DIM_H -#define CGAL_KD_CARTESIAN_PER_DIM_H -#include -#include -#include - -// Should probably disappear. - -namespace CGAL { -template -struct Cartesian_per_dimension : public R_ {}; -} - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h deleted file mode 100644 index 693e962a..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_CARTESIAN_STATIC_FILTERS_H -#define CGAL_KD_CARTESIAN_STATIC_FILTERS_H -#include -#include -#include // bug, should be included by the next one -#include -#include - -namespace CGAL { -namespace SFA { // static filter adapter -// Note that this would be quite a bit simpler without stateful kernels -template struct Orientation_of_points_2 : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation_of_points_2) - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - typedef typename Get_type::type FT; - typedef typename Get_functor::type CC; - typedef typename Get_functor::type Orientation_base; - // TODO: Move this out for easy reuse - struct Adapter { - struct Point_2 { - R_ const&r; CC const&c; Point const& p; - Point_2(R_ const&r_, CC const&c_, Point const&p_):r(r_),c(c_),p(p_){} - // use result_of instead? - typename CC::result_type x()const{return c(p,0);} - typename CC::result_type y()const{return c(p,1);} - }; - struct Vector_2 {}; - struct Circle_2 {}; - struct Orientation_2 { - typedef typename Orientation_of_points_2::result_type result_type; - result_type operator()(Point_2 const&A, Point_2 const&B, Point_2 const&C)const{ - Point const* t[3]={&A.p,&B.p,&C.p}; - return Orientation_base(A.r)(make_transforming_iterator(t+0),make_transforming_iterator(t+3)); - } - }; - }; - template result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ - CC c(this->kernel()); - Point const& A=*f; - Point const& B=*++f; - Point const& C=*++f; - CGAL_assertion(++f==e); - typedef typename Adapter::Point_2 P; - return typename internal::Static_filters_predicates::Orientation_2()(P(this->kernel(),c,A),P(this->kernel(),c,B),P(this->kernel(),c,C)); - } -}; -} - -template -struct Cartesian_static_filters : public R_ { - CGAL_CONSTEXPR Cartesian_static_filters(){} - CGAL_CONSTEXPR Cartesian_static_filters(int d):R_(d){} -}; - -template -struct Cartesian_static_filters, R_, Derived_> : public R_ { - CGAL_CONSTEXPR Cartesian_static_filters(){} - CGAL_CONSTEXPR Cartesian_static_filters(int d):R_(d){} - typedef Cartesian_static_filters, R_, Derived_> Self; - typedef typename Default::Get::type Derived; - template struct Functor : Inherit_functor {}; - template struct Functor { - typedef - //typename boost::mpl::if_ < - //boost::is_same, - //typename Get_functor::type, - SFA::Orientation_of_points_2 - // >::type - type; - }; -}; - -} - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h deleted file mode 100644 index 43015d24..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h +++ /dev/null @@ -1,330 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_COAFFINE_H -#define CGAL_KD_COAFFINE_H -#include -#include -#include -#include -#include - -namespace CGAL { -namespace CartesianDKernelFunctors { -struct Flat_orientation { - std::vector proj; - std::vector rest; - bool reverse; -}; - -// For debugging purposes -inline std::ostream& operator<< (std::ostream& o, Flat_orientation const& f) { - o << "Proj: "; - for(std::vector::const_iterator i=f.proj.begin(); - i!=f.proj.end(); ++i) - o << *i << ' '; - o << "\nRest: "; - for(std::vector::const_iterator i=f.rest.begin(); - i!=f.rest.end(); ++i) - o << *i << ' '; - o << "\nInv: " << f.reverse; - return o << '\n'; -} - -namespace internal { -namespace coaffine { -template -inline void debug_matrix(std::ostream& o, Mat const&mat) { - for(int i=0;i struct Construct_flat_orientation : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_flat_orientation) - typedef R_ R; - typedef typename Get_type::type FT; - typedef typename Get_type::type Point; - typedef typename Increment_dimension::type Dplusone; - typedef typename R::LA::template Rebind_dimension::Other LA; - typedef typename LA::Square_matrix Matrix; - typedef typename Get_functor::type CCC; - typedef typename Get_functor::type PD; - typedef Flat_orientation result_type; - - // This implementation is going to suck. Maybe we should push the - // functionality into LA. And we should check (in debug mode) that - // the points are affinely independent. - template - result_type operator()(Iter f, Iter e)const{ - Iter f_save = f; - PD pd (this->kernel()); - CCC ccc (this->kernel()); - int dim = pd(*f); - Matrix coord (dim+1, dim+1); // use distance(f,e)? This matrix doesn't need to be square. - int col = 0; - Flat_orientation o; - std::vector& proj=o.proj; - std::vector& rest=o.rest; rest.reserve(dim+1); - for(int i=0; i p; - try - { - // No forward here, the arguments may still be needed - Ares res = ap(c2a(args)...); - if (is_certain(res)) - return get_certain(res); - } - catch (Uncertain_conversion_exception) {} - } - CGAL_BRANCH_PROFILER_BRANCH(tmp); - Protect_FPU_rounding p(CGAL_FE_TONEAREST); - return ep(c2e(std::forward(args))...); - } -#else - -#define CGAL_VAR(Z,N,C) C(a##N) -#define CGAL_CODE(Z,N,_) \ - template \ - result_type \ - operator()(BOOST_PP_ENUM_BINARY_PARAMS(N, A, const& a)) const \ - { \ - CGAL_BRANCH_PROFILER(std::string(" failures/calls to : ") + std::string(CGAL_PRETTY_FUNCTION), tmp); \ - { \ - Protect_FPU_rounding p; \ - try \ - { \ - Ares res = ap(BOOST_PP_ENUM(N,CGAL_VAR,c2a)); \ - if (is_certain(res)) \ - return get_certain(res); \ - } \ - catch (Uncertain_conversion_exception) {} \ - } \ - CGAL_BRANCH_PROFILER_BRANCH(tmp); \ - Protect_FPU_rounding p(CGAL_FE_TONEAREST); \ - return ep(BOOST_PP_ENUM(N,CGAL_VAR,c2e)); \ - } - BOOST_PP_REPEAT_FROM_TO(1, 10, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR - -#endif -}; - -} //namespace CGAL - -#endif // CGAL_FILTERED_PREDICATE2_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h deleted file mode 100644 index a8896976..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software 
Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_CONVERTER_H -#define CGAL_KERNEL_D_CARTESIAN_CONVERTER_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace CGAL { -namespace internal { -// Reverses order, but that shouldn't matter. -template struct Map_taglist_to_typelist : - Map_taglist_to_typelist::type - ::template add::type> -{}; -template struct Map_taglist_to_typelist > : typeset<> {}; -} - -template > -struct Object_converter { - typedef Object result_type; - template - result_type operator()(Object const& o, F const& f) const { - typedef typename List::head H; - if (H const* ptr = object_cast(&o)) - return make_object(f(*ptr)); - else - return Object_converter()(o,f); - } -}; -template<> -struct Object_converter > { - typedef Object result_type; - template - result_type operator()(Object const&,F const&)const { - CGAL_error_msg("Cartesiand_converter is unable to determine what is wrapped in the Object"); - return Object(); - } -}; - - - //TODO: special case when K1==K2 (or they are very close?) -template -class KernelD_converter_ -: public KernelD_converter_ -{ - typedef typename List::head Tag_; - typedef typename List::tail Rest; - typedef KernelD_converter_ Base; - typedef typename Get_type::type K1_Obj; - typedef typename Get_type::type K2_Obj; - typedef typename Get_functor >::type K1_Conv; - typedef KO_converter KOC; - typedef CGAL_BOOSTD is_same no_converter; - typedef typename internal::Map_taglist_to_typelist::type::template contains duplicate; - - // Disable the conversion in some cases: - struct Do_not_use{}; - - // Explicit calls to boost::mpl functions to avoid parenthesis - // warning on some versions of GCC - typedef typename boost::mpl::if_ < - // If Point==Vector, keep only one conversion - boost::mpl::or_, - // For iterator objects, the default is make_transforming_iterator - boost::mpl::bool_<(iterator_tag_traits::is_iterator && no_converter::value)> >, - Do_not_use,K1_Obj>::type argument_type; - //typedef typename KOC::argument_type K1_Obj; - //typedef typename KOC::result_type K2_Obj; - public: - using Base::operator(); // don't use directly, just make it accessible to the next level - K2_Obj helper(K1_Obj const& o,CGAL_BOOSTD true_type)const{ - return KOC()(this->myself().kernel(),this->myself().kernel2(),this->myself(),o); - } - K2_Obj helper(K1_Obj const& o,CGAL_BOOSTD false_type)const{ - return K1_Conv(this->myself().kernel())(this->myself().kernel2(),this->myself(),o); - } - K2_Obj operator()(argument_type const& o)const{ - return helper(o,no_converter()); - } - template struct result:Base::template result{}; - template struct result {typedef K2_Obj type;}; -}; - -template -class KernelD_converter_ > { - public: - struct Do_not_use2{}; - void operator()(Do_not_use2)const{} - template struct result; - Final_& myself(){return *static_cast(this);} - Final_ const& myself()const{return *static_cast(this);} -}; - - -// TODO: use the intersection of Kn::Object_list. 
-template::type -//typeset::add::type/*::add::type*/ -> class KernelD_converter - : public Store_kernel, public Store_kernel2, - public KernelD_converter_,K1,K2,List_> -{ - typedef KernelD_converter Self; - typedef Self Final_; - typedef KernelD_converter_ Base; - typedef typename Get_type::type FT1; - typedef typename Get_type::type FT2; - typedef NT_converter NTc; - NTc c; // TODO: compressed storage as this is likely empty and the converter gets passed around (and stored in iterators) - - public: - KernelD_converter(){} - KernelD_converter(K1 const&a,K2 const&b):Store_kernel(a),Store_kernel2(b){} - - // For boost::result_of, used in transforming_iterator - template::value?42:0> struct result:Base::template result{}; - template struct result { - typedef transforming_iterator type; - }; - template struct result{typedef K2 type;}; - template struct result{typedef int type;}; - // Ideally the next 2 would come with Point_tag and Vector_tag, but that's hard... - template struct result{typedef Origin type;}; - template struct result{typedef Null_vector type;}; - template struct result{typedef Object type;}; - template struct result{typedef FT2 type;}; - - using Base::operator(); - typename Store_kernel2::reference2_type operator()(K1 const&)const{return this->kernel2();} - int operator()(int i)const{return i;} - Origin operator()(Origin const&o)const{return o;} - Null_vector operator()(Null_vector const&v)const{return v;} - FT2 operator()(FT1 const&x)const{return c(x);} - //RT2 operator()(typename First_if_different::Type const&x)const{return cr(x);} - - typename Get_type::type const& - operator()(typename Get_type::type const&o)const - { return o; } // Both kernels should have the same, returning a reference should warn if not. - - template - transforming_iterator,It>::type> - operator()(It const& it) const { - return make_transforming_iterator(it,*this); - } - - template - //TODO: use decltype in C++11 instead of result - std::vector::type> - operator()(const std::vector& v) const { - return std::vector::type>(operator()(v.begin()),operator()(v.begin())); - } - - //TODO: convert std::list and other containers? - - Object - operator()(const Object &obj) const - { - typedef typename internal::Map_taglist_to_typelist::type Possibilities; - //TODO: add Empty, vector, etc to the list. - return Object_converter()(obj,*this); - } - - //TODO: convert boost::variant - -}; - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_CARTESIAN_CONVERTER_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h deleted file mode 100644 index fa30dff0..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_KERNEL_2_INTERFACE_H -#define CGAL_KD_KERNEL_2_INTERFACE_H - -#include -#include -#include -#include - - -namespace CGAL { -template struct Kernel_2_interface : public Base_ { - typedef Base_ Base; - typedef Kernel_2_interface Kernel; - typedef typename Get_type::type RT; - typedef typename Get_type::type FT; - typedef typename Get_type::type Boolean; - typedef typename Get_type::type Sign; - typedef typename Get_type::type Comparison_result; - typedef typename Get_type::type Orientation; - typedef typename Get_type::type Oriented_side; - typedef typename Get_type::type Bounded_side; - typedef typename Get_type::type Angle; - typedef typename Get_type::type Point_2; - typedef typename Get_type::type Vector_2; - typedef typename Get_type::type Segment_2; - typedef cpp0x::tuple Triangle_2; // triangulation insists... - template struct Help_2p_i { - typedef typename Get_functor::type LT; - typedef typename LT::result_type result_type; - LT lt; - Help_2p_i(Kernel const&k):lt(k){} - result_type operator()(Point_2 const&a, Point_2 const&b) { - return lt(a,b,i); - } - }; - typedef Help_2p_i Less_x_2; - typedef Help_2p_i Less_y_2; - typedef Help_2p_i Compare_x_2; - typedef Help_2p_i Compare_y_2; - struct Compare_distance_2 { - typedef typename Get_functor::type CD; - typedef typename CD::result_type result_type; - CD cd; - Compare_distance_2(Kernel const&k):cd(k){} - result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c) { - return cd(a,b,c); - } - result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c, Point_2 const&d) { - return cd(a,b,c,d); - } - }; - struct Orientation_2 { - typedef typename Get_functor::type O; - typedef typename O::result_type result_type; - O o; - Orientation_2(Kernel const&k):o(k){} - result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c) { - //return o(a,b,c); - Point_2 const* t[3]={&a,&b,&c}; - return o(make_transforming_iterator(t+0),make_transforming_iterator(t+3)); - - } - }; - struct Side_of_oriented_circle_2 { - typedef typename Get_functor::type SOS; - typedef typename SOS::result_type result_type; - SOS sos; - Side_of_oriented_circle_2(Kernel const&k):sos(k){} - result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c, Point_2 const&d) { - //return sos(a,b,c,d); - Point_2 const* t[4]={&a,&b,&c,&d}; - return sos(make_transforming_iterator(t+0),make_transforming_iterator(t+4)); - } - }; - Less_x_2 less_x_2_object()const{ return Less_x_2(*this); } - Less_y_2 less_y_2_object()const{ return Less_y_2(*this); } - Compare_x_2 compare_x_2_object()const{ return Compare_x_2(*this); } - Compare_y_2 compare_y_2_object()const{ return Compare_y_2(*this); } - Compare_distance_2 compare_distance_2_object()const{ return Compare_distance_2(*this); } - Orientation_2 orientation_2_object()const{ return Orientation_2(*this); } - Side_of_oriented_circle_2 side_of_oriented_circle_2_object()const{ return Side_of_oriented_circle_2(*this); } -}; -} - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h deleted file mode 100644 index 96076aa8..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the 
GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_KERNEL_3_INTERFACE_H -#define CGAL_KD_KERNEL_3_INTERFACE_H - -#include -#include -#include -#include - - -namespace CGAL { -template struct Kernel_3_interface : public Base_ { - typedef Base_ Base; - typedef Kernel_3_interface Kernel; - typedef typename Get_type::type RT; - typedef typename Get_type::type FT; - typedef typename Get_type::type Boolean; - typedef typename Get_type::type Sign; - typedef typename Get_type::type Comparison_result; - typedef typename Get_type::type Orientation; - typedef typename Get_type::type Oriented_side; - typedef typename Get_type::type Bounded_side; - typedef typename Get_type::type Angle; - typedef typename Get_type::type Point_3; - typedef typename Get_type::type Vector_3; - typedef typename Get_type::type Segment_3; - typedef cpp0x::tuple Triangle_3; // placeholder - typedef cpp0x::tuple Tetrahedron_3; // placeholder - struct Compare_xyz_3 { - typedef typename Get_functor::type CL; - typedef typename CL::result_type result_type; - CL cl; - Compare_xyz_3(Kernel const&k):cl(k){} - result_type operator()(Point_3 const&a, Point_3 const&b) { - return cl(a,b); - } - }; - struct Compare_distance_3 { - typedef typename Get_functor::type CD; - typedef typename CD::result_type result_type; - CD cd; - Compare_distance_3(Kernel const&k):cd(k){} - result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c) { - return cd(a,b,c); - } - result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d) { - return cd(a,b,c,d); - } - }; - struct Orientation_3 { - typedef typename Get_functor::type O; - typedef typename O::result_type result_type; - O o; - Orientation_3(Kernel const&k):o(k){} - result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d) { - //return o(a,b,c,d); - Point_3 const* t[4]={&a,&b,&c,&d}; - return o(make_transforming_iterator(t+0),make_transforming_iterator(t+4)); - - } - }; - struct Side_of_oriented_sphere_3 { - typedef typename Get_functor::type SOS; - typedef typename SOS::result_type result_type; - SOS sos; - Side_of_oriented_sphere_3(Kernel const&k):sos(k){} - result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d, Point_3 const&e) { - //return sos(a,b,c,d); - Point_3 const* t[5]={&a,&b,&c,&d,&e}; - return sos(make_transforming_iterator(t+0),make_transforming_iterator(t+5)); - } - }; - - // I don't have the Coplanar predicates (yet) - - - Compare_xyz_3 compare_xyz_3_object()const{ return Compare_xyz_3(*this); } - Compare_distance_3 compare_distance_3_object()const{ return Compare_distance_3(*this); } - Orientation_3 orientation_3_object()const{ return Orientation_3(*this); } - Side_of_oriented_sphere_3 side_of_oriented_sphere_3_object()const{ return Side_of_oriented_sphere_3(*this); } -}; -} - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h deleted file mode 100644 index 
dd888005..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h +++ /dev/null @@ -1,298 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_KERNEL_D_INTERFACE_H -#define CGAL_KD_KERNEL_D_INTERFACE_H - -#include -#include -#include -#include - - -namespace CGAL { -template struct Kernel_d_interface : public Base_ { - CGAL_CONSTEXPR Kernel_d_interface(){} - CGAL_CONSTEXPR Kernel_d_interface(int d):Base_(d){} - - typedef Base_ Base; - typedef Kernel_d_interface Kernel; - typedef Base_ R_; // for the macros - typedef typename Get_type::type RT; - typedef typename Get_type::type FT; - typedef typename Get_type::type Boolean; - typedef typename Get_type::type Sign; - typedef typename Get_type::type Comparison_result; - typedef typename Get_type::type Orientation; - typedef typename Get_type::type Oriented_side; - typedef typename Get_type::type Bounded_side; - typedef typename Get_type::type Angle; - typedef typename Get_type::type Flat_orientation_d; - typedef typename Get_type::type Point_d; - typedef typename Get_type::type Vector_d; - typedef typename Get_type::type Segment_d; - typedef typename Get_type::type Sphere_d; - typedef typename Get_type::type Hyperplane_d; - typedef Vector_d Direction_d; - typedef typename Get_type::type Line_d; - typedef typename Get_type::type Ray_d; - typedef typename Get_type::type Iso_box_d; - typedef typename Get_type::type Aff_transformation_d; - typedef typename Get_type::type Weighted_point_d; - typedef typename Get_functor::type Compute_coordinate_d; - typedef typename Get_functor::type Compare_lexicographically_d; - typedef typename Get_functor::type Equal_d; - typedef typename Get_functor::type Less_lexicographically_d; - typedef typename Get_functor::type Less_or_equal_lexicographically_d; - // FIXME: and vectors? 
- typedef typename Get_functor::type Orientation_d; - typedef typename Get_functor::type Less_coordinate_d; - typedef typename Get_functor::type Point_dimension_d; - typedef typename Get_functor::type Side_of_oriented_sphere_d; - typedef typename Get_functor::type Power_side_of_power_sphere_d; - typedef typename Get_functor::type Power_center_d; - typedef typename Get_functor::type Power_distance_d; - typedef typename Get_functor::type Contained_in_affine_hull_d; - typedef typename Get_functor::type Construct_flat_orientation_d; - typedef typename Get_functor::type In_flat_orientation_d; - typedef typename Get_functor::type In_flat_side_of_oriented_sphere_d; - typedef typename Get_functor::type In_flat_power_side_of_power_sphere_d; - typedef typename Get_functor::type Point_to_vector_d; - typedef typename Get_functor::type Vector_to_point_d; - typedef typename Get_functor::type Translated_point_d; - typedef typename Get_functor::type Scaled_vector_d; - typedef typename Get_functor::type Difference_of_vectors_d; - typedef typename Get_functor::type Difference_of_points_d; - //typedef typename Get_functor >::type Construct_point_d; - struct Construct_point_d : private Store_kernel { - typedef Kernel R_; // for the macro - CGAL_FUNCTOR_INIT_STORE(Construct_point_d) - typedef typename Get_functor >::type CP; - typedef Point_d result_type; - Point_d operator()(Weighted_point_d const&wp)const{ - return typename Get_functor::type(this->kernel())(wp); - } -#ifdef CGAL_CXX11 - Point_d operator()(Weighted_point_d &wp)const{ - return typename Get_functor::type(this->kernel())(wp); - } - Point_d operator()(Weighted_point_d &&wp)const{ - return typename Get_functor::type(this->kernel())(std::move(wp)); - } - Point_d operator()(Weighted_point_d const&&wp)const{ - return typename Get_functor::type(this->kernel())(std::move(wp)); - } - template -# if __cplusplus >= 201402L - decltype(auto) -# else - Point_d -# endif - operator()(T&&...t)const{ - return CP(this->kernel())(std::forward(t)...); - //return CP(this->kernel())(t...); - } -#else -# define CGAL_CODE(Z,N,_) template \ - Point_d operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t))const{ \ - return CP(this->kernel())(BOOST_PP_ENUM_PARAMS(N,t)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -# undef CGAL_CODE - Point_d operator()()const{ \ - return CP(this->kernel())(); \ - } -#endif - }; - typedef typename Get_functor >::type Construct_vector_d; - typedef typename Get_functor >::type Construct_segment_d; - typedef typename Get_functor >::type Construct_sphere_d; - typedef typename Get_functor >::type Construct_hyperplane_d; - typedef Construct_vector_d Construct_direction_d; - typedef typename Get_functor >::type Construct_line_d; - typedef typename Get_functor >::type Construct_ray_d; - typedef typename Get_functor >::type Construct_iso_box_d; - typedef typename Get_functor >::type Construct_aff_transformation_d; - typedef typename Get_functor >::type Construct_weighted_point_d; - typedef typename Get_functor::type Midpoint_d; - struct Component_accessor_d : private Store_kernel { - typedef Kernel R_; // for the macro - CGAL_FUNCTOR_INIT_STORE(Component_accessor_d) - int dimension(Point_d const&p){ - return this->kernel().point_dimension_d_object()(p); - } - FT cartesian(Point_d const&p, int i){ - return this->kernel().compute_coordinate_d_object()(p,i); - } - RT homogeneous(Point_d const&p, int i){ - if (i == dimension(p)) - return 1; - return cartesian(p, i); - } - }; - struct Construct_cartesian_const_iterator_d : private Store_kernel { - 
CGAL_FUNCTOR_INIT_STORE(Construct_cartesian_const_iterator_d) - typedef typename Get_functor >::type CPI; - typedef typename Get_functor >::type CVI; - // FIXME: The following sometimes breaks compilation. The typedef below forces instantiation of this, which forces Point_d, which itself (in the wrapper) needs the derived kernel to tell it what the base kernel is, and that's a cycle. The exact circumstances are not clear, g++ and clang++ are ok in both C++03 and C++11, it is only clang in C++11 without CGAL_CXX11 that breaks. For now, rely on result_type. - //typedef typename CGAL::decay::type>::type result_type; - typedef typename CGAL::decay::type result_type; - // Kernel_d requires a common iterator type for points and vectors - // TODO: provide this mixed functor in preKernel? - //CGAL_static_assertion((boost::is_same::type>::type, result_type>::value)); - CGAL_static_assertion((boost::is_same::type, result_type>::value)); - template - result_type operator()(Point_d const&p, Tag_ t)const{ - return CPI(this->kernel())(p,t); - } - template - result_type operator()(typename First_if_different::Type const&v, Tag_ t)const{ - return CVI(this->kernel())(v,t); - } - - template - result_type operator()(Obj const&o)const{ - return operator()(o, Begin_tag()); - } - result_type operator()(Point_d const&p, int)const{ - return operator()(p, End_tag()); - } - result_type operator()(typename First_if_different::Type const&v, int)const{ - return operator()(v, End_tag()); - } - }; - struct Compute_squared_radius_d : private Store_kernel { - typedef Kernel R_; // for the macro - CGAL_FUNCTOR_INIT_STORE(Compute_squared_radius_d) - typedef FT result_type; - template FT operator()(CGAL_FORWARDABLE(S) s)const{ - return typename Get_functor::type(this->kernel())(CGAL_FORWARD(S,s)); - } - template FT operator()(I b, I e)const{ - return typename Get_functor::type(this->kernel())(b,e); - } - }; - typedef typename Construct_cartesian_const_iterator_d::result_type Cartesian_const_iterator_d; - typedef typename Get_functor::type Squared_distance_d; - typedef typename Get_functor::type Squared_length_d; - typedef typename Get_functor::type Scalar_product_d; - typedef typename Get_functor::type Affine_rank_d; - typedef typename Get_functor::type Affinely_independent_d; - typedef typename Get_functor::type Contained_in_linear_hull_d; - typedef typename Get_functor::type Contained_in_simplex_d; - typedef typename Get_functor::type Has_on_positive_side_d; - typedef typename Get_functor::type Linear_rank_d; - typedef typename Get_functor::type Linearly_independent_d; - typedef typename Get_functor::type Oriented_side_d; - typedef typename Get_functor::type Side_of_bounded_sphere_d; - - typedef typename Get_functor::type Center_of_sphere_d; - typedef Center_of_sphere_d Construct_center_d; // RangeSearchTraits - typedef typename Get_functor::type Construct_circumcenter_d; - typedef typename Get_functor::type Value_at_d; - typedef typename Get_functor::type Point_of_sphere_d; - typedef typename Get_functor::type Orthogonal_vector_d; - typedef typename Get_functor::type Linear_base_d; - typedef typename Get_functor::type Construct_min_vertex_d; - typedef typename Get_functor::type Construct_max_vertex_d; - - typedef typename Get_functor::type Compute_weight_d; - typedef typename Get_functor::type Point_drop_weight_d; - - //TODO: - //typedef ??? 
Intersect_d; - - - Compute_coordinate_d compute_coordinate_d_object()const{ return Compute_coordinate_d(*this); } - Has_on_positive_side_d has_on_positive_side_d_object()const{ return Has_on_positive_side_d(*this); } - Compare_lexicographically_d compare_lexicographically_d_object()const{ return Compare_lexicographically_d(*this); } - Equal_d equal_d_object()const{ return Equal_d(*this); } - Less_lexicographically_d less_lexicographically_d_object()const{ return Less_lexicographically_d(*this); } - Less_or_equal_lexicographically_d less_or_equal_lexicographically_d_object()const{ return Less_or_equal_lexicographically_d(*this); } - Less_coordinate_d less_coordinate_d_object()const{ return Less_coordinate_d(*this); } - Orientation_d orientation_d_object()const{ return Orientation_d(*this); } - Oriented_side_d oriented_side_d_object()const{ return Oriented_side_d(*this); } - Point_dimension_d point_dimension_d_object()const{ return Point_dimension_d(*this); } - Point_of_sphere_d point_of_sphere_d_object()const{ return Point_of_sphere_d(*this); } - Side_of_oriented_sphere_d side_of_oriented_sphere_d_object()const{ return Side_of_oriented_sphere_d(*this); } - Power_side_of_power_sphere_d power_side_of_power_sphere_d_object()const{ return Power_side_of_power_sphere_d(*this); } - Power_center_d power_center_d_object()const{ return Power_center_d(*this); } - Power_distance_d power_distance_d_object()const{ return Power_distance_d(*this); } - Side_of_bounded_sphere_d side_of_bounded_sphere_d_object()const{ return Side_of_bounded_sphere_d(*this); } - Contained_in_affine_hull_d contained_in_affine_hull_d_object()const{ return Contained_in_affine_hull_d(*this); } - Contained_in_linear_hull_d contained_in_linear_hull_d_object()const{ return Contained_in_linear_hull_d(*this); } - Contained_in_simplex_d contained_in_simplex_d_object()const{ return Contained_in_simplex_d(*this); } - Construct_flat_orientation_d construct_flat_orientation_d_object()const{ return Construct_flat_orientation_d(*this); } - In_flat_orientation_d in_flat_orientation_d_object()const{ return In_flat_orientation_d(*this); } - In_flat_side_of_oriented_sphere_d in_flat_side_of_oriented_sphere_d_object()const{ return In_flat_side_of_oriented_sphere_d(*this); } - In_flat_power_side_of_power_sphere_d in_flat_power_side_of_power_sphere_d_object()const{ return In_flat_power_side_of_power_sphere_d(*this); } - Point_to_vector_d point_to_vector_d_object()const{ return Point_to_vector_d(*this); } - Vector_to_point_d vector_to_point_d_object()const{ return Vector_to_point_d(*this); } - Translated_point_d translated_point_d_object()const{ return Translated_point_d(*this); } - Scaled_vector_d scaled_vector_d_object()const{ return Scaled_vector_d(*this); } - Difference_of_vectors_d difference_of_vectors_d_object()const{ return Difference_of_vectors_d(*this); } - Difference_of_points_d difference_of_points_d_object()const{ return Difference_of_points_d(*this); } - Affine_rank_d affine_rank_d_object()const{ return Affine_rank_d(*this); } - Affinely_independent_d affinely_independent_d_object()const{ return Affinely_independent_d(*this); } - Linear_base_d linear_base_d_object()const{ return Linear_base_d(*this); } - Linear_rank_d linear_rank_d_object()const{ return Linear_rank_d(*this); } - Linearly_independent_d linearly_independent_d_object()const{ return Linearly_independent_d(*this); } - Midpoint_d midpoint_d_object()const{ return Midpoint_d(*this); } - Value_at_d value_at_d_object()const{ return Value_at_d(*this); } - /// Intersect_d 
intersect_d_object()const{ return Intersect_d(*this); } - Component_accessor_d component_accessor_d_object()const{ return Component_accessor_d(*this); } - Orthogonal_vector_d orthogonal_vector_d_object()const{ return Orthogonal_vector_d(*this); } - Construct_cartesian_const_iterator_d construct_cartesian_const_iterator_d_object()const{ return Construct_cartesian_const_iterator_d(*this); } - Construct_point_d construct_point_d_object()const{ return Construct_point_d(*this); } - Construct_vector_d construct_vector_d_object()const{ return Construct_vector_d(*this); } - Construct_segment_d construct_segment_d_object()const{ return Construct_segment_d(*this); } - Construct_sphere_d construct_sphere_d_object()const{ return Construct_sphere_d(*this); } - Construct_hyperplane_d construct_hyperplane_d_object()const{ return Construct_hyperplane_d(*this); } - Compute_squared_radius_d compute_squared_radius_d_object()const{ return Compute_squared_radius_d(*this); } - Squared_distance_d squared_distance_d_object()const{ return Squared_distance_d(*this); } - Squared_length_d squared_length_d_object()const{ return Squared_length_d(*this); } - Scalar_product_d scalar_product_d_object()const{ return Scalar_product_d(*this); } - Center_of_sphere_d center_of_sphere_d_object()const{ return Center_of_sphere_d(*this); } - Construct_circumcenter_d construct_circumcenter_d_object()const{ return Construct_circumcenter_d(*this); } - Construct_direction_d construct_direction_d_object()const{ return Construct_direction_d(*this); } - Construct_line_d construct_line_d_object()const{ return Construct_line_d(*this); } - Construct_ray_d construct_ray_d_object()const{ return Construct_ray_d(*this); } - Construct_iso_box_d construct_iso_box_d_object()const{ return Construct_iso_box_d(*this); } - Construct_aff_transformation_d construct_aff_transformation_d_object()const{ return Construct_aff_transformation_d(*this); } - Construct_min_vertex_d construct_min_vertex_d_object()const{ return Construct_min_vertex_d(*this); } - Construct_max_vertex_d construct_max_vertex_d_object()const{ return Construct_max_vertex_d(*this); } - Construct_weighted_point_d construct_weighted_point_d_object()const{ return Construct_weighted_point_d(*this); } - - Compute_weight_d compute_weight_d_object()const{ return Compute_weight_d(*this); } - Point_drop_weight_d point_drop_weight_d_object()const{ return Point_drop_weight_d(*this); } - - // Dummies for those required functors missing a concept. 
- typedef Null_functor Position_on_line_d; - Position_on_line_d position_on_line_d_object()const{return Null_functor();} - typedef Null_functor Barycentric_coordinates_d; - Barycentric_coordinates_d barycentric_coordinates_d_object()const{return Null_functor();} - - /* Not provided because they don't make sense here: - Lift_to_paraboloid_d - Project_along_d_axis_d - */ -}; -} - -#endif // CGAL_KD_KERNEL_D_INTERFACE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h deleted file mode 100644 index 99918ed2..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_KO_CONVERTER_H -#define CGAL_KD_KO_CONVERTER_H -#include -#include -#include // First_if_different -#include -namespace CGAL { -template struct KO_converter; -//TODO: It would probably be better if this was a Misc Functor in K1. -// This way K1 could chose how it wants to present its points (sparse -// iterator?) and derived classes would inherit it. - -namespace internal { -template -struct Point_converter_help { - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const { - typename Get_functor >::type i(k1); - typename Get_functor >::type cp(k2); - return cp(conv(i(p,Begin_tag())),conv(i(p,End_tag()))); - } -}; -#ifdef CGAL_CXX11 -// This doesn't seem so useful, the compiler should be able to handle -// the iterators just as efficiently. 
-template -struct Point_converter_help,K1,K2> { - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - template - result_type help(Indices, K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const { - typename Get_functor::type cc(k1); - typename Get_functor >::type cp(k2); - return cp(conv(cc(p,I))...); - } - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const { - return help(typename N_increasing_indices::type(),k1,k2,conv,p); - } -}; -#endif -} -template struct KO_converter -: internal::Point_converter_help -{}; - -template struct KO_converter{ - typedef typename Get_type::type K1_Vector; - - // Disabling is now done in KernelD_converter - // // can't use vector without at least a placeholder point because of this - // typedef typename K1:: Point K1_Point; - // typedef typename First_if_different::Type argument_type; - - typedef K1_Vector argument_type; - typedef typename Get_type::type result_type; - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& v) const { - typename Get_functor >::type i(k1); - typename Get_functor >::type cp(k2); - return cp(conv(i(v,Begin_tag())),conv(i(v,End_tag()))); - } -}; - -template struct KO_converter{ - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const { - typename Get_functor::type f(k1); - typename Get_functor >::type cs(k2); - return cs(conv(f(s,0)),conv(f(s,1))); - } -}; - -template struct KO_converter{ - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& h) const { - typename Get_functor::type ov(k1); - typename Get_functor::type ht(k1); - typename Get_functor >::type ch(k2); - return ch(conv(ov(h)),conv(ht(h))); - } -}; - -template struct KO_converter{ - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const { - typename Get_functor::type cos(k1); - typename Get_functor::type sr(k1); - typename Get_functor >::type cs(k2); - return cs(conv(cos(s)),conv(sr(s))); - } -}; - -template struct KO_converter{ - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - template - result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const { - typename Get_functor::type pdw(k1); - typename Get_functor::type pw(k1); - typename Get_functor >::type cwp(k2); - return cwp(conv(pdw(s)),conv(pw(s))); - } -}; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h deleted file mode 100644 index ddbdc37b..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h +++ /dev/null @@ -1,175 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. 
-// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_LA_EIGEN_H -#define CGAL_LA_EIGEN_H -#include -#ifndef CGAL_EIGEN3_ENABLED -#error Requires Eigen -#endif -#include -#include -#include -#include -#include -#include - -namespace CGAL { - -//FIXME: where could we use Matrix_base instead of Matrix? -// Dim_ real dimension -// Max_dim_ upper bound on the dimension -template struct LA_eigen { - typedef NT_ NT; - typedef Dim_ Dimension; - typedef Max_dim_ Max_dimension; - enum { dimension = Eigen_dimension::value }; - enum { max_dimension = Eigen_dimension::value }; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef LA_eigen< NT, D2, D3 > Other; - }; - template struct Property : boost::false_type {}; - template struct Property : boost::true_type {}; - template struct Property : boost::true_type {}; - template struct Property : boost::true_type {}; - - typedef Eigen::Matrix::value,1,Eigen::ColMajor|Eigen::AutoAlign,Eigen_dimension::value,1> Vector; - typedef Eigen::Matrix Dynamic_vector; - typedef Construct_eigen Construct_vector; - -#if (EIGEN_WORLD_VERSION>=3) - typedef NT const* Vector_const_iterator; -#else - typedef Iterator_from_indices Vector_const_iterator; -#endif - - templatestatic Vector_const_iterator vector_begin(Vec_ const&a){ -#if (EIGEN_WORLD_VERSION>=3) - return &a[0]; -#else - return Vector_const_iterator(a,0); -#endif - } - - templatestatic Vector_const_iterator vector_end(Vec_ const&a){ -#if (EIGEN_WORLD_VERSION>=3) - // FIXME: Isn't that dangerous if a is an expression and not a concrete vector? - return &a[0]+a.size(); -#else - return Vector_const_iterator(a,a.size()); -#endif - } - - typedef Eigen::Matrix Square_matrix; - typedef Eigen::Matrix Dynamic_matrix; - //TODO: don't pass on the values of Max_* for an expensive NT - // typedef ... Constructor - // typedef ... Accessor -#if 0 - private: - template class Canonicalize_vector { - typedef typename Dimension_eigen::type S1; - typedef typename Dimension_eigen::type S2; - public: - typedef typename Vector::type type; - }; - public: -#endif - - templatestatic int size_of_vector(Vec_ const&v){ - return (int)v.size(); - } - - templatestatic NT dot_product(Vec_ const&a,Vec_ const&b){ - return a.dot(b); - } - - template static int rows(Vec_ const&v) { - return (int)v.rows(); - } - template static int columns(Vec_ const&v) { - return (int)v.cols(); - } - - template static NT determinant(Mat_ const&m,bool=false){ - return m.determinant(); - } - - template static typename - Same_uncertainty_nt::type - sign_of_determinant(Mat_ const&m,bool=false) - { - return CGAL::sign(m.determinant()); - } - - template static int rank(Mat_ const&m){ - // return m.rank(); - // This one uses sqrt so cannot be used with Gmpq - // TODO: use different algo for different NT? 
- // Eigen::ColPivHouseholderQR decomp(m); - Eigen::FullPivLU decomp(m); - // decomp.setThreshold(0); - return static_cast(decomp.rank()); - } - - // m*a==b - template - static void solve(DV&a, DM const&m, V const& b){ - //a = m.colPivHouseholderQr().solve(b); - a = m.fullPivLu().solve(b); - } - template - static bool solve_and_check(DV&a, DM const&m, V const& b){ - //a = m.colPivHouseholderQr().solve(b); - a = m.fullPivLu().solve(b); - return b.isApprox(m*a); - } - - static Dynamic_matrix basis(Dynamic_matrix const&m){ - return m.fullPivLu().image(m); - } - - template static Vector homogeneous_add(Vec1 const&a,Vec2 const&b){ - //TODO: use compile-time size when available - int d=a.size(); - Vector v(d); - v << b[d-1]*a.topRows(d-1)+a[d-1]*b.topRows(d-1), a[d-1]*b[d-1]; - return v; - } - - template static Vector homogeneous_sub(Vec1 const&a,Vec2 const&b){ - int d=a.size(); - Vector v(d); - v << b[d-1]*a.topRows(d-1)-a[d-1]*b.topRows(d-1), a[d-1]*b[d-1]; - return v; - } - - template static std::pair homogeneous_dot_product(Vec1 const&a,Vec2 const&b){ - int d=a.size(); - return make_pair(a.topRows(d-1).dot(b.topRows(d-1)), a[d-1]*b[d-1]); - } - -}; -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h deleted file mode 100644 index 3636996f..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_LA_EIGEN_CONSTRUCTORS_H -#define CGAL_LA_EIGEN_CONSTRUCTORS_H -#include - -#if defined(BOOST_MSVC) -# pragma warning(push) -# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I' - // http://lists.boost.org/boost-users/2014/11/83291.php -#endif - -#ifndef CGAL_EIGEN3_ENABLED -#error Requires Eigen -#endif -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace CGAL { - template struct Construct_eigen { - typedef Vector_ result_type; - typedef typename Vector_::Scalar NT; - - private: - static void check_dim(int CGAL_assertion_code(d)){ - CGAL_assertion_code(int m = result_type::MaxSizeAtCompileTime;) - CGAL_assertion((m == Eigen::Dynamic) || (d <= m)); - } - public: - - struct Dimension { - // Initialize with NaN if possible? 
- result_type operator()(int d) const { - check_dim(d); - return result_type(d); - } - }; - - struct Iterator { - template - result_type operator()(int d,Iter const& f,Iter const& e) const { - check_dim(d); - CGAL_assertion(d==std::distance(f,e)); - result_type a(d); - // TODO: check the right way to do this - std::copy(f,e,&a[0]); - return a; - } - }; - -#if 0 - struct Iterator_add_one { - template - result_type operator()(int d,Iter const& f,Iter const& e) const { - check_dim(d); - CGAL_assertion(d==std::distance(f,e)+1); - result_type a(d); - std::copy(f,e,&a[0]); - a[d-1]=1; - return a; - } - }; -#endif - - struct Iterator_and_last { - template - result_type operator()(int d,Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const { - check_dim(d); - CGAL_assertion(d==std::distance(f,e)+1); - result_type a(d); - std::copy(f,e,&a[0]); - a[d-1]=CGAL_FORWARD(T,t); - return a; - } - }; - -#ifdef CGAL_CXX11 - struct Initializer_list { - // Fix T==NT? - template - result_type operator()(std::initializer_list l) const { - return Iterator()(l.size(),l.begin(),l.end()); - } - }; -#endif - - struct Values { -#ifdef CGAL_CXX11 - // TODO avoid going through Initializer_list which may cause extra copies. Possibly use forward_as_tuple. - template - result_type operator()(U&&...u) const { - check_dim(sizeof...(U)); // TODO: use static_assert - return Initializer_list()({forward_safe(u)...}); - } -#else - -#define CGAL_CODE(Z,N,_) result_type operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ - check_dim(N); \ - result_type a(N); \ - a << BOOST_PP_ENUM_PARAMS(N,t); \ - return a; \ -} -BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) -#undef CGAL_CODE - -#endif - }; - - struct Values_divide { -#ifdef CGAL_CXX11 - template - result_type operator()(H const&h,U&&...u) const { - check_dim(sizeof...(U)); // TODO: use static_assert - return Initializer_list()({Rational_traits().make_rational(std::forward(u),h)...}); - } -#else - -#define CGAL_VAR(Z,N,_) ( Rational_traits().make_rational( t##N ,h) ) -#define CGAL_CODE(Z,N,_) template result_type \ - operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ - check_dim(N); \ - result_type a(N); \ - a << BOOST_PP_ENUM(N,CGAL_VAR,); \ - return a; \ - } - BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR - -#endif - }; - }; -} -#if defined(BOOST_MSVC) -# pragma warning(pop) -#endif - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h deleted file mode 100644 index 9ecc2b63..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_LAZY_CARTESIAN_H -#define CGAL_KERNEL_D_LAZY_CARTESIAN_H - -#include -#include -#include -#include -#include -#include -#include - -namespace CGAL { - -template -struct Nth_iterator_element : private Store_kernel { - Nth_iterator_element(){} - Nth_iterator_element(K const&k):Store_kernel(k){} - typedef typename Get_type::value_tag>::type result_type; - template result_type operator()(CGAL_FORWARDABLE(U) u, int i) const { - typename Get_functor >::type ci(this->kernel()); - return *cpp0x::next(ci(CGAL_FORWARD(U,u),Begin_tag()),i); - } -}; - //typedef typename Functor::nth_element>::type nth_elem; -template::has_nth_element> -struct Select_nth_element_functor { - typedef Nth_iterator_element type; -}; -template -struct Select_nth_element_functor : - Get_functor::nth_element> {}; - -namespace internal { - template - struct Lazy_construction_maybe_nt { - typedef Lazy_construction type; - }; - template - struct Lazy_construction_maybe_nt { - typedef Lazy_construction_nt type; - }; -} - -template -struct Lazy_cartesian_types -{ - typedef typename typeset_intersection< - typename AK_::Object_list, - typename EK_::Object_list - >::type Object_list; - - typedef typename typeset_intersection< - typename AK_::Iterator_list, - typename EK_::Iterator_list - >::type Iterator_list; - - template ::type> struct Type {}; - template struct Type { - typedef Lazy< - typename Get_type::type, - typename Get_type::type, - typename Get_type::type, - E2A_> type; - }; - template struct Type { - typedef CGAL::Lazy_exact_nt::type> type; - }; - - template struct Iterator { - typedef typename iterator_tag_traits::value_tag Vt; - typedef typename Type::type V; - typedef typename Select_nth_element_functor::type AF; - typedef typename Select_nth_element_functor::type EF; - - typedef typename internal::Lazy_construction_maybe_nt< - Kernel_, AF, EF, is_NT_tag::value - >::type nth_elem; - - typedef Iterator_from_indices< - const typename Type::container>::type, - const V, V, nth_elem - > type; - }; -}; - -template -struct Lazy_cartesian : Dimension_base, - Lazy_cartesian_types > -{ - //CGAL_CONSTEXPR Lazy_cartesian(){} - //CGAL_CONSTEXPR Lazy_cartesian(int d):Base_(d){} - - //TODO: Do we want to store an AK and an EK? Or just references? - //FIXME: references would be better I guess. - //TODO: In any case, make sure that we don't end up storing this kernel for - //nothing (it is not empty but references empty kernels or something) - AK_ ak; EK_ ek; - AK_ const& approximate_kernel()const{return ak;} - EK_ const& exact_kernel()const{return ek;} - - typedef Lazy_cartesian Self; - typedef Lazy_cartesian_types Base; - //typedef typename Default::Get::type Kernel; - typedef Self Kernel; - typedef AK_ Approximate_kernel; - typedef EK_ Exact_kernel; - typedef E2A_ E2A; - typedef Approx_converter C2A; - typedef Exact_converter C2E; - - typedef typename Exact_kernel::Rep_tag Rep_tag; - typedef typename Exact_kernel::Kernel_tag Kernel_tag; - typedef typename Exact_kernel::Default_ambient_dimension Default_ambient_dimension; - typedef typename Exact_kernel::Max_ambient_dimension Max_ambient_dimension; - //typedef typename Exact_kernel::Flat_orientation Flat_orientation; - // Check that Approximate_kernel agrees with all that... - - template::type> struct Functor { - typedef Null_functor type; - }; - //FIXME: what do we do with D here? 
- template struct Functor { - typedef typename Get_functor::type FA; - typedef typename Get_functor::type FE; - typedef Filtered_predicate2 type; - }; - template struct Functor { - typedef typename Get_functor::type FA; - typedef typename Get_functor::type FE; - typedef Lazy_construction_nt type; - }; - template struct Functor { - typedef typename Get_functor::type FA; - typedef typename Get_functor::type FE; - typedef Lazy_construction type; - }; - - //typedef typename Iterator::type Point_cartesian_const_iterator; - //typedef typename Iterator::type Vector_cartesian_const_iterator; - - template - struct Construct_iter : private Store_kernel { - Construct_iter(){} - Construct_iter(Kernel const&k):Store_kernel(k){} - //FIXME: pass the kernel to the functor in the iterator - typedef U result_type; - template - result_type operator()(T const& t,Begin_tag)const{ - return result_type(t,0,this->kernel()); - } - template - result_type operator()(T const& t,End_tag)const{ - return result_type(t,Self().dimension(),this->kernel()); - } - }; - template struct Functor { - typedef Construct_iter::type>::type> type; - }; - - - //TODO: what about other functors of the Misc category? - // for Point_dimension, we should apply it to the approximate point - // for printing, we should??? just not do printing this way? -}; - - -} //namespace CGAL - -#endif // CGAL_KERNEL_D_LAZY_CARTESIAN_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h deleted file mode 100644 index 6d9f070f..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_TYPE_AFF_TRANSFORMATION_H -#define CGAL_KD_TYPE_AFF_TRANSFORMATION_H -#include -#include -#include - -// Dummy, that's all the Kernel_d concept requires, so a useful class will wait. 
- -namespace CGAL { -template -struct Aff_transformation { - typedef R_ R; -}; -namespace CartesianDKernelFunctors { -template struct Construct_aff_transformation { - CGAL_FUNCTOR_INIT_IGNORE(Construct_aff_transformation) - typedef R_ R; - typedef typename Get_type::type result_type; -#ifdef CGAL_CXX11 - template - result_type operator()(T&&...)const{return result_type();} -#else - result_type operator()()const{ - return result_type(); - } -#define CGAL_CODE(Z,N,_) template \ - result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const& BOOST_PP_INTERCEPT))const{ \ - return result_type(); \ - } - BOOST_PP_REPEAT_FROM_TO(1, 9, CGAL_CODE, _ ) -#undef CGAL_CODE - -#endif -}; -} -CGAL_KD_DEFAULT_TYPE(Aff_transformation_tag,(CGAL::Aff_transformation),(),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_aff_transformation),(Aff_transformation_tag),()); - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h deleted file mode 100644 index 14e35b01..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_TYPE_HYPERPLANE_H -#define CGAL_KD_TYPE_HYPERPLANE_H -#include -#include -#include -#include -#include -namespace CGAL { -template class Hyperplane { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Vector_; - Vector_ v_; - FT_ s_; - - public: - Hyperplane(Vector_ const&v, FT_ const&s): v_(v), s_(s) {} - // TODO: Add a piecewise constructor? 
- - Vector_ const& orthogonal_vector()const{return v_;} - FT_ translation()const{return s_;} -}; -namespace CartesianDKernelFunctors { -template struct Construct_hyperplane : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_hyperplane) - typedef typename Get_type::type result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type Vector; - typedef typename Get_type::type FT; - private: - struct One { - typedef int result_type; - templateint const& operator()(T const&)const{ - static const int one = 1; - return one; - } - }; - public: - - result_type operator()(Vector const&a, FT const&b)const{ - return result_type(a,b); - } - // Not really needed - result_type operator()()const{ - typename Get_functor >::type cv(this->kernel()); - return result_type(cv(),0); - } - - template - result_type through(Iter f, Iter e)const{ - typedef typename R_::LA LA; - typedef typename R_::Default_ambient_dimension D1; - typedef typename R_::Max_ambient_dimension D2; - typedef typename Increment_dimension::type D1i; - typedef typename Increment_dimension::type D2i; - - typedef Eigen::Matrix::value, Eigen_dimension::value, - Eigen::ColMajor|Eigen::AutoAlign, Eigen_dimension::value, Eigen_dimension::value> Matrix; - typedef Eigen::Matrix::value, 1, - Eigen::ColMajor|Eigen::AutoAlign, Eigen_dimension::value, 1> Vec; - typename Get_functor::type c(this->kernel()); - typename Get_functor >::type cv(this->kernel()); - typename Get_functor::type pd(this->kernel()); - - Point const& p0=*f; - int d = pd(p0); - Matrix m(d,d+1); - for(int j=0;j lu(m); - Vec res = lu.kernel().col(0); - return this->operator()(cv(d,LA::vector_begin(res),LA::vector_end(res)-1),res(d)); - } - template - result_type operator()(Iter f, Iter e, Point const&p, CGAL::Oriented_side s=ON_ORIENTED_BOUNDARY)const{ - result_type ret = through(f, e); - // I don't really like using ON_ORIENTED_BOUNDARY to mean that we don't care, we might as well not pass 'p' at all. - if (s == ON_ORIENTED_BOUNDARY) - return ret; - typename Get_functor::type os(this->kernel()); - CGAL::Oriented_side o = os(ret, p); - if (o == ON_ORIENTED_BOUNDARY || o == s) - return ret; - typename Get_functor::type ov(this->kernel()); - typename Get_functor >::type cv(this->kernel()); - return this->operator()(ov(ret.orthogonal_vector()), -ret.translation()); - } -}; -template struct Orthogonal_vector { - CGAL_FUNCTOR_INIT_IGNORE(Orthogonal_vector) - typedef typename Get_type::type Hyperplane; - typedef typename Get_type::type const& result_type; - result_type operator()(Hyperplane const&s)const{ - return s.orthogonal_vector(); - } -}; -template struct Hyperplane_translation { - CGAL_FUNCTOR_INIT_IGNORE(Hyperplane_translation) - typedef typename Get_type::type Hyperplane; - typedef typename Get_type::type result_type; - // TODO: Is_exact? - result_type operator()(Hyperplane const&s)const{ - return s.translation(); - } -}; -template struct Value_at : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Value_at) - typedef typename Get_type::type Hyperplane; - typedef typename Get_type::type Vector; - typedef typename Get_type::type Point; - typedef typename Get_type::type FT; - typedef FT result_type; - typedef typename Get_functor::type Dot; - typedef typename Get_functor::type P2V; - result_type operator()(Hyperplane const&h, Point const&p)const{ - Dot dot(this->kernel()); - P2V p2v(this->kernel()); - return dot(h.orthogonal_vector(),p2v(p)); - // Use Orthogonal_vector to make it generic? - // Copy the code from Scalar_product to avoid p2v? 
- } -}; -} -//TODO: Add a condition that the hyperplane type is the one from this file. -CGAL_KD_DEFAULT_TYPE(Hyperplane_tag,(CGAL::Hyperplane),(Vector_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_hyperplane),(Vector_tag,Hyperplane_tag),(Opposite_vector_tag,Oriented_side_tag)); -CGAL_KD_DEFAULT_FUNCTOR(Orthogonal_vector_tag,(CartesianDKernelFunctors::Orthogonal_vector),(Vector_tag,Hyperplane_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Hyperplane_translation_tag,(CartesianDKernelFunctors::Hyperplane_translation),(Hyperplane_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Value_at_tag,(CartesianDKernelFunctors::Value_at),(Point_tag,Vector_tag,Hyperplane_tag),(Scalar_product_tag,Point_to_vector_tag)); -} // namespace CGAL -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h deleted file mode 100644 index d053f351..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNELD_TYPES_ISO_BOX_H -#define CGAL_KERNELD_TYPES_ISO_BOX_H -#include -#include -#include -#include -#include -namespace CGAL { -template class Iso_box { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Point_; - typedef std::pair Data_; - Data_ data; - public: - Iso_box(){} - Iso_box(Point_ const&a, Point_ const&b): data(a,b) {} - Point_ min BOOST_PREVENT_MACRO_SUBSTITUTION ()const{ - return data.first; - } - Point_ max BOOST_PREVENT_MACRO_SUBSTITUTION ()const{ - return data.second; - } -}; -namespace CartesianDKernelFunctors { - template struct Construct_iso_box : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_iso_box) - typedef typename Get_type::type result_type; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type Cp_; - typedef typename Get_functor >::type Ci_; - result_type operator()(Point const&a, Point const&b)const{ - Cp_ cp(this->kernel()); - Ci_ ci(this->kernel()); - return result_type(cp( - make_transforming_pair_iterator(ci(a,Begin_tag()), ci(b,Begin_tag()), Min()), - make_transforming_pair_iterator(ci(a,End_tag()), ci(b,End_tag()), Min())), - cp( - make_transforming_pair_iterator(ci(a,Begin_tag()), ci(b,Begin_tag()), Max()), - make_transforming_pair_iterator(ci(a,End_tag()), ci(b,End_tag()), Max()))); - } - }; - - template struct Construct_min_vertex { - CGAL_FUNCTOR_INIT_IGNORE(Construct_min_vertex) - typedef typename Get_type::type argument_type; - //TODO: make result_type a reference - typedef typename Get_type::type result_type; - result_type operator()(argument_type const&b)const{ - return b.min BOOST_PREVENT_MACRO_SUBSTITUTION (); - } - }; - template struct Construct_max_vertex { - CGAL_FUNCTOR_INIT_IGNORE(Construct_max_vertex) 
- typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - result_type operator()(argument_type const&b)const{ - return b.max BOOST_PREVENT_MACRO_SUBSTITUTION (); - } - }; -} -//TODO (other types as well) only enable these functors if the Iso_box type is the one defined in this file... -CGAL_KD_DEFAULT_TYPE(Iso_box_tag,(CGAL::Iso_box),(Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_iso_box),(Iso_box_tag,Point_tag),(Construct_ttag,Construct_ttag)); -CGAL_KD_DEFAULT_FUNCTOR(Construct_min_vertex_tag,(CartesianDKernelFunctors::Construct_min_vertex),(Iso_box_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_max_vertex_tag,(CartesianDKernelFunctors::Construct_max_vertex),(Iso_box_tag),()); -} // namespace CGAL - -#endif // CGAL_KERNELD_TYPES_ISO_BOX_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h deleted file mode 100644 index 6a09571c..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNELD_TYPES_LINE_H -#define CGAL_KERNELD_TYPES_LINE_H -#include -#include -#include -namespace CGAL { -template class Line { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Point_; - typedef std::pair Data_; - Data_ data; - public: - Line(){} - Line(Point_ const&a, Point_ const&b): data(a,b) {} - Point_ point(int i)const{ - if(i==0) return data.first; - if(i==1) return data.second; - throw "not implemented"; - } - Line opposite()const{ - return Line(data.second,data.first); - } -}; -namespace CartesianDKernelFunctors { - template struct Construct_line : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_line) - typedef typename Get_type::type result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type Vector; - typedef typename Get_functor::type Tp_; - //typedef typename Get_functor::type Dp_; - //typedef typename Get_functor::type Sv_; - result_type operator()(Point const&a, Point const&b)const{ - return result_type(a,b); - } - result_type operator()(Point const&a, typename First_if_different::Type const&b)const{ - Tp_ tp(this->kernel()); - return result_type(a,tp(a,b)); - } - }; -} -CGAL_KD_DEFAULT_TYPE(Line_tag,(CGAL::Line),(Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_line),(Line_tag,Point_tag,Vector_tag),(Translated_point_tag)); - -} // namespace CGAL - -#endif // CGAL_KERNELD_TYPES_LINE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h deleted file mode 100644 index be845e76..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNELD_TYPES_RAY_H -#define CGAL_KERNELD_TYPES_RAY_H -#include -#include -#include -namespace CGAL { -template class Ray { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Point_; - typedef typename Get_type::type Vector_; - typedef std::pair Data_; - Data_ data; - public: - Ray(){} - Ray(Point_ const&a, Vector_ const&b): data(a,b) {} - Point_ source()const{ - return data.first; - } - // FIXME: return a R_::Direction? 
- Vector_ direction()const{ - return data.second; - } -}; -namespace CartesianDKernelFunctors { - template struct Construct_ray : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_ray) - typedef typename Get_type::type result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type Vector; - typedef typename Get_functor::type Dp_; - //typedef typename Get_functor::type Tp_; - //typedef typename Get_functor::type Sv_; - result_type operator()(Point const&a, Vector const&b)const{ - return result_type(a,b); - } - result_type operator()(Point const&a, typename First_if_different::Type const&b)const{ - Dp_ dp(this->kernel()); - return result_type(a,dp(b,a)); - } - }; -} -CGAL_KD_DEFAULT_TYPE(Ray_tag,(CGAL::Ray),(Point_tag,Vector_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_ray),(Point_tag,Ray_tag,Vector_tag),(Difference_of_points_tag)); - -} // namespace CGAL - -#endif // CGAL_KERNELD_TYPES_RAY_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h deleted file mode 100644 index 38361c2b..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNELD_SEGMENTD_H -#define CGAL_KERNELD_SEGMENTD_H -#include -#include -#include -namespace CGAL { -template class Segment { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Point_; - //typedef typename R_::Vector Vector_; - //typedef typename Get_functor >::type Cv_; -// typedef typename R_::Squared_distance Csd_; - typedef std::pair Data_; - Data_ data; - public: - //typedef Segmentd Segment; -#ifdef CGAL_CXX11 - //FIXME: don't forward directly, piecewise_constuct should call the point construction functor (I guess? or is it unnecessary?) 
- template::type...>,std::tuple>::value>::type> - Segment(U&&...u):data(std::forward(u)...){} -#else - Segment(){} - Segment(Point_ const&a, Point_ const&b): data(a,b) {} - //template - //Segment(A const&,T1 const&t1,T2 const&t2) -#endif - Point_ source()const{return data.first;} - Point_ target()const{return data.second;} - Point_ operator[](int i)const{ - if((i%2)==0) - return source(); - else - return target(); - } - Segment opposite()const{ - return Segment(target(),source()); - } - //Vector_ vector()const{ - // return Cv_()(data.first,data.second); - //} -// FT_ squared_length()const{ -// return Csd_()(data.first,data.second); -// } -}; - -namespace CartesianDKernelFunctors { - -template struct Construct_segment : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_segment) - typedef R_ R; - typedef typename Get_type::type Point; - typedef typename Get_type::type Segment; - typedef typename Get_functor >::type CP; - typedef Segment result_type; - result_type operator()(Point const&a, Point const&b)const{ - return result_type(a,b); - } - // Not really needed, especially since it forces us to store the kernel - result_type operator()()const{ - Point p = typename Get_functor >::type (this->kernel()) (); - return result_type (p, p); - } - // T should only be std::piecewise_construct_t, but we shouldn't fail if it doesn't exist. - template - result_type operator()(CGAL_FORWARDABLE(T),CGAL_FORWARDABLE(U) u,CGAL_FORWARDABLE(V) v)const{ - CP cp(this->kernel()); - result_type r = {{ - call_on_tuple_elements(cp, CGAL_FORWARD(U,u)), - call_on_tuple_elements(cp, CGAL_FORWARD(V,v)) }}; - return r; - } -}; - -// This should be part of Construct_point, according to Kernel_23 conventions -template struct Segment_extremity { - CGAL_FUNCTOR_INIT_IGNORE(Segment_extremity) - typedef R_ R; - typedef typename Get_type::type Point; - typedef typename Get_type::type Segment; - typedef Point result_type; - result_type operator()(Segment const&s, int i)const{ - if(i==0) return s.source(); - CGAL_assertion(i==1); - return s.target(); - } -#ifdef CGAL_CXX11 - result_type operator()(Segment &&s, int i)const{ - if(i==0) return std::move(s.source()); - CGAL_assertion(i==1); - return std::move(s.target()); - } -#endif -}; -} // CartesianDKernelFunctors - -CGAL_KD_DEFAULT_TYPE(Segment_tag,(CGAL::Segment),(Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_segment),(Segment_tag,Point_tag),(Construct_ttag)); -CGAL_KD_DEFAULT_FUNCTOR(Segment_extremity_tag,(CartesianDKernelFunctors::Segment_extremity),(Segment_tag,Point_tag),()); - -} // namespace CGAL - -#endif // CGAL_KERNELD_SEGMENTD_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h deleted file mode 100644 index 114410b4..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_TYPE_SPHERE_H -#define CGAL_KD_TYPE_SPHERE_H -#include -#include -namespace CGAL { -template class Sphere { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Point_; - Point_ c_; - FT_ r2_; - - public: - Sphere(Point_ const&p, FT_ const&r2): c_(p), r2_(r2) {} - // TODO: Add a piecewise constructor? - - Point_ const& center()const{return c_;} - FT_ const& squared_radius()const{return r2_;} -}; - -namespace CartesianDKernelFunctors { -template struct Construct_sphere : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_sphere) - typedef typename Get_type::type result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type FT; - result_type operator()(Point const&a, FT const&b)const{ - return result_type(a,b); - } - // Not really needed - result_type operator()()const{ - typename Get_functor >::type cp(this->kernel()); - return result_type(cp(),0); - } - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type cc(this->kernel()); - typename Get_functor::type sd(this->kernel()); - - // It should be possible to avoid copying the center by moving this code to a constructor. - Point center = cc(f, e); - FT const& r2 = sd(center, *f); - return this->operator()(CGAL_MOVE(center), r2); - } -}; - -template struct Center_of_sphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Center_of_sphere) - typedef typename Get_type::type Sphere; - // No reference because of the second overload - typedef typename Get_type::type result_type; - - result_type const& operator()(Sphere const&s)const{ - return s.center(); - } - - template - result_type operator()(Iter b, Iter e)const{ - typename Get_functor >::type cs(this->kernel()); - return operator()(cs(b,e)); // computes the radius needlessly - } -}; - -template struct Squared_radius { - CGAL_FUNCTOR_INIT_IGNORE(Squared_radius) - typedef typename Get_type::type Sphere; - typedef typename Get_type::type const& result_type; - // TODO: Is_exact? - result_type operator()(Sphere const&s)const{ - return s.squared_radius(); - } -}; - -// FIXME: Move it to the generic functors, using the two above and conditional to the existence of sqrt(FT) -template struct Point_of_sphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Point_of_sphere) - typedef R_ R; - typedef typename Get_type::type FT; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_type::type Sphere; - typedef typename Get_functor >::type CP; - typedef typename Get_functor >::type CI; - typedef typename Get_functor::type PD; - typedef Point result_type; - typedef Sphere first_argument_type; - typedef int second_argument_type; - struct Trans : std::binary_function { - FT const& r_; int idx; bool sgn; - Trans (int n, FT const& r, bool b) : r_(r), idx(n), sgn(b) {} - FT operator()(FT const&x, int i)const{ - return (i == idx) ? sgn ? x + r_ : x - r_ : x; - } - }; - result_type operator()(Sphere const&s, int i)const{ - CI ci(this->kernel()); - PD pd(this->kernel()); - typedef boost::counting_iterator Count; - Point const&c = s.center(); - int d=pd(c); - bool last = (i == d); - FT r = sqrt(s.squared_radius()); - Trans t(last ? 
0 : i, r, !last); - return CP(this->kernel())(make_transforming_pair_iterator(ci(c,Begin_tag()),Count(0),t),make_transforming_pair_iterator(ci(c,End_tag()),Count(d),t)); - } -}; -} -CGAL_KD_DEFAULT_TYPE(Sphere_tag,(CGAL::Sphere),(Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_sphere),(Sphere_tag,Point_tag),(Construct_ttag,Compute_point_cartesian_coordinate_tag,Squared_distance_tag,Squared_distance_to_origin_tag,Point_dimension_tag)); -CGAL_KD_DEFAULT_FUNCTOR(Center_of_sphere_tag,(CartesianDKernelFunctors::Center_of_sphere),(Sphere_tag,Point_tag),(Construct_ttag)); -CGAL_KD_DEFAULT_FUNCTOR(Squared_radius_tag,(CartesianDKernelFunctors::Squared_radius),(Sphere_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Point_of_sphere_tag,(CartesianDKernelFunctors::Point_of_sphere),(Sphere_tag,Point_tag),(Construct_ttag, Construct_ttag)); -} // namespace CGAL -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h deleted file mode 100644 index 1caf8701..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_TYPE_WP_H -#define CGAL_KD_TYPE_WP_H -#include -#include -namespace CGAL { -namespace KerD { -template class Weighted_point { - typedef typename Get_type::type FT_; - typedef typename Get_type::type Point_; - Point_ c_; - FT_ w_; - - public: - Weighted_point(Point_ const&p, FT_ const&w): c_(p), w_(w) {} - // TODO: Add a piecewise constructor? 
- - Point_ const& point()const{return c_;} - FT_ const& weight()const{return w_;} -}; -} - -namespace CartesianDKernelFunctors { -template struct Construct_weighted_point : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_weighted_point) - typedef typename Get_type::type result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type FT; - result_type operator()(Point const&a, FT const&b)const{ - return result_type(a,b); - } - // Not really needed - result_type operator()()const{ - typename Get_functor >::type cp(this->kernel()); - return result_type(cp(),0); - } -}; - -template struct Point_drop_weight { - CGAL_FUNCTOR_INIT_IGNORE(Point_drop_weight) - typedef typename Get_type::type argument_type; - typedef typename Get_type::type const& result_type; - // Returning a reference is fragile - - result_type operator()(argument_type const&s)const{ - return s.point(); - } -}; - -template struct Point_weight { - CGAL_FUNCTOR_INIT_IGNORE(Point_weight) - typedef typename Get_type::type argument_type; - typedef typename Get_type::type result_type; - - result_type operator()(argument_type const&s)const{ - return s.weight(); - } -}; - -template struct Power_distance : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Power_distance) - typedef typename Get_type::type first_argument_type; - typedef first_argument_type second_argument_type; - typedef typename Get_type::type result_type; - - result_type operator()(first_argument_type const&a, second_argument_type const&b)const{ - typename Get_functor::type pdw(this->kernel()); - typename Get_functor::type pw(this->kernel()); - typename Get_functor::type sd(this->kernel()); - return sd(pdw(a),pdw(b))-pw(a)-pw(b); - } -}; -template struct Power_distance_to_point : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Power_distance_to_point) - typedef typename Get_type::type first_argument_type; - typedef typename Get_type::type second_argument_type; - typedef typename Get_type::type result_type; - - result_type operator()(first_argument_type const&a, second_argument_type const&b)const{ - typename Get_functor::type pdw(this->kernel()); - typename Get_functor::type pw(this->kernel()); - typename Get_functor::type sd(this->kernel()); - return sd(pdw(a),b)-pw(a); - } -}; - -template struct Power_side_of_power_sphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Power_side_of_power_sphere) - typedef R_ R; - typedef typename Get_type::type result_type; - - template - result_type operator()(Iter const& f, Iter const& e, Pt const& p0) const { - typename Get_functor::type ptr(this->kernel()); - typename Get_functor::type pdw(this->kernel()); - typename Get_functor::type pw(this->kernel()); - return ptr ( - make_transforming_iterator (f, pdw), - make_transforming_iterator (e, pdw), - make_transforming_iterator (f, pw), - pdw (p0), - pw (p0)); - } -}; - -template struct In_flat_power_side_of_power_sphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(In_flat_power_side_of_power_sphere) - typedef R_ R; - typedef typename Get_type::type result_type; - - template - result_type operator()(Fo const& fo, Iter const& f, Iter const& e, Pt const& p0) const { - typename Get_functor::type ptr(this->kernel()); - typename Get_functor::type pdw(this->kernel()); - typename Get_functor::type pw(this->kernel()); - return ptr ( - fo, - make_transforming_iterator (f, pdw), - make_transforming_iterator (e, pdw), - make_transforming_iterator (f, pw), - pdw (p0), - pw (p0)); - } -}; - -// Construct a point at (weighted) distance 0 from all the input -template 
struct Power_center : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Power_center) - typedef typename Get_type::type WPoint; - typedef WPoint result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type FT; - template - result_type operator()(Iter f, Iter e)const{ - // 2*(x-y).c == (x^2-wx^2)-(y^2-wy^2) - typedef typename R_::LA LA; - typedef typename LA::Square_matrix Matrix; - typedef typename LA::Vector Vec; - typedef typename LA::Construct_vector CVec; - typename Get_functor::type c(this->kernel()); - typename Get_functor >::type cp(this->kernel()); - typename Get_functor::type pd(this->kernel()); - typename Get_functor::type sdo(this->kernel()); - typename Get_functor::type pdp(this->kernel()); - typename Get_functor::type pdw(this->kernel()); - typename Get_functor::type pw(this->kernel()); - typename Get_functor >::type cwp(this->kernel()); - - WPoint const& wp0 = *f; - Point const& p0 = pdw(wp0); - int d = pd(p0); - FT const& n0 = sdo(p0) - pw(wp0); - Matrix m(d,d); - Vec b = typename CVec::Dimension()(d); - // Write the point coordinates in lines. - int i; - for(i=0; ++f!=e; ++i) { - WPoint const& wp=*f; - Point const& p=pdw(wp); - FT const& np = sdo(p) - pw(wp); - for(int j=0;j),(Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_weighted_point),(Weighted_point_tag,Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Point_drop_weight_tag,(CartesianDKernelFunctors::Point_drop_weight),(Weighted_point_tag,Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Point_weight_tag,(CartesianDKernelFunctors::Point_weight),(Weighted_point_tag,Point_tag),()); -CGAL_KD_DEFAULT_FUNCTOR(Power_side_of_power_sphere_tag,(CartesianDKernelFunctors::Power_side_of_power_sphere),(Weighted_point_tag),(Power_side_of_power_sphere_raw_tag,Point_drop_weight_tag,Point_weight_tag)); -CGAL_KD_DEFAULT_FUNCTOR(In_flat_power_side_of_power_sphere_tag,(CartesianDKernelFunctors::In_flat_power_side_of_power_sphere),(Weighted_point_tag),(In_flat_power_side_of_power_sphere_raw_tag,Point_drop_weight_tag,Point_weight_tag)); -CGAL_KD_DEFAULT_FUNCTOR(Power_distance_tag,(CartesianDKernelFunctors::Power_distance),(Weighted_point_tag,Point_tag),(Squared_distance_tag,Point_drop_weight_tag,Point_weight_tag)); -CGAL_KD_DEFAULT_FUNCTOR(Power_distance_to_point_tag,(CartesianDKernelFunctors::Power_distance_to_point),(Weighted_point_tag,Point_tag),(Squared_distance_tag,Point_drop_weight_tag,Point_weight_tag)); -CGAL_KD_DEFAULT_FUNCTOR(Power_center_tag,(CartesianDKernelFunctors::Power_center),(Weighted_point_tag,Point_tag),(Compute_point_cartesian_coordinate_tag,Construct_ttag,Construct_ttag,Point_dimension_tag,Squared_distance_to_origin_tag,Point_drop_weight_tag,Point_weight_tag,Power_distance_to_point_tag)); -} // namespace CGAL -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h deleted file mode 100644 index 0ad9bb36..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. 
-// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_ARRAY_H -#define CGAL_VECTOR_ARRAY_H -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include - - - -namespace CGAL { - -// May not be safe to use with dim!=max_dim. -// In that case, we should store the real dim next to the array. -template struct Array_vector { - typedef NT_ NT; - typedef Dim_ Dimension; - typedef Max_dim_ Max_dimension; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef Array_vector< NT, D2, D3 > Other; - }; - template struct Property : boost::false_type {}; - - static const unsigned d_=Max_dim_::value; - CGAL_static_assertion(d_ != (unsigned)UNKNOWN_DIMENSION); - - typedef cpp0x::array Vector; - struct Construct_vector { - struct Dimension { - // Initialize with NaN if possible? - Vector operator()(unsigned CGAL_assertion_code(d)) const { - CGAL_assertion(d<=d_); - return Vector(); - } - }; - - struct Iterator { - template - Vector operator()(unsigned CGAL_assertion_code(d),Iter const& f,Iter const& e) const { - CGAL_assertion(d==(unsigned) std::distance(f,e)); - CGAL_assertion(d<=d_); - //TODO: optimize for forward iterators - Vector a; - std::copy(f,e,a.begin()); - return a; - } - }; - -#if 0 - struct Iterator_add_one { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e) const { - CGAL_assertion(d==std::distance(f,e)+1); - CGAL_assertion(d<=d_); - //TODO: optimize - Vector a; - std::copy(f,e,a.begin()); - a.back()=1; - return a; - } - }; -#endif - - struct Iterator_and_last { - template - Vector operator()(unsigned CGAL_assertion_code(d),Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const { - CGAL_assertion(d==std::distance(f,e)+1); - CGAL_assertion(d<=d_); - //TODO: optimize for forward iterators - Vector a; - std::copy(f,e,a.begin()); - a.back()=CGAL_FORWARD(T,t); - return a; - } - }; - - struct Values { -#ifdef CGAL_CXX11 - template - Vector operator()(U&&...u) const { - static_assert(sizeof...(U)<=d_,"too many arguments"); - Vector a={{forward_safe(u)...}}; - return a; - } -#else - -#define CGAL_CODE(Z,N,_) Vector operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ - CGAL_assertion(N<=d_); \ - Vector a={{BOOST_PP_ENUM_PARAMS(N,t)}}; \ - return a; \ -} -BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) -#undef CGAL_CODE - -#endif - }; - - struct Values_divide { -#ifdef CGAL_CXX11 - template - Vector operator()(H const& h,U&&...u) const { - static_assert(sizeof...(U)<=d_,"too many arguments"); - Vector a={{Rational_traits().make_rational(std::forward(u),h)...}}; - return a; - } -#else - -#define CGAL_VAR(Z,N,_) Rational_traits().make_rational( t##N , h) -#define CGAL_CODE(Z,N,_) template Vector \ - operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ - CGAL_assertion(N<=d_); \ - Vector a={{BOOST_PP_ENUM(N,CGAL_VAR,_)}}; \ - return a; \ - } - BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR - -#endif - }; - }; - - typedef NT const* Vector_const_iterator; - static Vector_const_iterator vector_begin(Vector const&a){ - return &a[0]; - } - static Vector_const_iterator vector_end(Vector const&a){ - return &a[0]+d_; // Don't know 
the real size - } - static unsigned size_of_vector(Vector const&){ - return d_; // Don't know the real size - } - -}; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h deleted file mode 100644 index 954a3c1b..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_AVX4_H -#define CGAL_VECTOR_AVX4_H - -#if !defined __AVX__ || (__GNUC__ * 100 + __GNUC_MINOR__ < 408) -#error Requires AVX and gcc 4.8+ -#endif -#include - -#include -#include -#include // CGAL::Sign -#include // CGAL::sign - - - -namespace CGAL { - - struct Avx_vector_4 { - typedef double NT; - typedef Dimension_tag<4> Dimension; - typedef Dimension_tag<4> Max_dimension; - // No Rebind_dimension, this is a building block - template struct Property : boost::false_type {}; - template struct Property - : boost::true_type {}; - /* MAYBE? - template struct Property - : boost::true_type {}; - */ - template struct Property - : boost::true_type {}; - template struct Property - : boost::true_type {}; - template struct Property - : boost::true_type {}; - - typedef __m256d Vector; - struct Construct_vector { - struct Dimension { - // Initialize with NaN? 
- Vector operator()(unsigned d) const { - CGAL_assertion(d==4); - return Vector(); - } - }; - - struct Iterator { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e) const { - CGAL_assertion(d==4); - double x0 = *f; - double x1 = *++f; - double x2 = *++f; - double x3 = *++f; - CGAL_assertion(++f==e); - Vector a = { x0, x1, x2, x3 }; - return a; - } - }; - - struct Iterator_and_last { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const { - CGAL_assertion(d==4); - double x0 = *f; - double x1 = *++f; - double x2 = *++f; - CGAL_assertion(++f==e); - Vector a = { x0, x1, x2, t }; - return a; - } - }; - - struct Values { - Vector operator()(double a,double b,double c,double d) const { - Vector r = { a, b, c, d }; - return r; - } - }; - - struct Values_divide { - Vector operator()(double h,double a,double b,double c,double d) const { - // {a,b,c,d}/{h,h,h,h} should be roughly the same - Vector r = { a/h, b/h, c/h, d/h }; - return r; - } - }; - }; - - public: - typedef double const* Vector_const_iterator; - static inline Vector_const_iterator vector_begin(Vector const&a){ - return (Vector_const_iterator)(&a); - } - static inline Vector_const_iterator vector_end(Vector const&a){ - return (Vector_const_iterator)(&a)+4; - } - static inline unsigned size_of_vector(Vector){ - return 4; - } - static inline double dot_product(__m256d x, __m256d y){ - __m256d p=x*y; - __m256d z=_mm256_hadd_pd(p,p); - return z[0]+z[2]; - } - private: - static inline __m256d avx_sym(__m256d x){ -#if 0 - return __builtin_shuffle(x,(__m256i){2,3,0,1}); -#else - return _mm256_permute2f128_pd(x,x,1); -#endif - } - static inline __m256d avx_left(__m256d x){ -#if 0 - return __builtin_shuffle(x,(__m256i){1,2,3,0}); -#else -#ifdef __AVX2__ - return _mm256_permute4x64_pd(x,1+2*4+3*16+0*64); -#else - __m256d s = _mm256_permute2f128_pd(x,x,1); - return _mm256_shuffle_pd(x,s,5); -#endif -#endif - } - static inline __m256d avx_right(__m256d x){ -#if 0 - return __builtin_shuffle(x,(__m256i){3,0,1,2}); -#else -#ifdef __AVX2__ - return _mm256_permute4x64_pd(x,3+0*4+1*16+2*64); -#else - __m256d s = _mm256_permute2f128_pd(x,x,1); - return _mm256_shuffle_pd(s,x,5); -#endif -#endif - } - static inline double avx_altprod(__m256d x, __m256d y){ - __m256d p=x*y; - __m256d z=_mm256_hsub_pd(p,p); - return z[0]+z[2]; - } - public: - static double - determinant_of_vectors(Vector a, Vector b, Vector c, Vector d) { - __m256d x=a*avx_left(b)-avx_left(a)*b; - __m256d yy=a*avx_sym(b); - __m256d y=yy-avx_sym(yy); - __m256d z0=x*avx_sym(c); - __m256d z1=avx_left(x)*c; - __m256d z2=y*avx_left(c); - __m256d z=z0+z1-z2; - return avx_altprod(z,avx_right(d)); - } - static CGAL::Sign - sign_of_determinant_of_vectors(Vector a, Vector b, Vector c, Vector d) { - return CGAL::sign(determinant_of_vectors(a,b,c,d)); - } - - private: - static inline __m256d avx3_right(__m256d x){ -#if 0 - return __builtin_shuffle(x,(__m256i){2,0,1,3}); // can replace 3 with anything -#else -#ifdef __AVX2__ - return _mm256_permute4x64_pd(x,2+0*4+1*16+3*64); -#else - __m256d s = _mm256_permute2f128_pd(x,x,1); - return _mm256_shuffle_pd(s,x,12); -#endif -#endif - } - public: - static inline double dot_product_omit_last(__m256d x, __m256d y){ - __m256d p=x*y; - __m128d q=_mm256_extractf128_pd(p,0); - double z=_mm_hadd_pd(q,q)[0]; - return z+p[2]; - } - // Note: without AVX2, is it faster than the scalar computation? 
- static double - determinant_of_vectors_omit_last(Vector a, Vector b, Vector c) { - __m256d x=a*avx3_right(b)-avx3_right(a)*b; - return dot_product_omit_last(c,avx3_right(x)); - } - static CGAL::Sign - sign_of_determinant_of_vectors_omit_last(Vector a, Vector b, Vector c) { - return CGAL::sign(determinant_of_vectors_omit_last(a,b,c)); - } - - }; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h deleted file mode 100644 index b8efbe28..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_DET_ITER_PTS_ITER_VEC_H -#define CGAL_VECTOR_DET_ITER_PTS_ITER_VEC_H -#include -#include -#include -#include - -namespace CGAL { - -template ::value, - bool = LA::template Property::value> -struct Add_determinant_of_iterator_to_points_from_iterator_to_vectors : LA { - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_iterator_to_vectors Other; - }; -}; - -template -struct Add_determinant_of_iterator_to_points_from_iterator_to_vectors - : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_iterator_to_vectors Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - // TODO: use std::minus, boost::bind, etc - template struct Minus_fixed { - T const& a; - Minus_fixed(T const&a_):a(a_){} - T operator()(T const&b)const{return b-a;} - }; - template - static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Minus_fixed f(a); - return LA::determinant_of_iterator_to_vectors(make_transforming_iterator(first,f),make_transforming_iterator(end,f)); - } - template - static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Minus_fixed f(a); - return LA::sign_of_determinant_of_iterator_to_vectors(make_transforming_iterator(first,f),make_transforming_iterator(end,f)); - } -}; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h deleted file mode 100644 index 71a31d81..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_DET_ITER_PTS_PTS_H -#define CGAL_VECTOR_DET_ITER_PTS_PTS_H -#include -#include - -namespace CGAL { - -template ::value, - bool = LA::template Property::value> -struct Add_determinant_of_iterator_to_points_from_points : LA { - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_points Other; - }; -}; - -//FIXME: Use variadics and boost so it works in any dimension. -template -struct Add_determinant_of_iterator_to_points_from_points -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_points Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; CGAL_assertion(++first==end); - return LA::determinant_of_points(a,b,c); - } - template - static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_points(a,b,c); - } -}; - -template -struct Add_determinant_of_iterator_to_points_from_points -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_points Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; CGAL_assertion(++first==end); - return LA::determinant_of_points(a,b,c,d); - } - template - static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_points(a,b,c,d); - } -}; - -template -struct Add_determinant_of_iterator_to_points_from_points -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_points Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; CGAL_assertion(++first==end); - return LA::determinant_of_points(a,b,c,d,e); - } - template - static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_points(a,b,c,d,e); - } -}; - -template -struct Add_determinant_of_iterator_to_points_from_points -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_points Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; ++first; - Vector const&f=*first; CGAL_assertion(++first==end); - return LA::determinant_of_points(a,b,c,d,e,f); - } - template - static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; ++first; - Vector const&f=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_points(a,b,c,d,e,f); - } -}; - -template -struct Add_determinant_of_iterator_to_points_from_points -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_points_from_points Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; ++first; - Vector const&f=*first; ++first; - Vector const&g=*first; CGAL_assertion(++first==end); - return LA::determinant_of_points(a,b,c,d,e,f,g); - } - template - static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; ++first; - Vector const&f=*first; ++first; - Vector const&g=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_points(a,b,c,d,e,f,g); - } -}; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h deleted file mode 100644 index f096d6c7..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_DET_ITER_VEC_VEC_H -#define CGAL_VECTOR_DET_ITER_VEC_VEC_H -#include -#include - -namespace CGAL { - -template ::value, - bool = LA::template Property::value> -struct Add_determinant_of_iterator_to_vectors_from_vectors : LA { - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; - }; -}; - -//FIXME: Use variadics and boost so it works in any dimension. -template -struct Add_determinant_of_iterator_to_vectors_from_vectors -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; CGAL_assertion(++first==end); - return LA::determinant_of_vectors(a,b); - } - template - static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_vectors(a,b); - } -}; - -template -struct Add_determinant_of_iterator_to_vectors_from_vectors -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; CGAL_assertion(++first==end); - return LA::determinant_of_vectors(a,b,c); - } - template - static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_vectors(a,b,c); - } -}; - -template -struct Add_determinant_of_iterator_to_vectors_from_vectors -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; CGAL_assertion(++first==end); - return LA::determinant_of_vectors(a,b,c,d); - } - template - static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_vectors(a,b,c,d); - } -}; - -template -struct Add_determinant_of_iterator_to_vectors_from_vectors -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; CGAL_assertion(++first==end); - return LA::determinant_of_vectors(a,b,c,d,e); - } - template - static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_vectors(a,b,c,d,e); - } -}; - -template -struct Add_determinant_of_iterator_to_vectors_from_vectors -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - template - static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; ++first; - Vector const&f=*first; CGAL_assertion(++first==end); - return LA::determinant_of_vectors(a,b,c,d,e,f); - } - template - static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ - Vector const&a=*first; ++first; - Vector const&b=*first; ++first; - Vector const&c=*first; ++first; - Vector const&d=*first; ++first; - Vector const&e=*first; ++first; - Vector const&f=*first; CGAL_assertion(++first==end); - return LA::sign_of_determinant_of_vectors(a,b,c,d,e,f); - } -}; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h deleted file mode 100644 index 7ddb73c3..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_DETPTS_H -#define CGAL_VECTOR_DETPTS_H -#include -#include - -namespace CGAL { - -template ::value, - bool = LA::template Property::value - && LA::template Property::value> -struct Add_determinant_of_points_from_vectors_and_minus : LA { - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_points_from_vectors_and_minus Other; - }; -}; - -//FIXME: Use variadics and boost so it works in any dimension. -template -struct Add_determinant_of_points_from_vectors_and_minus -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_points_from_vectors_and_minus Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT determinant_of_points(Vector const&a, Vector const&b, - Vector const&c){ - return LA::determinant_of_vectors(b-a,c-a); - } - static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, - Vector const&c){ - return LA::sign_of_determinant_of_vectors(b-a,c-a); - } -}; - -template -struct Add_determinant_of_points_from_vectors_and_minus -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_points_from_vectors_and_minus Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d){ - return LA::determinant_of_vectors(b-a,c-a,d-a); - } - static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d){ - return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a); - } -}; - -template -struct Add_determinant_of_points_from_vectors_and_minus -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_points_from_vectors_and_minus Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e){ - return LA::determinant_of_vectors(b-a,c-a,d-a,e-a); - } - static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e){ - return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a); - } -}; - -template -struct Add_determinant_of_points_from_vectors_and_minus -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_points_from_vectors_and_minus Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f){ - return LA::determinant_of_vectors(b-a,c-a,d-a,e-a,f-a); - } - static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f){ - return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a,f-a); - } -}; - -template -struct Add_determinant_of_points_from_vectors_and_minus -, Max_dim_, false, true> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef Add_determinant_of_points_from_vectors_and_minus Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f, - Vector const&g){ - return LA::determinant_of_vectors(b-a,c-a,d-a,e-a,f-a,g-a); - } - static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f, - Vector const&g){ - return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a,f-a,g-a); - } -}; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h deleted file mode 100644 index 64eafe69..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_DETVEC_SMALL_H -#define CGAL_VECTOR_DETVEC_SMALL_H -#include -#include -#include - -#define CGAL_ALLOWED_INCLUSION 1 - -#define CGAL_CLASS Add_determinant_of_vectors_small_dim -#define CGAL_TAG Has_determinant_of_vectors_tag -#define CGAL_FUNC determinant_of_vectors -#define CGAL_SIGN_FUNC sign_of_determinant_of_vectors -#define CGAL_SHIFT 0 - -#include - -#undef CGAL_CLASS -#undef CGAL_TAG -#undef CGAL_FUNC -#undef CGAL_SIGN_FUNC -#undef CGAL_SHIFT - -#define CGAL_CLASS Add_determinant_of_vectors_omit_last_small_dim -#define CGAL_TAG Has_determinant_of_vectors_omit_last_tag -#define CGAL_FUNC determinant_of_vectors_omit_last -#define CGAL_SIGN_FUNC sign_of_determinant_of_vectors_omit_last -#define CGAL_SHIFT 1 - -#include - -#undef CGAL_CLASS -#undef CGAL_TAG -#undef CGAL_FUNC -#undef CGAL_SIGN_FUNC -#undef CGAL_SHIFT - -#undef CGAL_ALLOWED_INCLUSION - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h deleted file mode 100644 index b4856742..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_ALLOWED_INCLUSION -#error Must not include this header directly -#endif -#if !defined(CGAL_TAG) \ - || ! defined(CGAL_CLASS) \ - || ! defined(CGAL_FUNC) \ - || ! defined(CGAL_SIGN_FUNC) \ - || ! defined(CGAL_SHIFT) - -#error Forgot one macro -#endif - -namespace CGAL { - -template ::value> -struct CGAL_CLASS : LA { - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef CGAL_CLASS Other; - }; -}; - -template -struct CGAL_CLASS -, Max_dim_, false> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef CGAL_CLASS Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT CGAL_FUNC(Vector const&a, Vector const&b){ - return CGAL::determinant_of_vectors(a,b); - } - template - static Sign CGAL_SIGN_FUNC(V1 const&a, V2 const&b){ - return CGAL::sign_of_determinant_of_vectors(a,b); - } -}; - -template -struct CGAL_CLASS -, Max_dim_, false> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef CGAL_CLASS Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT CGAL_FUNC(Vector const&a, Vector const&b, - Vector const&c){ - return CGAL::determinant_of_vectors(a,b,c); - } - static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, - Vector const&c){ - return CGAL::sign_of_determinant_of_vectors(a,b,c); - } -}; - -template -struct CGAL_CLASS -, Max_dim_, false> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef CGAL_CLASS Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT CGAL_FUNC(Vector const&a, Vector const&b, - Vector const&c, Vector const&d){ - return CGAL::determinant_of_vectors(a,b,c,d); - } - static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, - Vector const&c, Vector const&d){ - return CGAL::sign_of_determinant_of_vectors(a,b,c,d); - } -}; - -template -struct CGAL_CLASS -, Max_dim_, false> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef CGAL_CLASS Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT CGAL_FUNC(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e){ - return CGAL::determinant_of_vectors(a,b,c,d,e); - } - static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e){ - return CGAL::sign_of_determinant_of_vectors(a,b,c,d,e); - } -}; - -template -struct CGAL_CLASS -, Max_dim_, false> : LA { - typedef typename LA::NT NT; - typedef typename LA::Vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef typename LA::template Rebind_dimension LA2; - typedef CGAL_CLASS Other; - }; - template struct Property : LA::template Property
{}; - template struct Property : - boost::true_type {}; - - static NT CGAL_FUNC(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f){ - return CGAL::determinant_of_vectors(a,b,c,d,e,f); - } - static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f){ - return CGAL::sign_of_determinant_of_vectors(a,b,c,d,e,f); - } -}; - -} diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h deleted file mode 100644 index d4cfeeb1..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KD_MIX_VECTOR_H -#define CGAL_KD_MIX_VECTOR_H -#include -namespace CGAL { - -template -struct Mix_vector -: Dynamic_::template Rebind_dimension::Other -{ - template - struct Rebind_dimension { - typedef Mix_vector Other; - }; -}; - -template -struct Mix_vector, Max_dim_> -: Static_::template Rebind_dimension, Max_dim_>::Other -{ - template - struct Rebind_dimension { - typedef Mix_vector Other; - }; -}; -} -#endif - diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h deleted file mode 100644 index 2a75385c..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_SSE2_H -#define CGAL_VECTOR_SSE2_H - -// Check what needs adapting for clang, intel and microsoft -#if !defined __SSE2__ || (__GNUC__ * 100 + __GNUC_MINOR__ < 408) -#error Requires SSE2 and gcc 4.8+ -#endif -#include // FIXME: other platforms call it differently - -#include -#include -#include // CGAL::Sign -#include // CGAL::sign - - - -namespace CGAL { - - struct Sse_vector_2 { - typedef double NT; - typedef Dimension_tag<2> Dimension; - typedef Dimension_tag<2> Max_dimension; - // No Rebind_dimension, this is a building block - template struct Property : boost::false_type {}; - template struct Property - : boost::true_type {}; - /* MAYBE? 
- template struct Property - : boost::true_type {}; - */ - template struct Property - : boost::true_type {}; - template struct Property - : boost::true_type {}; - - typedef __m128d Vector; - struct Construct_vector { - struct Dimension { - // Initialize with NaN? - Vector operator()(unsigned d) const { - CGAL_assertion(d==2); - return Vector(); - } - }; - - struct Iterator { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e) const { - CGAL_assertion(d==2); - double x0 = *f; - double x1 = *++f; - CGAL_assertion(++f==e); - Vector a = { x0, x1 }; - return a; - } - }; - - struct Iterator_and_last { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const { - CGAL_assertion(d==2); - Vector a = { *f, t }; - CGAL_assertion(++f==e); - return a; - } - }; - - struct Values { - Vector operator()(double a,double b) const { - Vector r = { a, b }; - return r; - } - }; - - struct Values_divide { - Vector operator()(double h,double a,double b) const { - // {a,b}/{h,h} is probably slower - Vector r = { a/h, b/h }; - return r; - } - }; - }; - - typedef double const* Vector_const_iterator; - static inline Vector_const_iterator vector_begin(Vector const&a){ - return (Vector_const_iterator)(&a); - } - static inline Vector_const_iterator vector_end(Vector const&a){ - return (Vector_const_iterator)(&a)+2; - } - static inline unsigned size_of_vector(Vector){ - return 2; - } - public: - - static double determinant_of_vectors(Vector a, Vector b) { - __m128d c = _mm_shuffle_pd (b, b, 1); // b1, b0 - __m128d d = a * c; // a0*b1, a1*b0 -#ifdef __SSE3__ - __m128d e = _mm_hsub_pd (d, d); - return e[0]; -#else - return d[0]-d[1]; -#endif - } - static CGAL::Sign sign_of_determinant_of_vectors(Vector a, Vector b) { - return CGAL::sign(determinant_of_vectors(a,b)); - } - - static double dot_product(Vector a,Vector b){ -#ifdef __SSE4_1__ - return _mm_dp_pd (a, b, 1+16+32)[0]; -#else - __m128d p = a * b; -#if defined __SSE3__ - __m128d s = _mm_hadd_pd (p, p); - return s[0]; -#else - return p[0]+p[1]; -#endif -#endif - }; - }; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h deleted file mode 100644 index b85a3734..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_2INT_H -#define CGAL_VECTOR_2INT_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - - -// What are the pros and cons of having NT be int vs double? 
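For context on that question: the deleted Vector_2_int classes below appear to store 2D coordinates as bounded 32-bit integers, advertise double as the public NT, and promote to a wider integer type (int_fast64_t or __int128) before forming the 2x2 determinant, so the orientation sign is computed exactly. The standalone sketch that follows illustrates the idea under the assumption of 32-bit input coordinates; the function names (orient2d_exact, orient2d_double) are invented for this illustration and are not part of these headers.

// Illustrative sketch (not part of the patch): exact integer orientation sign
// versus the same expression in double.
#include <cstdint>
#include <iostream>

// Exact: products of two 32-bit values fit in 64 bits, so nothing rounds.
int orient2d_exact(int32_t ax, int32_t ay, int32_t bx, int32_t by,
                   int32_t cx, int32_t cy) {
  int64_t x0 = int64_t(bx) - ax, x1 = int64_t(by) - ay;
  int64_t y0 = int64_t(cx) - ax, y1 = int64_t(cy) - ay;
  int64_t det = x0 * y1 - x1 * y0;   // fits in 64 bits for 32-bit input
  return (det > 0) - (det < 0);      // sign: -1, 0 or +1
}

// Same determinant in double: large, near-degenerate inputs can round.
int orient2d_double(double ax, double ay, double bx, double by,
                    double cx, double cy) {
  double det = (bx - ax) * (cy - ay) - (by - ay) * (cx - ax);
  return (det > 0) - (det < 0);
}

int main() {
  // A collinear triple: both versions print 0 here.
  std::cout << orient2d_exact(0, 0, 1 << 20, 1, 1 << 21, 2) << '\n';
  std::cout << orient2d_double(0, 0, 1 << 20, 1, 1 << 21, 2) << '\n';
}

With 32-bit inputs every intermediate product fits in 64 bits, so the integer path never rounds; the double expression can lose the sign of a near-degenerate triple once the exact products need more than the 53 bits of a double mantissa, which is the trade-off the comment above is alluding to.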
- -namespace CGAL { - struct Vector_2_int_prop1 { - typedef double NT; // try lying a bit - typedef int32_t NT1; // what is really stored - typedef int32_t NT1b; // slightly longer - typedef int_fast64_t NT2; // longer type for computations - typedef int_fast64_t NT2b; // slightly longer - bool check_limits(int32_t x){return std::abs(x)<(1<<30);} - // TODO: find nice bounds - }; -#ifdef __SIZEOF_INT128__ - struct Vector_2_int_prop2 { - typedef double NT; - typedef int32_t NT1; - typedef int_fast64_t NT1b; - typedef int_fast64_t NT2; - typedef __int128 NT2b; - bool check_limits(int32_t){return true;} - // take a template/int64_t input and still check the limits? - }; - struct Vector_2_int_prop3 { - typedef long double NT; - typedef int64_t NT1; - typedef int64_t NT1b; - typedef __int128 NT2; - typedef __int128 NT2b; - enum { has_limit=true }; - bool check_limits(int32_t x){return std::abs(x)<(1L<<62);} - // TODO: find nice bounds - }; -#endif - - template - struct Vector_2_int : Prop { - using typename Prop::NT; - using typename Prop::NT1; - using typename Prop::NT1b; - using typename Prop::NT2; - using typename Prop::NT2b; - using Prop::check_limits; - - typedef Dimension_tag<2> Dimension; - typedef Dimension_tag<2> Max_dimension; - // No Rebind_dimension, this is a building block - template struct Property : boost::false_type {}; - //template struct Property - // : boost::true_type {}; - template struct Property - : boost::true_type {}; - //template struct Property - // : boost::true_type {}; - // Advertise somehow that the sign_of_determinant* are exact? - - typedef cpp0x::array Vector; - struct Construct_vector { - struct Dimension { - Vector operator()(unsigned d) const { - CGAL_assertion(d==2); - return Vector(); - } - }; - - // TODO (for all constructors): check that input fits in NT1... - struct Iterator { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e) const { - CGAL_assertion(d==2); - NT1 x0 = *f; - NT1 x1 = *++f; - CGAL_assertion (++f == e); - CGAL_assertion (check_limits(x0) && check_limits(x1)); - Vector a = { x0, x1 }; - return a; - } - }; - - struct Iterator_and_last { - template - Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const { - CGAL_assertion(d==2); - NT1 x = *f; - CGAL_assertion (++f == e); - CGAL_assertion (check_limits(x) && check_limits(t)); - Vector a = { x, t }; - return a; - } - }; - - struct Values { - Vector operator()(NT1 a,NT1 b) const { - CGAL_assertion (check_limits(a) && check_limits(b)); - Vector r = { a, b }; - return r; - } - }; - - /* - // Maybe safer not to provide it - struct Values_divide { - Vector operator()(double h,double a,double b) const { - Vector r = { a/h, b/h }; - return r; - } - }; - */ - }; - - // Since we lie about NT, be consistent about it - typedef transforming_iterator,NT1 const*> Vector_const_iterator; - static inline Vector_const_iterator vector_begin(Vector const&a){ - return Vector_const_iterator(a.begin()); - } - static inline Vector_const_iterator vector_end(Vector const&a){ - return Vector_const_iterator(a.end()); - } - static inline unsigned size_of_vector(Vector){ - return 2; - } - - // for unsigned NT1, check what changes to do. - // return NT or NT2? 
- static NT determinant_of_vectors(Vector a, Vector b) { - return CGAL::determinant_of_vectors(a,b); - } - static CGAL::Sign sign_of_determinant_of_vectors(Vector a, Vector b) { - return CGAL::sign_of_determinant_of_vectors(a,b); - } - - static NT determinant_of_points(Vector a, Vector b, Vector c) { - // could be faster to convert to NT directly - NT1b a0=a[0]; NT1b a1=a[1]; - NT1b x0=b[0]-a0; NT1b x1=b[1]-a1; - NT1b y0=c[0]-a0; NT1b y1=c[1]-a1; - return CGAL::determinant(x0,x1,y0,y1); - } - static CGAL::Sign sign_of_determinant_of_points(Vector a, Vector b, Vector c) { - NT1b a0=a[0]; NT1b a1=a[1]; - NT1b x0=b[0]-a0; NT1b x1=b[1]-a1; - NT2b y0=c[0]-a0; NT2b y1=c[1]-a1; - return CGAL::compare(x0*y1,x1*y0); - } - }; - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h deleted file mode 100644 index f9cc4e3c..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_VECTOR_VECTOR_H -#define CGAL_VECTOR_VECTOR_H -#include -#include -#include -#include -#include -#include -#include -namespace CGAL { - -//Derive from a class that doesn't depend on Dim, or still use Dim for checking? -template struct Vector_vector { - typedef NT_ NT; - typedef Dim_ Dimension; - typedef Max_dim_ Max_dimension; - typedef std::vector Vector; - template< class D2, class D3=D2 > - struct Rebind_dimension { - typedef Vector_vector< NT, D2, D3 > Other; - }; - template struct Property : boost::false_type {}; - - struct Construct_vector { - struct Dimension { - Vector operator()(int d) const { - return Vector(d); - } - }; - - struct Iterator { - template - Vector operator()(int CGAL_assertion_code(d),Iter const& f,Iter const& e) const { - CGAL_assertion(d==std::distance(f,e)); - return Vector(f,e); - } - }; - - // unneeded thanks to Iterator_and_last? -#if 0 - struct Iterator_add_one { - template - Vector operator()(int CGAL_assertion_code(d),Iter const& f,Iter const& e) const { - CGAL_assertion(d==std::distance(f,e)+1); - Vector a; - a.reserve(d+1); - a.insert(a.end(),f,e); - a.push_back(1); - return a; - } - }; -#endif - - struct Iterator_and_last { - template - Vector operator()(int d,Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const { - CGAL_assertion(d==std::distance(f,e)+1); - Vector a; - a.reserve(d+1); - a.insert(a.end(),f,e); - a.push_back(CGAL_FORWARD(T,t)); - return a; - } - }; - - // useless, use a transform_iterator? 
-#if 0 - struct Iterator_and_last_divide { - template - Vector operator()(int d,Iter f,Iter const& e,T const&t) const { - CGAL_assertion(d==std::distance(f,e)+1); - Vector a; - a.reserve(d+1); - for(;f!=e;++f){ - a.push_back(*f/t); - } - return a; - } - }; -#endif - - struct Values { -#ifdef CGAL_CXX11 - template - Vector operator()(U&&...u) const { - //TODO: check the right number of {}, g++ accepts one and two - Vector a={forward_safe(u)...}; - return a; - } -#else - -#define CGAL_VAR(Z,N,_) a.push_back(t##N); -#define CGAL_CODE(Z,N,_) Vector operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ - Vector a; \ - a.reserve(N); \ - BOOST_PP_REPEAT(N,CGAL_VAR,) \ - return a; \ -} -BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR - -#endif - }; - - struct Values_divide { -#ifdef CGAL_CXX11 - template - Vector operator()(H const&h,U&&...u) const { - //TODO: do we want to cast at some point? - //e.g. to avoid 1/2 in integers - // ==> use Rational_traits().make_rational(x,y) ? - Vector a={Rational_traits().make_rational(std::forward(u),h)...}; - return a; - } -#else - -#define CGAL_VAR(Z,N,_) a.push_back(Rational_traits().make_rational( t##N ,h)); -#define CGAL_CODE(Z,N,_) template Vector \ - operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ - Vector a; \ - a.reserve(N); \ - BOOST_PP_REPEAT(N,CGAL_VAR,) \ - return a; \ - } - BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR - -#endif - }; - }; - typedef typename Vector::const_iterator Vector_const_iterator; - static Vector_const_iterator vector_begin(Vector const&a){ - return a.begin(); - } - static Vector_const_iterator vector_end(Vector const&a){ - return a.end(); - } - static int size_of_vector(Vector const&a){ - return (int)a.size(); - } -}; - - -} -#endif - diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h deleted file mode 100644 index 44e9aa96..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h +++ /dev/null @@ -1,305 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_CARTESIAN_WRAP_H -#define CGAL_KERNEL_D_CARTESIAN_WRAP_H - -#include -#include - -#if defined(BOOST_MSVC) -# pragma warning(push) -# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I' - // http://lists.boost.org/boost-users/2014/11/83291.php -#endif -#include -#include -#include -#include -#include -#include - -#include - -#include -#include -#include - -//TODO: do we want to store the kernel ref in the Object wrappers? It would allow for additions and operator[] and things like that to work, but objects would still need to be created by functors. 
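As a rough illustration of the adapter pattern this file implements (Forward_rep unwrapping wrapper arguments to their rep before calling a base-kernel functor, and the Functor machinery wrapping the result back), here is a minimal standalone sketch. All names in it (Wrapped, Unwrap, Adapt, Midpoint_base) are hypothetical and greatly simplified; the real code additionally handles iterators of wrappers, reference counting, and preprocessor fallbacks for pre-C++11 compilers.

// Illustrative sketch (not part of the patch) of the wrap/unwrap functor
// pattern: a thin wrapper owns a base "rep", and an adaptor functor unwraps
// its arguments, calls the base functor, then wraps the result again.
template <class Rep>
struct Wrapped {                 // wrapper: just stores the base representation
  Rep rep_;
  const Rep& rep() const { return rep_; }
};

struct Unwrap {                  // pass wrappers through as their rep,
  template <class Rep>           // leave everything else untouched
  const Rep& operator()(const Wrapped<Rep>& w) const { return w.rep(); }
  template <class T>
  const T& operator()(const T& t) const { return t; }
};

template <class BaseFunctor, class Result>
struct Adapt {                   // adaptor: unwrap args, call base, rewrap result
  BaseFunctor base;
  template <class... U>
  Wrapped<Result> operator()(const U&... u) const {
    return Wrapped<Result>{ base(Unwrap()(u)...) };
  }
};

// Example base functor working on raw reps (here: plain doubles).
struct Midpoint_base {
  double operator()(double a, double b) const { return (a + b) / 2; }
};

int main() {
  Adapt<Midpoint_base, double> midpoint{ Midpoint_base{} };
  Wrapped<double> p{ 1.0 }, q{ 3.0 };
  Wrapped<double> m = midpoint(p, q);   // arguments unwrapped, result rewrapped
  return m.rep() == 2.0 ? 0 : 1;
}

The point of the pattern is that the user-facing objects can stay thin views over the base representation while every geometric operation is still routed through base-kernel functors, which is what the TODO above is weighing against storing a kernel reference in each object.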
- -namespace CGAL { -namespace internal { -BOOST_MPL_HAS_XXX_TRAIT_DEF(Is_wrapper) -template::value> struct Is_wrapper { - enum { value=false }; - typedef Tag_false type; -}; -template struct Is_wrapper { - typedef typename T::Is_wrapper type; - enum { value=type::value }; -}; - -template::value> struct Is_wrapper_iterator { - enum { value=false }; - typedef Tag_false type; -}; -template struct Is_wrapper_iterator : - Is_wrapper::type>::value_type> -{ }; - -struct Forward_rep { -//TODO: make a good C++0X version with perfect forwarding -//#ifdef CGAL_CXX11 -//template ::type>::value&&!Is_wrapper_iterator::type>::value>::type> -//T&& operator()(typename std::remove_reference::type&& t) const {return static_cast(t);}; -//template ::type>::value&&!Is_wrapper_iterator::type>::value>::type> -//T&& operator()(typename std::remove_reference::type& t) const {return static_cast(t);}; -// -//template ::type>::value>::type> -//typename Type_copy_cvref::type::Rep>::type&& -//operator()(T&& t) const { -// return static_cast::type::Rep>::type&&>(t.rep()); -//}; -// -//template ::type>::value>::type> -//transforming_iterator::type> -//operator()(T&& t) const { -// return make_transforming_iterator(std::forward(t),Forward_rep()); -//}; -//#else -template ::value,bool=Is_wrapper_iterator::value> struct result_; -template struct result_{typedef T const& type;}; -template struct result_{typedef typename decay::type::Rep const& type;}; -template struct result_{typedef transforming_iterator::type> type;}; -template struct result; -template struct result : result_ {}; - -template typename boost::disable_if,Is_wrapper_iterator >,T>::type const& operator()(T const& t) const {return t;} -template typename boost::disable_if,Is_wrapper_iterator >,T>::type& operator()(T& t) const {return t;} - -template typename T::Rep const& operator()(T const& t, typename boost::enable_if >::type* = 0) const {return t.rep();} - -template transforming_iterator,T>::type> operator()(T const& t) const {return make_transforming_iterator(t,Forward_rep());} -//#endif -}; -} - -template ::value> -struct Map_wrapping_type : Get_type {}; -#define CGAL_REGISTER_OBJECT_WRAPPER(X) \ - template \ - struct Map_wrapping_type { \ - typedef Wrap::X##_d type; \ - } -CGAL_REGISTER_OBJECT_WRAPPER(Point); -CGAL_REGISTER_OBJECT_WRAPPER(Vector); -CGAL_REGISTER_OBJECT_WRAPPER(Segment); -CGAL_REGISTER_OBJECT_WRAPPER(Sphere); -CGAL_REGISTER_OBJECT_WRAPPER(Hyperplane); -CGAL_REGISTER_OBJECT_WRAPPER(Weighted_point); -#undef CGAL_REGISTER_OBJECT_WRAPPER - -// Note: this tends to be an all or nothing thing currently, wrapping -// only some types breaks, probably because we don't check whether the -// return type is indeed wrapped. -template < typename Base_ , typename Derived_ = Default > -struct Cartesian_wrap : public Base_ -{ - CGAL_CONSTEXPR Cartesian_wrap(){} - CGAL_CONSTEXPR Cartesian_wrap(int d):Base_(d){} - typedef Base_ Kernel_base; - typedef Cartesian_wrap Self; - // TODO: pass the 2 types Self and Derived to the wrappers, they can use Self for most purposes and Derived only for Kernel_traits' typedef R. - typedef typename Default::Get::type Derived; - // FIXME: The list doesn't belong here. 
- typedef boost::mpl::vector Wrapped_list; - - template - struct Type : Map_wrapping_type {}; - - //Translate the arguments - template ::type, - bool=Provides_functor::value, - bool=boost::mpl::contains::type>::type::value> - struct Functor { - typedef typename Get_functor::type B; - struct type { - B b; - type(){} - type(Self const&k):b(k){} - typedef typename B::result_type result_type; -#ifdef CGAL_CXX11 - template result_type operator()(U&&...u)const{ - return b(internal::Forward_rep()(u)...); - } -#else -#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) -#define CGAL_CODE(Z,N,_) template result_type \ - operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ - return b(BOOST_PP_ENUM(N,CGAL_VAR,)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE -#undef CGAL_VAR -// In case the last argument needs to be non-const. Fragile... -#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) -#define CGAL_CODE(Z,N,_) template result_type \ - operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u),V&v)const{ \ - return b(BOOST_PP_ENUM(N,CGAL_VAR,),internal::Forward_rep()(v)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,8,CGAL_CODE,_) -#undef CGAL_CODE -#undef CGAL_VAR -#endif - }; - }; - - // Preserve the difference between Null_functor and nothing. - template - struct Functor - : Get_functor {}; - - //Translate both the arguments and the result - //TODO: Check Is_wrapper instead of relying on map_result_tag? - template struct Functor { - typedef typename Get_functor::type B; - struct type { - B b; - type(){} - type(Self const&k):b(k){} - typedef typename map_result_tag::type result_tag; - // FIXME: Self or Derived? - typedef typename Get_type::type result_type; -#ifdef CGAL_CXX11 - template result_type operator()(U&&...u)const{ - return result_type(Eval_functor(),b,internal::Forward_rep()(u)...); - } -#else -#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) -#define CGAL_CODE(Z,N,_) template result_type \ - operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ - return result_type(Eval_functor(),b,BOOST_PP_ENUM(N,CGAL_VAR,)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE -#undef CGAL_VAR -#endif - }; - }; - -}; - -template < typename Base_ > -struct Cartesian_refcount : public Base_ -{ - CGAL_CONSTEXPR Cartesian_refcount(){} - CGAL_CONSTEXPR Cartesian_refcount(int d):Base_(d){} - typedef Base_ Kernel_base; - typedef Cartesian_refcount Self; - - // FIXME: Use object_list, or a list passed as argument, or anything - // automatic. 
- template struct Type : Get_type {}; -#define CGAL_Kernel_obj(X,Y) \ - template struct Type { typedef Ref_count_obj type; }; - - CGAL_Kernel_obj(Point,point) - CGAL_Kernel_obj(Vector,vector) -#undef CGAL_Kernel_obj - - template struct Dispatch { - //typedef typename map_functor_type::type f_t; - typedef typename map_result_tag::type r_t; - enum { - is_nul = boost::is_same::type,Null_functor>::value, - ret_rcobj = boost::is_same::value || boost::is_same::value - }; - }; - - //Translate the arguments - template::is_nul,bool=Dispatch::ret_rcobj> struct Functor { - typedef typename Get_functor::type B; - struct type { - B b; - type(){} - type(Self const&k):b(k){} - typedef typename B::result_type result_type; -#ifdef CGAL_CXX11 - template result_type operator()(U&&...u)const{ - return b(internal::Forward_rep()(u)...); - } -#else - result_type operator()()const{ - return b(); - } -#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) -#define CGAL_CODE(Z,N,_) template result_type \ - operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ - return b(BOOST_PP_ENUM(N,CGAL_VAR,)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE -#undef CGAL_VAR -#endif - }; - }; - - //Translate both the arguments and the result - template struct Functor { - typedef Null_functor type; - }; - - template struct Functor { - typedef typename Get_functor::type B; - struct type { - B b; - type(){} - type(Self const&k):b(k){} - typedef typename map_result_tag::type result_tag; - typedef typename Get_type::type result_type; -#ifdef CGAL_CXX11 - template result_type operator()(U&&...u)const{ - return result_type(Eval_functor(),b,internal::Forward_rep()(u)...); - } -#else - result_type operator()()const{ - return result_type(Eval_functor(),b); - } -#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) -#define CGAL_CODE(Z,N,_) template result_type \ - operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ - return result_type(Eval_functor(),b,BOOST_PP_ENUM(N,CGAL_VAR,)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE -#undef CGAL_VAR -#endif - }; - }; - -}; - -} //namespace CGAL - -#if defined(BOOST_MSVC) -# pragma warning(pop) -#endif - -#endif // CGAL_KERNEL_D_CARTESIAN_WRAP_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h deleted file mode 100644 index 54fd50bd..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_HYPERPLANE_D_H -#define CGAL_WRAPPER_HYPERPLANE_D_H - -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -namespace CGAL { -namespace Wrap { - -template -class Hyperplane_d : public Get_type::type -{ - typedef typename Get_type::type FT_; - typedef typename R_::Kernel_base Kbase; - typedef typename Get_type::type Vector_; - typedef typename Get_functor >::type CHBase; - typedef typename Get_functor::type OVBase; - typedef typename Get_functor::type HTBase; - - typedef Hyperplane_d Self; - CGAL_static_assertion((boost::is_same::type>::value)); - -public: - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - typedef typename Increment_dimension::type Feature_dimension; - - typedef typename Get_type::type Rep; - - const Rep& rep() const - { - return *this; - } - - Rep& rep() - { - return *this; - } - - typedef R_ R; - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Hyperplane_d(U&&...u) - : Rep(CHBase()(std::forward(u)...)){} - -// // called from Construct_point_d -// template explicit Point_d(Eval_functor&&,U&&...u) -// : Rep(Eval_functor(), std::forward(u)...){} - template explicit Hyperplane_d(Eval_functor&&,F&&f,U&&...u) - : Rep(std::forward(f)(std::forward(u)...)){} - -#if 0 - // the new standard may make this necessary - Point_d(Point_d const&)=default; - Point_d(Point_d &);//=default; - Point_d(Point_d &&)=default; -#endif - - // try not to use these - Hyperplane_d(Rep const& v) : Rep(v) {} - Hyperplane_d(Rep& v) : Rep(static_cast(v)) {} - Hyperplane_d(Rep&& v) : Rep(std::move(v)) {} - -#else - - Hyperplane_d() : Rep(CHBase()()) {} - - Hyperplane_d(Rep const& v) : Rep(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Hyperplane_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(CHBase()( \ - BOOST_PP_ENUM_PARAMS(N,t))) {} \ - \ - template \ - Hyperplane_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} - /* - template \ - Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} - */ - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - -#endif - - //TODO: if OVBase returns a reference to a base vector, cast it to a - //reference to a wrapper vector. Ugly but should be safe. - Vector_ orthogonal_vector()const{ - return Vector_(Eval_functor(),OVBase(),rep()); - } - FT_ translation()const{ - return HTBase()(rep()); - } - - -}; - -} //namespace Wrap -} //namespace CGAL - -#endif // CGAL_WRAPPER_SPHERE_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h deleted file mode 100644 index 0718c947..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h +++ /dev/null @@ -1,284 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_POINT_D_H -#define CGAL_WRAPPER_POINT_D_H - -#include -#include -#include -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -namespace CGAL { -namespace Wrap { - -template -class Point_d : public Get_type::type - // Deriving won't work if the point is just a __m256d. - // Test boost/std::is_class for instance -{ - typedef typename Get_type::type RT_; - typedef typename Get_type::type FT_; - typedef typename R_::Kernel_base Kbase; - typedef typename Get_type::type Vector_; - typedef typename Get_functor >::type CPBase; - typedef typename Get_functor::type CCBase; - typedef typename Get_functor >::type CPI; - - - typedef Point_d Self; - CGAL_static_assertion((boost::is_same::type>::value)); - -public: - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - typedef Dimension_tag<0> Feature_dimension; - - typedef typename Get_type::type Rep; - //typedef typename CGAL::decay::type>::type Cartesian_const_iterator; - - const Rep& rep() const - { - return *this; - } - - Rep& rep() - { - return *this; - } - - typedef R_ R; - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Point_d(U&&...u) - : Rep(CPBase()(std::forward(u)...)){} - -// // called from Construct_point_d -// template explicit Point_d(Eval_functor&&,U&&...u) -// : Rep(Eval_functor(), std::forward(u)...){} - template explicit Point_d(Eval_functor&&,F&&f,U&&...u) - : Rep(std::forward(f)(std::forward(u)...)){} - -#if 0 - // the new standard may make this necessary - Point_d(Point_d const&)=default; - Point_d(Point_d &);//=default; - Point_d(Point_d &&)=default; -#endif - - // try not to use these - Point_d(Rep const& v) : Rep(v) {} - Point_d(Rep& v) : Rep(static_cast(v)) {} - Point_d(Rep&& v) : Rep(std::move(v)) {} - - // this one should be implicit - Point_d(Origin const& v) - : Rep(CPBase()(v)) {} - Point_d(Origin& v) - : Rep(CPBase()(v)) {} - Point_d(Origin&& v) - : Rep(CPBase()(std::move(v))) {} - -#else - - Point_d() : Rep(CPBase()()) {} - - Point_d(Rep const& v) : Rep(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Point_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(CPBase()( \ - BOOST_PP_ENUM_PARAMS(N,t))) {} \ - \ - template \ - Point_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} - /* - template \ - Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} - */ - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - - // this one should be implicit - Point_d(Origin const& o) - : Rep(CPBase()(o)) {} - -#endif - - typename boost::result_of::type cartesian(int i)const{ - return CCBase()(rep(),i); - } - typename boost::result_of::type operator[](int i)const{ - return CCBase()(rep(),i); - } - - typename boost::result_of::type cartesian_begin()const{ - return CPI()(rep(),Begin_tag()); - } - - typename boost::result_of::type cartesian_end()const{ - return CPI()(rep(),End_tag()); - } - - int dimension() const { - typedef typename Get_functor::type PDBase; - return PDBase()(rep()); - } - - /* - Direction_d direction() const - { - return R().construct_direction_d_object()(*this); - } - - Vector_d transform(const 
Aff_transformation_d &t) const - { - return t.transform(*this); - } - - Vector_d operator/(const RT& c) const - { - return R().construct_divided_vector_d_object()(*this,c); - } - - Vector_d operator/(const typename First_if_different::Type & c) const - { - return R().construct_divided_vector_d_object()(*this,c); - } - - typename Qualified_result_of::type - x() const - { - return R().compute_x_3_object()(*this); - } - - typename Qualified_result_of::type - y() const - { - return R().compute_y_3_object()(*this); - } - - typename Qualified_result_of::type - z() const - { - return R().compute_z_3_object()(*this); - } - - typename Qualified_result_of::type - hx() const - { - return R().compute_hx_3_object()(*this); - } - - typename Qualified_result_of::type - hy() const - { - return R().compute_hy_3_object()(*this); - } - - typename Qualified_result_of::type - hz() const - { - return R().compute_hz_3_object()(*this); - } - - typename Qualified_result_of::type - hw() const - { - return R().compute_hw_3_object()(*this); - } - - typename Qualified_result_of::type - cartesian(int i) const - { - CGAL_kernel_precondition( (i == 0) || (i == 1) || (i == 2) ); - if (i==0) return x(); - if (i==1) return y(); - return z(); - } - - typename Qualified_result_of::type - homogeneous(int i) const - { - CGAL_kernel_precondition( (i >= 0) || (i <= 3) ); - if (i==0) return hx(); - if (i==1) return hy(); - if (i==2) return hz(); - return hw(); - } - - typename Qualified_result_of::type - squared_length() const - { - return R().compute_squared_length_3_object()(*this); - } -*/ -}; -#if 0 -template Point_d::Point_d(Point_d &)=default; -#endif - -//TODO: IO - -template -std::ostream& operator <<(std::ostream& os, const Point_d& p) -{ - typedef typename R_::Kernel_base Kbase; - typedef typename Get_functor >::type CPI; - // Should just be "auto"... - typename CGAL::decay::Rep,Begin_tag) - >::type>::type - b = p.cartesian_begin(), - e = p.cartesian_end(); - os << p.dimension(); - for(; b != e; ++b){ - os << " " << *b; - } - return os; -} - -//template -//Vector_d operator+(const Vector_d& v,const Vector_d& w) const -//{ -// return typename R::template Construct::type()(v,w); -//} -// -//template -//Vector_d operator-(const Vector_d& v,const Vector_d& w) const -//{ -// return typename R::template Construct::type()(v,w); -//} - -} //namespace Wrap -} //namespace CGAL - -#endif // CGAL_WRAPPER_POINT_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h deleted file mode 100644 index f33e14c0..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h +++ /dev/null @@ -1,120 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_REF_COUNT_OBJ_H -#define CGAL_WRAPPER_REF_COUNT_OBJ_H - -#include -#include -#include -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -// no need for a fancy interface here, people can use the Point_d wrapper on -// top. - -namespace CGAL { - -template -class Ref_count_obj -{ - typedef typename R_::Kernel_base Kbase; - typedef typename Get_functor >::type CBase; - - typedef Ref_count_obj Self; - CGAL_static_assertion((boost::is_same::type>::value)); - -public: - typedef R_ R; - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - //typedef Dimension_tag<0> Feature_dimension; - - typedef typename Get_type::type Rep; - typedef Handle_for Data; - -private: - Data data; -public: - - const Rep& rep() const - { - return CGAL::get_pointee_or_identity(data); - } - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Ref_count_obj(U&&...u) - : data(Eval_functor(),CBase(),std::forward(u)...){} - - template explicit Ref_count_obj(Eval_functor&&,F&&f,U&&...u) - : data(Eval_functor(),std::forward(f),std::forward(u)...){} - - // try not to use these - Ref_count_obj(Rep const& v) : data(v) {} - Ref_count_obj(Rep& v) : data(static_cast(v)) {} - Ref_count_obj(Rep&& v) : data(std::move(v)) {} - - // Do we really need this for point? -// // this one should be implicit -// Ref_count_obj(Origin const& v) -// : data(Eval_functor(),CBase(),v) {} -// Ref_count_obj(Origin& v) -// : data(Eval_functor(),CBase(),v) {} -// Ref_count_obj(Origin&& v) -// : data(Eval_functor(),CBase(),std::move(v)) {} - -#else - - Ref_count_obj() : data(Eval_functor(),CBase()) {} - - Ref_count_obj(Rep const& v) : data(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Ref_count_obj(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : data(Eval_functor(),CBase(),BOOST_PP_ENUM_PARAMS(N,t)) {} \ - \ - template \ - Ref_count_obj(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : data(Eval_functor(),f,BOOST_PP_ENUM_PARAMS(N,t)) {} - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - template - Ref_count_obj(Eval_functor,F const& f) - : data(Eval_functor(),f) {} - -// // this one should be implicit -// Ref_count_obj(Origin const& o) -// : data(Eval_functor(),CBase(),o) {} - -#endif - -}; - -} //namespace CGAL - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h deleted file mode 100644 index bfb20a77..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_SEGMENT_D_H -#define CGAL_WRAPPER_SEGMENT_D_H - -#include -#include -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -namespace CGAL { -namespace Wrap { - -template -class Segment_d : public Get_type::type -{ - typedef typename Get_type::type RT_; - typedef typename Get_type::type FT_; - typedef typename R_::Kernel_base Kbase; - typedef typename Get_type::type Point_; - typedef typename Get_functor >::type CPBase; - typedef typename Get_functor >::type CSBase; - typedef typename Get_functor::type CSEBase; - - typedef Segment_d Self; - CGAL_static_assertion((boost::is_same::type>::value)); - -public: - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - typedef Dimension_tag<1> Feature_dimension; - - typedef typename Get_type::type Rep; - - const Rep& rep() const - { - return *this; - } - - Rep& rep() - { - return *this; - } - - typedef R_ R; - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Segment_d(U&&...u) - : Rep(CSBase()(std::forward(u)...)){} - -// // called from Construct_point_d -// template explicit Point_d(Eval_functor&&,U&&...u) -// : Rep(Eval_functor(), std::forward(u)...){} - template explicit Segment_d(Eval_functor&&,F&&f,U&&...u) - : Rep(std::forward(f)(std::forward(u)...)){} - -#if 0 - // the new standard may make this necessary - Point_d(Point_d const&)=default; - Point_d(Point_d &);//=default; - Point_d(Point_d &&)=default; -#endif - - // try not to use these - Segment_d(Rep const& v) : Rep(v) {} - Segment_d(Rep& v) : Rep(static_cast(v)) {} - Segment_d(Rep&& v) : Rep(std::move(v)) {} - -#else - - Segment_d() : Rep(CSBase()()) {} - - Segment_d(Rep const& v) : Rep(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Segment_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(CSBase()( \ - BOOST_PP_ENUM_PARAMS(N,t))) {} \ - \ - template \ - Segment_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} - /* - template \ - Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} - */ - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - -#endif - - //TODO: if CSEBase returns a reference to a base point, cast it to a - //reference to a wrapper point. Ugly but should be safe. - Point_ source()const{ - return Point_(Eval_functor(),CSEBase(),rep(),0); - } - Point_ target()const{ - return Point_(Eval_functor(),CSEBase(),rep(),1); - } - -}; - -} //namespace Wrap -} //namespace CGAL - -#endif // CGAL_WRAPPER_SEGMENT_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h deleted file mode 100644 index 87f0c66e..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_SPHERE_D_H -#define CGAL_WRAPPER_SPHERE_D_H - -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -namespace CGAL { -namespace Wrap { - -template -class Sphere_d : public Get_type::type -{ - typedef typename Get_type::type FT_; - typedef typename R_::Kernel_base Kbase; - typedef typename Get_type::type Point_; - typedef typename Get_functor >::type CSBase; - typedef typename Get_functor::type COSBase; - typedef typename Get_functor::type SRBase; - - typedef Sphere_d Self; - CGAL_static_assertion((boost::is_same::type>::value)); - -public: - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - typedef typename Increment_dimension::type Feature_dimension; - - typedef typename Get_type::type Rep; - - const Rep& rep() const - { - return *this; - } - - Rep& rep() - { - return *this; - } - - typedef R_ R; - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Sphere_d(U&&...u) - : Rep(CSBase()(std::forward(u)...)){} - -// // called from Construct_point_d -// template explicit Point_d(Eval_functor&&,U&&...u) -// : Rep(Eval_functor(), std::forward(u)...){} - template explicit Sphere_d(Eval_functor&&,F&&f,U&&...u) - : Rep(std::forward(f)(std::forward(u)...)){} - -#if 0 - // the new standard may make this necessary - Point_d(Point_d const&)=default; - Point_d(Point_d &);//=default; - Point_d(Point_d &&)=default; -#endif - - // try not to use these - Sphere_d(Rep const& v) : Rep(v) {} - Sphere_d(Rep& v) : Rep(static_cast(v)) {} - Sphere_d(Rep&& v) : Rep(std::move(v)) {} - -#else - - Sphere_d() : Rep(CSBase()()) {} - - Sphere_d(Rep const& v) : Rep(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Sphere_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(CSBase()( \ - BOOST_PP_ENUM_PARAMS(N,t))) {} \ - \ - template \ - Sphere_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} - /* - template \ - Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} - */ - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - -#endif - - //TODO: if COSBase returns a reference to a base point, cast it to a - //reference to a wrapper point. Ugly but should be safe. - Point_ center()const{ - return Point_(Eval_functor(),COSBase(),rep()); - } - FT_ squared_radius()const{ - return SRBase()(rep()); - } - -}; - -} //namespace Wrap -} //namespace CGAL - -#endif // CGAL_WRAPPER_SPHERE_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h deleted file mode 100644 index b7d1f0d0..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. 
-// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_VECTOR_D_H -#define CGAL_WRAPPER_VECTOR_D_H - -#include -#include -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -namespace CGAL { -namespace Wrap { - -template -class Vector_d : public Get_type::type -{ - typedef typename Get_type::type RT_; - typedef typename Get_type::type FT_; - typedef typename R_::Kernel_base Kbase; - typedef typename Get_type::type Point_; - typedef typename Get_functor >::type CVBase; - typedef typename Get_functor::type CCBase; - typedef typename Get_functor >::type CVI; - typedef typename Get_functor::type SLBase; - - typedef Vector_d Self; - CGAL_static_assertion((boost::is_same::type>::value)); - -public: - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - typedef Dimension_tag<0> Feature_dimension; - - //typedef typename R_::Vector_cartesian_const_iterator Cartesian_const_iterator; - typedef typename Get_type::type Rep; - - const Rep& rep() const - { - return *this; - } - - Rep& rep() - { - return *this; - } - - typedef R_ R; - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Vector_d(U&&...u) - : Rep(CVBase()(std::forward(u)...)){} - -// // called from Construct_vector_d -// template explicit Vector_d(Eval_functor&&,U&&...u) -// : Rep(Eval_functor(), std::forward(u)...){} - template explicit Vector_d(Eval_functor&&,F&&f,U&&...u) - : Rep(std::forward(f)(std::forward(u)...)){} - -#if 0 - // the new standard may make this necessary - Vector_d(Vector_d const&)=default; - Vector_d(Vector_d &);//=default; - Vector_d(Vector_d &&)=default; -#endif - - // try not to use these - Vector_d(Rep const& v) : Rep(v) {} - Vector_d(Rep& v) : Rep(static_cast(v)) {} - Vector_d(Rep&& v) : Rep(std::move(v)) {} - - // this one should be implicit - Vector_d(Null_vector const& v) - : Rep(CVBase()(v)) {} - Vector_d(Null_vector& v) - : Rep(CVBase()(v)) {} - Vector_d(Null_vector&& v) - : Rep(CVBase()(std::move(v))) {} - -#else - - Vector_d() : Rep(CVBase()()) {} - - Vector_d(Rep const& v) : Rep(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Vector_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(CVBase()( \ - BOOST_PP_ENUM_PARAMS(N,t))) {} \ - \ - template \ - Vector_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} - /* - template \ - Vector_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} - */ - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - - // this one should be implicit - Vector_d(Null_vector const& v) - : Rep(CVBase()(v)) {} - -#endif - - typename boost::result_of::type cartesian(int i)const{ - return CCBase()(rep(),i); - } - - typename boost::result_of::type operator[](int i)const{ - return CCBase()(rep(),i); - } - - typename boost::result_of::type cartesian_begin()const{ - return CVI()(rep(),Begin_tag()); - } - - typename boost::result_of::type cartesian_end()const{ - return CVI()(rep(),End_tag()); - } - - Vector_d operator-() const - { - return typename Get_functor::type()(*this); - } - - 
/* - Direction_d direction() const - { - return R().construct_direction_d_object()(*this); - } - - Vector_d transform(const Aff_transformation_d &t) const - { - return t.transform(*this); - } - - Vector_d operator/(const RT& c) const - { - return R().construct_divided_vector_d_object()(*this,c); - } - - Vector_d operator/(const typename First_if_different::Type & c) const - { - return R().construct_divided_vector_d_object()(*this,c); - } - - typename Qualified_result_of::type - x() const - { - return R().compute_x_3_object()(*this); - } - - typename Qualified_result_of::type - y() const - { - return R().compute_y_3_object()(*this); - } - - typename Qualified_result_of::type - z() const - { - return R().compute_z_3_object()(*this); - } - - typename Qualified_result_of::type - hx() const - { - return R().compute_hx_3_object()(*this); - } - - typename Qualified_result_of::type - hy() const - { - return R().compute_hy_3_object()(*this); - } - - typename Qualified_result_of::type - hz() const - { - return R().compute_hz_3_object()(*this); - } - - typename Qualified_result_of::type - hw() const - { - return R().compute_hw_3_object()(*this); - } - - typename Qualified_result_of::type - cartesian(int i) const - { - CGAL_kernel_precondition( (i == 0) || (i == 1) || (i == 2) ); - if (i==0) return x(); - if (i==1) return y(); - return z(); - } - - typename Qualified_result_of::type - homogeneous(int i) const - { - CGAL_kernel_precondition( (i >= 0) || (i <= 3) ); - if (i==0) return hx(); - if (i==1) return hy(); - if (i==2) return hz(); - return hw(); - } - - int dimension() const // bad idea? - { - return rep.dimension(); - } -*/ - typename boost::result_of::type squared_length()const{ - return SLBase()(rep()); - } -}; -#if 0 -template Vector_d::Vector_d(Vector_d &)=default; -#endif - -//TODO: IO - -template -Vector_d operator+(const Vector_d& v,const Vector_d& w) -{ - return typename Get_functor::type()(v,w); -} - -template -Vector_d operator-(const Vector_d& v,const Vector_d& w) -{ - return typename Get_functor::type()(v,w); -} - -} //namespace Wrap -} //namespace CGAL - -#endif // CGAL_WRAPPER_VECTOR_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h deleted file mode 100644 index 877eea21..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_WRAPPER_WEIGHTED_POINT_D_H -#define CGAL_WRAPPER_WEIGHTED_POINT_D_H - -#include -#include -#include -#include -#include -#ifndef CGAL_CXX11 -#include -#endif -#include - -namespace CGAL { -namespace Wrap { - -template -class Weighted_point_d : public Get_type::type -{ - typedef typename Get_type::type FT_; - typedef typename R_::Kernel_base Kbase; - typedef typename Get_type::type Point_; - typedef typename Get_functor >::type CWPBase; - typedef typename Get_functor::type PDWBase; - typedef typename Get_functor::type PWBase; - - typedef Weighted_point_d Self; - BOOST_STATIC_ASSERT((boost::is_same::type>::value)); - -public: - - typedef Tag_true Is_wrapper; - typedef typename R_::Default_ambient_dimension Ambient_dimension; - typedef Dimension_tag<0> Feature_dimension; - - typedef typename Get_type::type Rep; - - const Rep& rep() const - { - return *this; - } - - Rep& rep() - { - return *this; - } - - typedef R_ R; - -#ifdef CGAL_CXX11 - template::type...>,std::tuple >::value>::type> explicit Weighted_point_d(U&&...u) - : Rep(CWPBase()(std::forward(u)...)){} - -// // called from Construct_point_d -// template explicit Point_d(Eval_functor&&,U&&...u) -// : Rep(Eval_functor(), std::forward(u)...){} - template explicit Weighted_point_d(Eval_functor&&,F&&f,U&&...u) - : Rep(std::forward(f)(std::forward(u)...)){} - -#if 0 - // the new standard may make this necessary - Point_d(Point_d const&)=default; - Point_d(Point_d &);//=default; - Point_d(Point_d &&)=default; -#endif - - // try not to use these - Weighted_point_d(Rep const& v) : Rep(v) {} - Weighted_point_d(Rep& v) : Rep(static_cast(v)) {} - Weighted_point_d(Rep&& v) : Rep(std::move(v)) {} - -#else - - Weighted_point_d() : Rep(CWPBase()()) {} - - Weighted_point_d(Rep const& v) : Rep(v) {} // try not to use it - -#define CGAL_CODE(Z,N,_) template \ - explicit Weighted_point_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(CWPBase()( \ - BOOST_PP_ENUM_PARAMS(N,t))) {} \ - \ - template \ - Weighted_point_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} - /* - template \ - Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ - : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} - */ - - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE - -#endif - - //TODO: use references? - Point_ point()const{ - return Point_(Eval_functor(),PDWBase(),rep()); - } - FT_ weight()const{ - return PWBase()(rep()); - } - -}; - -} //namespace Wrap -} //namespace CGAL - -#endif // CGAL_WRAPPER_SPHERE_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h deleted file mode 100644 index 5a132ad2..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h +++ /dev/null @@ -1,1355 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H -#define CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#ifdef CGAL_CXX11 -#include -#endif - -namespace CGAL { -namespace CartesianDKernelFunctors { -namespace internal { -template struct Dimension_at_most { enum { value = false }; }; -template struct Dimension_at_most,b> { - enum { value = (a <= b) }; -}; -} - -template::value> struct Orientation_of_points : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) - typedef R_ R; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - typedef typename R::LA::Square_matrix Matrix; - - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type pd(this->kernel()); - Point const& p0=*f++; - int d=pd(p0); - Matrix m(d,d); - // FIXME: this writes the vector coordinates in lines ? check all the other uses in this file, this may be wrong for some. - for(int i=0;f!=e;++f,++i) { - Point const& p=*f; - for(int j=0;j,typename R::Default_ambient_dimension>::value>::type> - template =3)>::type> - result_type operator()(U&&...u) const { - return operator()({std::forward(u)...}); - } - - template - result_type operator()(std::initializer_list
l) const { - return operator()(l.begin(),l.end()); - } -#else - //should we make it template to avoid instantiation for wrong dim? - //or iterate outside the class? -#define CGAL_VAR(Z,J,I) m(I,J)=c(p##I,J)-c(x,J); -#define CGAL_VAR2(Z,I,N) BOOST_PP_REPEAT(N,CGAL_VAR,I) -#define CGAL_CODE(Z,N,_) \ - result_type operator()(Point const&x, BOOST_PP_ENUM_PARAMS(N,Point const&p)) const { \ - typename Get_functor::type c(this->kernel()); \ - Matrix m(N,N); \ - BOOST_PP_REPEAT(N,CGAL_VAR2,N) \ - return R::LA::sign_of_determinant(CGAL_MOVE(m)); \ - } - -BOOST_PP_REPEAT_FROM_TO(7, 10, CGAL_CODE, _ ) - // No need to do it for <=6, since that uses a different code path -#undef CGAL_CODE -#undef CGAL_VAR2 -#undef CGAL_VAR -#endif -}; - -#ifdef CGAL_CXX11 -template struct Orientation_of_points,true> : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - templatestruct Help; - templatestruct Help > { - template result_type operator()(C const&c,P const&x,T&&t)const{ - return sign_of_determinant(c(std::get(t),I%d)-c(x,I%d)...); - } - }; - template result_type operator()(P0 const&x,P&&...p)const{ - static_assert(d==sizeof...(P),"Wrong number of arguments"); - typename Get_functor::type c(this->kernel()); - return Help::type>()(c,x,std::forward_as_tuple(std::forward
(p)...)); - } - - - template result_type help2(Dimension_tag, Iter f, Iter const&e, U&&...u)const{ - auto const&p=*f; - return help2(Dimension_tag(),++f,e,std::forward(u)...,p); - } - template result_type help2(Dimension_tag<0>, Iter CGAL_assertion_code(f), Iter const& CGAL_assertion_code(e), U&&...u)const{ - CGAL_assertion(f==e); - return operator()(std::forward(u)...); - } - template - result_type operator()(Iter f, Iter e)const{ - return help2(Dimension_tag(),f,e); - } -}; -#else -#define CGAL_VAR(Z,J,I) c(p##I,J)-x##J -#define CGAL_VAR2(Z,I,N) BOOST_PP_ENUM(N,CGAL_VAR,I) -#define CGAL_VAR3(Z,N,_) Point const&p##N=*++f; -#define CGAL_VAR4(Z,N,_) RT const&x##N=c(x,N); -#define CGAL_CODE(Z,N,_) \ -template struct Orientation_of_points,true> : private Store_kernel { \ - CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) \ - typedef R_ R; \ - typedef typename Get_type::type RT; \ - typedef typename Get_type::type Point; \ - typedef typename Get_type::type result_type; \ - result_type operator()(Point const&x, BOOST_PP_ENUM_PARAMS(N,Point const&p)) const { \ - typename Get_functor::type c(this->kernel()); \ - BOOST_PP_REPEAT(N,CGAL_VAR4,) \ - return sign_of_determinant(BOOST_PP_ENUM(N,CGAL_VAR2,N)); \ - } \ - template \ - result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ \ - Point const&x=*f; \ - BOOST_PP_REPEAT(N,CGAL_VAR3,) \ - CGAL_assertion(++f==e); \ - return operator()(x,BOOST_PP_ENUM_PARAMS(N,p)); \ - } \ -}; - - BOOST_PP_REPEAT_FROM_TO(2, 7, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR4 -#undef CGAL_VAR3 -#undef CGAL_VAR2 -#undef CGAL_VAR - -#endif - -template struct Orientation_of_points,true> : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - result_type operator()(Point const&x, Point const&y) const { - typename Get_functor::type c(this->kernel()); - // No sign_of_determinant(RT) :-( - return CGAL::compare(c(y,0),c(x,0)); - } - template - result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ - Point const&x=*f; - Point const&y=*++f; - CGAL_assertion(++f==e); - return operator()(x,y); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Orientation_of_points_tag,(CartesianDKernelFunctors::Orientation_of_points),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Orientation_of_vectors : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation_of_vectors) - typedef R_ R; - typedef typename Get_type::type Vector; - typedef typename Get_type::type result_type; - typedef typename R::LA::Square_matrix Matrix; - - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type vd(this->kernel()); - // FIXME: Uh? Using it on a vector ?! 
- Vector const& v0=*f; - int d=vd(v0); - Matrix m(d,d); - for(int j=0;j=3)>::type> - result_type operator()(U&&...u) const { - return operator()({std::forward(u)...}); - } - - template - result_type operator()(std::initializer_list l) const { - return operator()(l.begin(),l.end()); - } -#else - //TODO -#endif -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Orientation_of_vectors_tag,(CartesianDKernelFunctors::Orientation_of_vectors),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Linear_rank : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Linear_rank) - typedef R_ R; - typedef typename Get_type::type Vector; - // Computing a sensible Uncertain is not worth it - typedef int result_type; - typedef typename R::LA::Dynamic_matrix Matrix; - - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type vd(this->kernel()); - std::ptrdiff_t n=std::distance(f,e); - if (n==0) return 0; - Vector const& v0 = *f; - // FIXME: Uh? Using it on a vector ?! - int d=vd(v0); - Matrix m(d,n); - for(int j=0;j),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Linearly_independent : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Linearly_independent) - typedef R_ R; - typedef typename Get_type::type result_type; - - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type vd(this->kernel()); - std::ptrdiff_t n=std::distance(f,e); - // FIXME: Uh? Using it on a vector ?! - int d=vd(*f); - if (n>d) return false; - typename Get_functor::type lr(this->kernel()); - return lr(f,e) == n; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Linearly_independent_tag,(CartesianDKernelFunctors::Linearly_independent),(Vector_tag),(Point_dimension_tag,Linear_rank_tag)); - -namespace CartesianDKernelFunctors { -template struct Contained_in_linear_hull : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Contained_in_linear_hull) - typedef R_ R; - typedef typename Get_type::type Vector; - // Computing a sensible Uncertain is not worth it - typedef bool result_type; - typedef typename R::LA::Dynamic_matrix Matrix; - - template - result_type operator()(Iter f, Iter e,V const&w)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type vd(this->kernel()); - std::ptrdiff_t n=std::distance(f,e); - if (n==0) return false; - // FIXME: Uh? Using it on a vector ?! 
- int d=vd(w); - Matrix m(d,n+1); - for(int i=0; f!=e; ++f,++i){ - Vector const& v = *f; - for(int j=0;j),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Affine_rank : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Affine_rank) - typedef R_ R; - typedef typename Get_type::type Point; - // Computing a sensible Uncertain is not worth it - typedef int result_type; - typedef typename R::LA::Dynamic_matrix Matrix; - - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type pd(this->kernel()); - int n=(int)std::distance(f,e); - if (--n<=0) return n; - Point const& p0 = *f; - int d=pd(p0); - Matrix m(d,n); - for(int i=0; ++f!=e; ++i){ - Point const& p = *f; - for(int j=0;j),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Affinely_independent : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Affinely_independent) - typedef R_ R; - typedef typename Get_type::type result_type; - - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type pd(this->kernel()); - std::ptrdiff_t n=std::distance(f,e); - int d=pd(*f); - if (--n>d) return false; - typename Get_functor::type ar(this->kernel()); - return ar(f,e) == n; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Affinely_independent_tag,(CartesianDKernelFunctors::Affinely_independent),(Point_tag),(Point_dimension_tag,Affine_rank_tag)); - -namespace CartesianDKernelFunctors { -template struct Contained_in_simplex : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Contained_in_simplex) - typedef R_ R; - typedef typename Get_type::type Point; - // Computing a sensible Uncertain<*> is not worth it - // typedef typename Get_type::type result_type; - typedef bool result_type; - typedef typename Increment_dimension::type D1; - typedef typename Increment_dimension::type D2; - typedef typename R::LA::template Rebind_dimension::Other LA; - typedef typename LA::Dynamic_matrix Matrix; - typedef typename LA::Dynamic_vector DynVec; - typedef typename LA::Vector Vec; - - template - result_type operator()(Iter f, Iter e, P const&q)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type pd(this->kernel()); - std::ptrdiff_t n=std::distance(f,e); - if (n==0) return false; - int d=pd(q); - Matrix m(d+1,n); - DynVec a(n); - // FIXME: Should use the proper vector constructor (Iterator_and_last) - Vec b(d+1); - for(int j=0;j),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { - namespace internal { - template - struct Matrix_col_access { - typedef Ref_ result_type; - int col; - Matrix_col_access(int r):col(r){} - template Ref_ operator()(Mat const& m, std::ptrdiff_t row)const{ - return m(row,col); - } - }; - } -template struct Linear_base : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Linear_base) - typedef R_ R; - typedef typename Get_type::type Vector; - typedef typename Get_type::type FT; - typedef void result_type; - typedef typename R::LA::Dynamic_matrix Matrix; - - template - result_type operator()(Iter f, Iter e, Oter&o)const{ - typename Get_functor::type c(this->kernel()); - typename Get_functor::type vd(this->kernel()); - typename Get_functor >::type cv(this->kernel()); - std::ptrdiff_t n=std::distance(f,e); - if (n==0) return; - Vector const& v0 = *f; - // FIXME: Uh? Using it on a vector ?! 
- int d=vd(v0); - Matrix m(d,n); - for(int j=0;j()(0,0)) -#else - FT -#endif - Ref; - typedef Iterator_from_indices > IFI; - *o++ = cv(IFI(b,0,i),IFI(b,d,i)); - } - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Linear_base_tag,(CartesianDKernelFunctors::Linear_base),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); - -#if 0 -namespace CartesianDKernelFunctors { -template::value> struct Orientation : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation) - typedef R_ R; - typedef typename Get_type::type Vector; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type OP; - typedef typename Get_functor::type OV; - - //FIXME!!! - //when Point and Vector are distinct types, the dispatch should be made - //in a way that doesn't instantiate a conversion from Point to Vector - template - result_type operator()(Iter const&f, Iter const& e)const{ - typename Get_functor::type pd(this->kernel()); - typename std::iterator_traits::difference_type d=std::distance(f,e); - int dim=pd(*f); // BAD - if(d==dim) return OV(this->kernel())(f,e); - CGAL_assertion(d==dim+1); - return OP(this->kernel())(f,e); - } - //TODO: version that takes objects directly instead of iterators -}; - -template struct Orientation : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Orientation) - typedef R_ R; - typedef typename Get_type::type Vector; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type OP; - typedef typename Get_functor::type OV; - typedef typename R::LA::Square_matrix Matrix; - - //FIXME!!! - //when Point and Vector are distinct types, the dispatch should be made - //in a way that doesn't instantiate a conversion from Point to Vector - template - typename boost::enable_if,result_type>::type - operator()(Iter const&f, Iter const& e)const{ - return OP(this->kernel())(f,e); - } - template - typename boost::enable_if,result_type>::type - operator()(Iter const&f, Iter const& e)const{ - return OV(this->kernel())(f,e); - } - //TODO: version that takes objects directly instead of iterators -}; -} -#endif - -namespace CartesianDKernelFunctors { -template struct Power_side_of_power_sphere_raw : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Power_side_of_power_sphere_raw) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type FT; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - typedef typename Increment_dimension::type D1; - typedef typename Increment_dimension::type D2; - typedef typename R::LA::template Rebind_dimension::Other LA; - typedef typename LA::Square_matrix Matrix; - - template - result_type operator()(IterP f, IterP const& e, IterW fw, Pt const& p0, Wt const& w0) const { - typedef typename Get_functor::type Sqdo; - typename Get_functor::type c(this->kernel()); - typename Get_functor::type pd(this->kernel()); - - int d=pd(p0); - Matrix m(d+1,d+1); - if(CGAL::Is_stored::value) { - Sqdo sqdo(this->kernel()); - FT const& h0 = sqdo(p0) - w0; - for(int i=0;f!=e;++f,++fw,++i) { - Point const& p=*f; - for(int j=0;j),(Point_tag),(Point_dimension_tag,Squared_distance_to_origin_tag,Compute_point_cartesian_coordinate_tag)); - -// TODO: make Side_of_oriented_sphere call Power_side_of_power_sphere_raw -namespace CartesianDKernelFunctors { -template struct Side_of_oriented_sphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Side_of_oriented_sphere) - typedef R_ R; - typedef typename 
Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - typedef typename Increment_dimension::type D1; - typedef typename Increment_dimension::type D2; - typedef typename R::LA::template Rebind_dimension::Other LA; - typedef typename LA::Square_matrix Matrix; - - template - result_type operator()(Iter f, Iter const& e)const{ - Point const& p0=*f++; // *--e ? - return this->operator()(f,e,p0); - } - - template - result_type operator()(Iter f, Iter const& e, Point const& p0) const { - typedef typename Get_functor::type Sqdo; - typename Get_functor::type c(this->kernel()); - typename Get_functor::type pd(this->kernel()); - - int d=pd(p0); - Matrix m(d+1,d+1); - if(CGAL::Is_stored::value) { - Sqdo sqdo(this->kernel()); - for(int i=0;f!=e;++f,++i) { - Point const& p=*f; - for(int j=0;j=4)>::type> - result_type operator()(U&&...u) const { - return operator()({std::forward(u)...}); - } - - template - result_type operator()(std::initializer_list
l) const { - return operator()(l.begin(),l.end()); - } -#else - //TODO -#endif -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Side_of_oriented_sphere_tag,(CartesianDKernelFunctors::Side_of_oriented_sphere),(Point_tag),(Point_dimension_tag,Squared_distance_to_origin_tag,Compute_point_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Construct_circumcenter : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Construct_circumcenter) - typedef typename Get_type::type Point; - typedef Point result_type; - typedef typename Get_type::type FT; - template - result_type operator()(Iter f, Iter e)const{ - typedef typename Get_type::type Point; - typedef typename R_::LA LA; - typename Get_functor::type c(this->kernel()); - typename Get_functor >::type cp(this->kernel()); - typename Get_functor::type pd(this->kernel()); - typename Get_functor::type sdo(this->kernel()); - - Point const& p0=*f; - int d = pd(p0); - if (d+1 == std::distance(f,e)) - { - // 2*(x-y).c == x^2-y^2 - typedef typename LA::Square_matrix Matrix; - typedef typename LA::Vector Vec; - typedef typename LA::Construct_vector CVec; - FT const& n0 = sdo(p0); - Matrix m(d,d); - Vec b = typename CVec::Dimension()(d); - // Write the point coordinates in lines. - int i; - for(i=0; ++f!=e; ++i) { - Point const& p=*f; - for(int j=0;j::Other LAd; - typedef typename LAd::Square_matrix Matrix; - typedef typename LAd::Vector Vec; - typename Get_functor::type sp(this->kernel()); - int k=static_cast(std::distance(f,e)); - Matrix m(k,k); - Vec b(k); - Vec l(k); - int j,i=0; - for(Iter f2=f;f2!=e;++f2,++i){ - b(i)=m(i,i)=sdo(*f2); - j=0; - for(Iter f3=f;f3!=e;++f3,++j){ - m(j,i)=m(i,j)=sp(*f2,*f3); - } - } - for(i=1;i),(Point_tag),(Construct_ttag,Compute_point_cartesian_coordinate_tag,Scalar_product_tag,Squared_distance_to_origin_tag,Point_dimension_tag)); - -namespace CartesianDKernelFunctors { -template struct Squared_circumradius : Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Squared_circumradius) - typedef typename Get_type::type result_type; - template - result_type operator()(Iter f, Iter e)const{ - typename Get_functor::type cc(this->kernel()); - typename Get_functor::type sd(this->kernel()); - return sd(cc(f, e), *f); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Squared_circumradius_tag,(CartesianDKernelFunctors::Squared_circumradius),(Point_tag),(Construct_circumcenter_tag,Squared_distance_tag)); - -namespace CartesianDKernelFunctors { -// TODO: implement it directly, it should be at least as fast as Side_of_oriented_sphere. -template struct Side_of_bounded_sphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Side_of_bounded_sphere) - typedef R_ R; - typedef typename Get_type::type Point; - typedef typename Get_type::type result_type; - - template - result_type operator()(Iter f, Iter const& e) const { - Point const& p0 = *f++; // *--e ? - typename Get_functor::type pd(this->kernel()); - //FIXME: Doesn't work for non-full dimension. - CGAL_assertion (std::distance(f,e) == pd(p0)+1); - return operator() (f, e, p0); - } - - template - result_type operator()(Iter const& f, Iter const& e, Point const& p0) const { - typename Get_functor::type sos (this->kernel()); - typename Get_functor::type op (this->kernel()); - // enum_cast is not very generic, but since this function isn't supposed to remain like this... 
- return enum_cast (sos (f, e, p0) * op (f, e)); - } - -#ifdef CGAL_CXX11 - template =4)>::type> - result_type operator()(U&&...u) const { - return operator()({std::forward(u)...}); - } - - template - result_type operator()(std::initializer_list
l) const { - return operator()(l.begin(),l.end()); - } -#else - //TODO -#endif -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Side_of_bounded_sphere_tag,(CartesianDKernelFunctors::Side_of_bounded_sphere),(Point_tag),(Side_of_oriented_sphere_tag,Orientation_of_points_tag)); - -namespace CartesianDKernelFunctors { -template struct Side_of_bounded_circumsphere : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Side_of_bounded_circumsphere) - typedef typename Get_type::type result_type; - - template - result_type operator()(Iter f, Iter const& e, P const& p0) const { - // TODO: Special case when the dimension is full. - typename Get_functor::type cc(this->kernel()); - typename Get_functor::type cd(this->kernel()); - - return enum_cast(cd(cc(f, e), *f, p0)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Side_of_bounded_circumsphere_tag,(CartesianDKernelFunctors::Side_of_bounded_circumsphere),(Point_tag),(Squared_distance_tag,Construct_circumcenter_tag)); - -namespace CartesianDKernelFunctors { -template struct Point_to_vector : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Point_to_vector) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type CV; - typedef typename Get_functor >::type CI; - typedef Vector result_type; - typedef Point argument_type; - result_type operator()(argument_type const&v)const{ - CI ci(this->kernel()); - return CV(this->kernel())(ci(v,Begin_tag()),ci(v,End_tag())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Point_to_vector_tag,(CartesianDKernelFunctors::Point_to_vector),(Point_tag,Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Vector_to_point : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Vector_to_point) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type CP; - typedef typename Get_functor >::type CI; - typedef Point result_type; - typedef Vector argument_type; - result_type operator()(argument_type const&v)const{ - CI ci(this->kernel()); - return CP(this->kernel())(ci(v,Begin_tag()),ci(v,End_tag())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Vector_to_point_tag,(CartesianDKernelFunctors::Vector_to_point),(Point_tag,Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Opposite_vector : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Opposite_vector) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CV; - typedef typename Get_functor >::type CI; - typedef Vector result_type; - typedef Vector argument_type; - result_type operator()(Vector const&v)const{ - CI ci(this->kernel()); - return CV(this->kernel())(make_transforming_iterator(ci(v,Begin_tag()),std::negate()),make_transforming_iterator(ci(v,End_tag()),std::negate())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Opposite_vector_tag,(CartesianDKernelFunctors::Opposite_vector),(Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Scaled_vector : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Scaled_vector) - typedef R_ R; - typedef typename Get_type::type FT; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CV; - typedef typename Get_functor >::type CI; - typedef Vector result_type; - typedef Vector first_argument_type; - typedef FT 
second_argument_type; - result_type operator()(Vector const&v,FT const& s)const{ - CI ci(this->kernel()); - return CV(this->kernel())(make_transforming_iterator(ci(v,Begin_tag()),Scale(s)),make_transforming_iterator(ci(v,End_tag()),Scale(s))); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Scaled_vector_tag,(CartesianDKernelFunctors::Scaled_vector),(Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Sum_of_vectors : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Sum_of_vectors) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CV; - typedef typename Get_functor >::type CI; - typedef Vector result_type; - typedef Vector first_argument_type; - typedef Vector second_argument_type; - result_type operator()(Vector const&a, Vector const&b)const{ - CI ci(this->kernel()); - return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::plus()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::plus())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Sum_of_vectors_tag,(CartesianDKernelFunctors::Sum_of_vectors),(Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Difference_of_vectors : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Difference_of_vectors) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CV; - typedef typename Get_functor >::type CI; - typedef Vector result_type; - typedef Vector first_argument_type; - typedef Vector second_argument_type; - result_type operator()(Vector const&a, Vector const&b)const{ - CI ci(this->kernel()); - return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::minus()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::minus())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Difference_of_vectors_tag,(CartesianDKernelFunctors::Difference_of_vectors),(Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Translated_point : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Translated_point) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type CP; - typedef typename Get_functor >::type CVI; - typedef typename Get_functor >::type CPI; - typedef Point result_type; - typedef Point first_argument_type; - typedef Vector second_argument_type; - result_type operator()(Point const&a, Vector const&b)const{ - CVI cvi(this->kernel()); - CPI cpi(this->kernel()); - return CP(this->kernel())(make_transforming_pair_iterator(cpi(a,Begin_tag()),cvi(b,Begin_tag()),std::plus()),make_transforming_pair_iterator(cpi(a,End_tag()),cvi(b,End_tag()),std::plus())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Translated_point_tag,(CartesianDKernelFunctors::Translated_point),(Point_tag, Vector_tag),(Construct_ttag, Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Difference_of_points : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Difference_of_points) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CV; - typedef typename Get_functor >::type CI; - typedef Vector result_type; - typedef Point 
first_argument_type; - typedef Point second_argument_type; - result_type operator()(Point const&a, Point const&b)const{ - CI ci(this->kernel()); - return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::minus()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::minus())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Difference_of_points_tag,(CartesianDKernelFunctors::Difference_of_points),(Point_tag, Vector_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Midpoint : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Midpoint) - typedef R_ R; - typedef typename Get_type::type FT; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type CP; - typedef typename Get_functor >::type CI; - typedef Point result_type; - typedef Point first_argument_type; - typedef Point second_argument_type; - // There is a division, but it will be cast to RT afterwards anyway, so maybe we could use RT. - struct Average : std::binary_function { - FT operator()(FT const&a, RT const&b)const{ - return (a+b)/2; - } - }; - result_type operator()(Point const&a, Point const&b)const{ - CI ci(this->kernel()); - //Divide half(2); - //return CP(this->kernel())(make_transforming_iterator(make_transforming_pair_iterator(ci.begin(a),ci.begin(b),std::plus()),half),make_transforming_iterator(make_transforming_pair_iterator(ci.end(a),ci.end(b),std::plus()),half)); - return CP(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),Average()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),Average())); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Midpoint_tag,(CartesianDKernelFunctors::Midpoint),(Point_tag),(Construct_ttag, Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Squared_length : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Squared_length) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CI; - typedef RT result_type; - typedef Vector argument_type; - result_type operator()(Vector const&a)const{ - CI ci(this->kernel()); - typename Algebraic_structure_traits::Square f; - // TODO: avoid this RT(0)+... - return std::accumulate(make_transforming_iterator(ci(a,Begin_tag()),f),make_transforming_iterator(ci(a,End_tag()),f),RT(0)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Squared_length_tag,(CartesianDKernelFunctors::Squared_length),(Vector_tag),(Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Squared_distance_to_origin : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Squared_distance_to_origin) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type CI; - typedef RT result_type; - typedef Point argument_type; - result_type operator()(Point const&a)const{ - CI ci(this->kernel()); - typename Algebraic_structure_traits::Square f; - // TODO: avoid this RT(0)+... 
- return std::accumulate(make_transforming_iterator(ci(a,Begin_tag()),f),make_transforming_iterator(ci(a,End_tag()),f),RT(0)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Squared_distance_to_origin_tag,(CartesianDKernelFunctors::Squared_distance_to_origin),(Point_tag),(Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Squared_distance : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Squared_distance) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Point; - typedef typename Get_functor >::type CI; - typedef RT result_type; - typedef Point first_argument_type; - typedef Point second_argument_type; - struct Sq_diff : std::binary_function { - RT operator()(RT const&a, RT const&b)const{ - return CGAL::square(a-b); - } - }; - result_type operator()(Point const&a, Point const&b)const{ - CI ci(this->kernel()); - Sq_diff f; - // TODO: avoid this RT(0)+... - return std::accumulate(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),f),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),f),RT(0)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Squared_distance_tag,(CartesianDKernelFunctors::Squared_distance),(Point_tag),(Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Scalar_product : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Scalar_product) - typedef R_ R; - typedef typename Get_type::type RT; - typedef typename Get_type::type Vector; - typedef typename Get_functor >::type CI; - typedef RT result_type; - typedef Vector first_argument_type; - typedef Vector second_argument_type; - result_type operator()(Vector const&a, Vector const&b)const{ - CI ci(this->kernel()); - std::multiplies f; - // TODO: avoid this RT(0)+... - return std::accumulate( - make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),f), - make_transforming_pair_iterator(ci(a, End_tag()),ci(b, End_tag()),f), - RT(0)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Scalar_product_tag,(CartesianDKernelFunctors::Scalar_product),(Vector_tag),(Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Compare_distance : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Compare_distance) - typedef R_ R; - typedef typename Get_type::type Point; - typedef typename Get_functor::type CSD; - typedef typename Get_type::type result_type; - typedef Point first_argument_type; - typedef Point second_argument_type; - typedef Point third_argument_type; // why am I doing this already? - typedef Point fourth_argument_type; - result_type operator()(Point const&a, Point const&b, Point const&c)const{ - CSD csd(this->kernel()); - return CGAL_NTS compare(csd(a,b),csd(a,c)); - } - result_type operator()(Point const&a, Point const&b, Point const&c, Point const&d)const{ - CSD csd(this->kernel()); - return CGAL_NTS compare(csd(a,b),csd(c,d)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Compare_distance_tag,(CartesianDKernelFunctors::Compare_distance),(Point_tag),(Squared_distance_tag)); - -namespace CartesianDKernelFunctors { -template struct Less_point_cartesian_coordinate : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Less_point_cartesian_coordinate) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type Cc; - // TODO: This is_exact thing should be reengineered. 
- // the goal is to have a way to tell: don't filter this - typedef typename CGAL::Is_exact Is_exact; - - template - result_type operator()(V const&a, W const&b, I i)const{ - Cc c(this->kernel()); - return c(a,i)),(),(Compute_point_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Compare_point_cartesian_coordinate : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Compare_point_cartesian_coordinate) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type Cc; - // TODO: This is_exact thing should be reengineered. - // the goal is to have a way to tell: don't filter this - typedef typename CGAL::Is_exact Is_exact; - - template - result_type operator()(V const&a, W const&b, I i)const{ - Cc c(this->kernel()); - return CGAL_NTS compare(c(a,i),c(b,i)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Compare_point_cartesian_coordinate_tag,(CartesianDKernelFunctors::Compare_point_cartesian_coordinate),(),(Compute_point_cartesian_coordinate_tag)); - -namespace CartesianDKernelFunctors { -template struct Compare_lexicographically : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Compare_lexicographically) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor >::type CI; - // TODO: This is_exact thing should be reengineered. - // the goal is to have a way to tell: don't filter this - typedef typename CGAL::Is_exact Is_exact; - - template - result_type operator()(V const&a, W const&b)const{ - CI c(this->kernel()); -#ifdef CGAL_CXX11 - auto -#else - typename CI::result_type -#endif - a_begin=c(a,Begin_tag()), - b_begin=c(b,Begin_tag()), - a_end=c(a,End_tag()); - result_type res; - // can't we do slightly better for Uncertain<*> ? - // after res=...; if(is_uncertain(res))return indeterminate(); - do res=CGAL_NTS compare(*a_begin++,*b_begin++); - while(a_begin!=a_end && res==EQUAL); - return res; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Compare_lexicographically_tag,(CartesianDKernelFunctors::Compare_lexicographically),(),(Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Less_lexicographically : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Less_lexicographically) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type CL; - typedef typename CGAL::Is_exact Is_exact; - - template - result_type operator() (V const&a, W const&b) const { - CL c (this->kernel()); - return c(a,b) < 0; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Less_lexicographically_tag,(CartesianDKernelFunctors::Less_lexicographically),(),(Compare_lexicographically_tag)); - -namespace CartesianDKernelFunctors { -template struct Less_or_equal_lexicographically : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Less_or_equal_lexicographically) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type CL; - typedef typename CGAL::Is_exact Is_exact; - - template - result_type operator() (V const&a, W const&b) const { - CL c (this->kernel()); - return c(a,b) <= 0; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Less_or_equal_lexicographically_tag,(CartesianDKernelFunctors::Less_or_equal_lexicographically),(),(Compare_lexicographically_tag)); - -namespace CartesianDKernelFunctors { -template struct Equal_points : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Equal_points) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor >::type CI; - // TODO: This is_exact thing should be reengineered. 
- // the goal is to have a way to tell: don't filter this - typedef typename CGAL::Is_exact Is_exact; - - template - result_type operator()(V const&a, W const&b)const{ - CI c(this->kernel()); -#ifdef CGAL_CXX11 - auto -#else - typename CI::result_type -#endif - a_begin=c(a,Begin_tag()), - b_begin=c(b,Begin_tag()), - a_end=c(a,End_tag()); - result_type res = true; - // Is using CGAL::possibly for Uncertain really an optimization? - do res = res & (*a_begin++ == *b_begin++); - while(a_begin!=a_end && possibly(res)); - return res; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Equal_points_tag,(CartesianDKernelFunctors::Equal_points),(),(Construct_ttag)); - -namespace CartesianDKernelFunctors { -template struct Oriented_side : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Oriented_side) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_type::type Point; - typedef typename Get_type::type Hyperplane; - typedef typename Get_type::type Sphere; - typedef typename Get_functor::type VA; - typedef typename Get_functor::type HT; - typedef typename Get_functor::type SD; - typedef typename Get_functor::type SR; - typedef typename Get_functor::type CS; - - result_type operator()(Hyperplane const&h, Point const&p)const{ - HT ht(this->kernel()); - VA va(this->kernel()); - return CGAL::compare(va(h,p),ht(h)); - } - result_type operator()(Sphere const&s, Point const&p)const{ - SD sd(this->kernel()); - SR sr(this->kernel()); - CS cs(this->kernel()); - return CGAL::compare(sd(cs(s),p),sr(s)); - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Oriented_side_tag,(CartesianDKernelFunctors::Oriented_side),(Point_tag,Sphere_tag,Hyperplane_tag),(Value_at_tag,Hyperplane_translation_tag,Squared_distance_tag,Squared_radius_tag,Center_of_sphere_tag)); - -namespace CartesianDKernelFunctors { -template struct Has_on_positive_side : private Store_kernel { - CGAL_FUNCTOR_INIT_STORE(Has_on_positive_side) - typedef R_ R; - typedef typename Get_type::type result_type; - typedef typename Get_functor::type OS; - - template - result_type operator()(Obj const&o, Pt const&p)const{ - OS os(this->kernel()); - return os(o,p) == ON_POSITIVE_SIDE; - } -}; -} - -CGAL_KD_DEFAULT_FUNCTOR(Has_on_positive_side_tag,(CartesianDKernelFunctors::Has_on_positive_side),(),(Oriented_side_tag)); - -} -#include -#endif // CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h deleted file mode 100644 index c25c4e2b..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_EXACTNESS_H -#define CGAL_EXACTNESS_H -#include -#include -namespace CGAL { - -#define CGAL_STRAWBERRY(Is_pretty) \ - namespace internal { \ - BOOST_MPL_HAS_XXX_TRAIT_DEF(Is_pretty) \ - } \ - template::value> \ - struct Is_pretty : boost::false_type {}; \ - template \ - struct Is_pretty : T::Is_pretty {} - -CGAL_STRAWBERRY(Is_exact); -CGAL_STRAWBERRY(Is_fast); -CGAL_STRAWBERRY(Is_stored); -#undef CGAL_STRAWBERRY -} -#endif // CGAL_EXACTNESS_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h deleted file mode 100644 index b8e17886..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h +++ /dev/null @@ -1,363 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_FUNCTOR_TAGS_H -#define CGAL_FUNCTOR_TAGS_H -#include // for Null_tag -#include -#ifdef CGAL_CXX11 -#include -#include -#endif -#include -#include -#include -#include -#include -#include -#include -#include -namespace CGAL { - - // Find a better place for this later - - template struct Get_type - : K::template Type {}; - template struct Get_functor - : K::template Functor {}; -#ifdef CGAL_CXX11 - template using Type = typename Get_type::type; - template using Functor = typename Get_functor::type; -#endif - - class Null_type {~Null_type();}; // no such object should be created - - // To construct iterators - struct Begin_tag {}; - struct End_tag {}; - - // Functor category - struct Predicate_tag {}; - struct Construct_tag {}; - struct Construct_iterator_tag {}; - struct Compute_tag {}; - struct Misc_tag {}; - - struct No_filter_tag {}; - - templatestruct Construct_ttag {}; - templatestruct Convert_ttag {}; - - template struct Get_functor_category { typedef Misc_tag type; }; - template struct Typedef_tag_type; - //template struct Read_tag_type {}; - - template - struct Provides_type - : Has_type_different_from, Null_type> {}; - - template - struct Provides_functor - : Has_type_different_from, Null_functor> {}; - - template::type::value> - struct Provides_functors : boost::mpl::and_ < - Provides_functor::type>, - Provides_functors::type> > {}; - template - struct Provides_functors : boost::true_type {}; - - template::type::value> - struct Provides_types : boost::mpl::and_ < - Provides_type::type>, - Provides_types::type> > {}; - template - struct Provides_types : boost::true_type {}; - - namespace internal { BOOST_MPL_HAS_XXX_TRAIT_NAMED_DEF(has_Type,template Type,false) } - template::value /* false */> - struct Provides_type_i : boost::false_type {}; - template - struct Provides_type_i - : Has_type_different_from, Null_type> {}; - - //// This version does not like Functor - //namespace internal { BOOST_MPL_HAS_XXX_TEMPLATE_NAMED_DEF(has_Functor,Functor,false) } - // This version 
lets us use non-type template parameters, but fails with older EDG-based compilers (Intel 14). - namespace internal { BOOST_MPL_HAS_XXX_TRAIT_NAMED_DEF(has_Functor,template Functor,false) } - - template::value /* false */> - struct Provides_functor_i : boost::false_type {}; - template - struct Provides_functor_i - : Has_type_different_from, Null_functor> {}; - - // TODO: Refine this a bit. - template ::value, - //bool=Provides_functor_i::value, - bool = internal::has_Functor::value - > - struct Inherit_functor : K::template Functor {}; - template - struct Inherit_functor {}; - - template ::value> - struct Inherit_type : K::template Type {}; - template - struct Inherit_type {}; - - struct Number_tag {}; - struct Discrete_tag {}; - struct Object_tag {}; - template struct Get_type_category { - // The lazy kernel uses it too eagerly, - // so it currently needs a default. - typedef Null_tag type; - }; - -#define CGAL_DECL_OBJ_(X,Y) \ - template \ - struct Typedef_tag_type : Base { typedef Obj X; }; \ - template \ - struct Get_type_category { typedef Y##_tag type; } -#define CGAL_DECL_OBJ(X,Y) struct X##_tag {}; \ - CGAL_DECL_OBJ_(X,Y) - - //namespace has_object { BOOST_MPL_HAS_XXX_TRAIT_DEF(X) } - //template - //struct Provides_tag_type : has_object::has_##X {}; - //template - //struct Read_tag_type { typedef typename Kernel::X type; } - - // Not exactly objects, but the extras can't hurt. - CGAL_DECL_OBJ(FT, Number); - CGAL_DECL_OBJ(RT, Number); - - CGAL_DECL_OBJ(Bool, Discrete); // Boolean_tag is already taken, and is a template :-( - CGAL_DECL_OBJ(Comparison_result, Discrete); - CGAL_DECL_OBJ(Sign, Discrete); - CGAL_DECL_OBJ(Orientation, Discrete); // Note: duplicate with the functor tag! - CGAL_DECL_OBJ(Oriented_side, Discrete); - CGAL_DECL_OBJ(Bounded_side, Discrete); - CGAL_DECL_OBJ(Angle, Discrete); - CGAL_DECL_OBJ(Flat_orientation, Discrete); - - CGAL_DECL_OBJ(Vector, Object); - CGAL_DECL_OBJ(Point, Object); - CGAL_DECL_OBJ(Segment, Object); - CGAL_DECL_OBJ(Sphere, Object); - CGAL_DECL_OBJ(Line, Object); - CGAL_DECL_OBJ(Direction, Object); - CGAL_DECL_OBJ(Hyperplane, Object); - CGAL_DECL_OBJ(Ray, Object); - CGAL_DECL_OBJ(Iso_box, Object); - CGAL_DECL_OBJ(Bbox, Object); - CGAL_DECL_OBJ(Aff_transformation, Object); - CGAL_DECL_OBJ(Weighted_point, Object); -#undef CGAL_DECL_OBJ_ -#undef CGAL_DECL_OBJ - -// Intel fails with those, and they are not so useful. 
-// CGAL_KD_DEFAULT_TYPE(RT_tag,(typename Get_type::type),(),()); -// CGAL_KD_DEFAULT_TYPE(FT_tag,(CGAL::Quotient::type>),(),()); - -#define CGAL_SMURF2(A,B) CGAL_KD_DEFAULT_TYPE(A##_tag,(typename Same_uncertainty_nt::type>::type),(RT_tag),()) -#define CGAL_SMURF1(A) CGAL_SMURF2(A,CGAL::A) - CGAL_SMURF2(Bool, bool); - CGAL_SMURF1(Sign); - CGAL_SMURF1(Comparison_result); - CGAL_SMURF1(Orientation); - CGAL_SMURF1(Oriented_side); - CGAL_SMURF1(Bounded_side); - CGAL_SMURF1(Angle); -#undef CGAL_SMURF1 -#undef CGAL_SMURF2 - - // TODO: replace with Get_type_category - template struct is_NT_tag { enum { value = false }; }; - template<> struct is_NT_tag { enum { value = true }; }; - template<> struct is_NT_tag { enum { value = true }; }; - - template struct iterator_tag_traits { - enum { is_iterator = false, has_nth_element = false }; - typedef Null_tag value_tag; - }; - -#define CGAL_DECL_COMPUTE(X) struct X##_tag {}; \ - templatestruct Get_functor_category{typedef Compute_tag type;} - CGAL_DECL_COMPUTE(Compute_point_cartesian_coordinate); - CGAL_DECL_COMPUTE(Compute_vector_cartesian_coordinate); - CGAL_DECL_COMPUTE(Compute_homogeneous_coordinate); - CGAL_DECL_COMPUTE(Squared_distance); - CGAL_DECL_COMPUTE(Squared_distance_to_origin); - CGAL_DECL_COMPUTE(Squared_length); - CGAL_DECL_COMPUTE(Squared_radius); - CGAL_DECL_COMPUTE(Squared_circumradius); - CGAL_DECL_COMPUTE(Scalar_product); - CGAL_DECL_COMPUTE(Hyperplane_translation); - CGAL_DECL_COMPUTE(Value_at); - CGAL_DECL_COMPUTE(Point_weight); - CGAL_DECL_COMPUTE(Power_distance); - CGAL_DECL_COMPUTE(Power_distance_to_point); -#undef CGAL_DECL_COMPUTE - -#define CGAL_DECL_ITER_OBJ(X,Y,Z,C) struct X##_tag {}; \ - template<>struct iterator_tag_traits { \ - enum { is_iterator = true, has_nth_element = true }; \ - typedef Y##_tag value_tag; \ - typedef Z##_tag nth_element; \ - typedef C##_tag container; \ - }; \ - template \ - struct Typedef_tag_type : Base { typedef Obj X; } - - //namespace has_object { BOOST_MPL_HAS_XXX_TRAIT_DEF(X) } - //template - //struct Provides_tag_type : has_object::has_##X {}; - //template - //struct Read_tag_type { typedef typename Kernel::X type; } - - CGAL_DECL_ITER_OBJ(Vector_cartesian_const_iterator, FT, Compute_vector_cartesian_coordinate, Vector); - CGAL_DECL_ITER_OBJ(Point_cartesian_const_iterator, FT, Compute_point_cartesian_coordinate, Point); -#undef CGAL_DECL_ITER_OBJ - - templatestruct map_result_tag{typedef Null_type type;}; - templatestruct map_result_tag >{typedef T type;}; - - templatestruct Get_functor_category,B,C> : - boost::mpl::if_c::is_iterator, - Construct_iterator_tag, - Construct_tag> {}; - - // Really? 
- templatestruct Get_functor_category,B,C>{typedef Misc_tag type;}; - -#define CGAL_DECL_CONSTRUCT(X,Y) struct X##_tag {}; \ - template<>struct map_result_tag{typedef Y##_tag type;}; \ - templatestruct Get_functor_category{typedef Construct_tag type;} - CGAL_DECL_CONSTRUCT(Midpoint,Point); - CGAL_DECL_CONSTRUCT(Center_of_sphere,Point); - CGAL_DECL_CONSTRUCT(Point_of_sphere,Point); - CGAL_DECL_CONSTRUCT(Segment_extremity,Point); - CGAL_DECL_CONSTRUCT(Sum_of_vectors,Vector); - CGAL_DECL_CONSTRUCT(Difference_of_vectors,Vector); - CGAL_DECL_CONSTRUCT(Opposite_vector,Vector); - CGAL_DECL_CONSTRUCT(Scaled_vector,Vector); - CGAL_DECL_CONSTRUCT(Orthogonal_vector,Vector); - CGAL_DECL_CONSTRUCT(Difference_of_points,Vector); - CGAL_DECL_CONSTRUCT(Translated_point,Point); - CGAL_DECL_CONSTRUCT(Point_to_vector,Vector); - CGAL_DECL_CONSTRUCT(Vector_to_point,Point); - CGAL_DECL_CONSTRUCT(Construct_min_vertex,Point); - CGAL_DECL_CONSTRUCT(Construct_max_vertex,Point); - CGAL_DECL_CONSTRUCT(Construct_circumcenter,Point); - CGAL_DECL_CONSTRUCT(Point_drop_weight,Point); - CGAL_DECL_CONSTRUCT(Power_center,Weighted_point); -#undef CGAL_DECL_CONSTRUCT -#if 0 -#define CGAL_DECL_ITER_CONSTRUCT(X,Y) struct X##_tag {}; \ - template<>struct map_result_tag{typedef Y##_tag type;}; \ - template<>struct map_functor_type{typedef Construct_iterator_tag type;} - CGAL_DECL_ITER_CONSTRUCT(Construct_point_cartesian_const_iterator,Point_cartesian_const_iterator); - CGAL_DECL_ITER_CONSTRUCT(Construct_vector_cartesian_const_iterator,Vector_cartesian_const_iterator); -#undef CGAL_DECL_ITER_CONSTRUCT -#endif - - //FIXME: choose a convention: prefix with Predicate_ ? -#define CGAL_DECL_PREDICATE_(X) \ - templatestruct Get_functor_category{typedef Predicate_tag type;} -#define CGAL_DECL_PREDICATE(X) struct X##_tag {}; \ - CGAL_DECL_PREDICATE_(X) - CGAL_DECL_PREDICATE(Less_point_cartesian_coordinate); - CGAL_DECL_PREDICATE(Compare_point_cartesian_coordinate); - CGAL_DECL_PREDICATE(Compare_distance); - CGAL_DECL_PREDICATE(Compare_lexicographically); - CGAL_DECL_PREDICATE(Less_lexicographically); - CGAL_DECL_PREDICATE(Less_or_equal_lexicographically); - CGAL_DECL_PREDICATE(Equal_points); - CGAL_DECL_PREDICATE(Has_on_positive_side); - CGAL_DECL_PREDICATE_(Orientation); // duplicate with the type - CGAL_DECL_PREDICATE_(Oriented_side); // duplicate with the type - CGAL_DECL_PREDICATE(Orientation_of_points); - CGAL_DECL_PREDICATE(Orientation_of_vectors); - CGAL_DECL_PREDICATE(Side_of_oriented_sphere); - CGAL_DECL_PREDICATE(Side_of_bounded_sphere); - CGAL_DECL_PREDICATE(Side_of_bounded_circumsphere); - CGAL_DECL_PREDICATE(Contained_in_affine_hull); - CGAL_DECL_PREDICATE(In_flat_orientation); - CGAL_DECL_PREDICATE(In_flat_side_of_oriented_sphere); - CGAL_DECL_PREDICATE(Construct_flat_orientation); // Making it a predicate is a questionable choice, it should be possible to let it be a construction for some implementations. Not sure how to do that... 
TODO - CGAL_DECL_PREDICATE(Linear_rank); - CGAL_DECL_PREDICATE(Affine_rank); - CGAL_DECL_PREDICATE(Linearly_independent); - CGAL_DECL_PREDICATE(Affinely_independent); - CGAL_DECL_PREDICATE(Contained_in_linear_hull); - CGAL_DECL_PREDICATE(Contained_in_simplex); - CGAL_DECL_PREDICATE(Power_side_of_power_sphere_raw); - CGAL_DECL_PREDICATE(Power_side_of_power_sphere); - CGAL_DECL_PREDICATE(In_flat_power_side_of_power_sphere_raw); - CGAL_DECL_PREDICATE(In_flat_power_side_of_power_sphere); -#undef CGAL_DECL_PREDICATE - -#define CGAL_DECL_MISC(X) struct X##_tag {}; \ - templatestruct Get_functor_category{typedef Misc_tag type;} - //TODO: split into _begin and _end ? - //CGAL_DECL_MISC(Construct_point_cartesian_const_iterator); - //CGAL_DECL_MISC(Construct_vector_cartesian_const_iterator); - CGAL_DECL_MISC(Point_dimension); - CGAL_DECL_MISC(Vector_dimension); - CGAL_DECL_MISC(Linear_base); // Find a more appropriate category? -#undef CGAL_DECL_MISC - - - // Properties for LA - struct Has_extra_dimension_tag {}; - struct Has_vector_plus_minus_tag {}; - struct Has_vector_scalar_ops_tag {}; - struct Has_dot_product_tag {}; - struct Has_determinant_of_vectors_tag {}; - struct Has_determinant_of_points_tag {}; - struct Has_determinant_of_iterator_to_vectors_tag {}; - struct Has_determinant_of_iterator_to_points_tag {}; - struct Has_determinant_of_vectors_omit_last_tag {}; - struct Stores_squared_norm_tag {}; - - template struct Preserved_by_non_linear_extra_coordinate - : boost::false_type {}; - template<> struct Preserved_by_non_linear_extra_coordinate - : boost::true_type {}; - template<> struct Preserved_by_non_linear_extra_coordinate - : boost::true_type {}; - template<> struct Preserved_by_non_linear_extra_coordinate - : boost::true_type {}; - template<> struct Preserved_by_non_linear_extra_coordinate - : boost::true_type {}; - template<> struct Preserved_by_non_linear_extra_coordinate - : boost::true_type {}; - template<> struct Preserved_by_non_linear_extra_coordinate - : boost::true_type {}; - - // Kernel properties - struct Point_stores_squared_distance_to_origin_tag {}; - -} -#endif // CGAL_FUNCTOR_TAGS_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h deleted file mode 100644 index 21858804..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_STATIC_INT_H -#define CGAL_STATIC_INT_H -#include - -namespace CGAL { -template struct static_zero { - operator NT() const { return constant(); } -}; -template struct static_one { - operator NT() const { return constant(); } -}; - -template static_zero operator-(static_zero) { return static_zero(); } - -template NT operator+(NT const& x, static_zero) { return x; } -template NT operator+(static_zero, NT const& x) { return x; } -template static_zero operator+(static_zero, static_zero) { return static_zero(); } -template static_one operator+(static_zero, static_one) { return static_one(); } -template static_one operator+(static_one, static_zero) { return static_one(); } - -template NT operator-(NT const& x, static_zero) { return x; } -template NT operator-(static_zero, NT const& x) { return -x; } -template static_zero operator-(static_zero, static_zero) { return static_zero(); } -template static_zero operator-(static_one, static_one) { return static_zero(); } -template static_one operator-(static_one, static_zero) { return static_one(); } - -template NT operator*(NT const& x, static_one) { return x; } -template NT operator*(static_one, NT const& x) { return x; } -template static_zero operator*(NT const&, static_zero) { return static_zero(); } -template static_zero operator*(static_zero, NT const&) { return static_zero(); } -template static_zero operator*(static_zero, static_zero) { return static_zero(); } -template static_one operator*(static_one, static_one) { return static_one(); } -template static_zero operator*(static_zero, static_one) { return static_zero(); } -template static_zero operator*(static_one, static_zero) { return static_zero(); } - -template NT operator/(NT const& x, static_one) { return x; } -template static_zero operator/(static_zero, NT const&) { return static_zero(); } -template static_zero operator/(static_zero, static_one) { return static_zero(); } -template static_one operator/(static_one, static_one) { return static_one(); } - -} -#endif // CGAL_STATIC_INT_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h deleted file mode 100644 index 253e1282..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_STORE_KERNEL_H -#define CGAL_STORE_KERNEL_H - -#include -#include - -namespace CGAL { -namespace internal { -BOOST_MPL_HAS_XXX_TRAIT_DEF(Do_not_store_kernel) -template::value,bool=has_Do_not_store_kernel::value> struct Do_not_store_kernel { - enum { value=false }; - typedef Tag_false type; -}; -template struct Do_not_store_kernel { - enum { value=true }; - typedef Tag_true type; -}; -template struct Do_not_store_kernel { - typedef typename T::Do_not_store_kernel type; - enum { value=type::value }; -}; -} - -template::value> -struct Store_kernel { - Store_kernel(){} - Store_kernel(R_ const&){} - enum { kernel_is_stored = false }; - R_ kernel()const{return R_();} - typedef R_ reference_type; - void set_kernel(R_ const&){} -}; -template -struct Store_kernel { - Store_kernel():rp(0){ - CGAL_warning_msg(true,"I should know my kernel"); - } - Store_kernel(R_ const& r):rp(&r){} - enum { kernel_is_stored = true }; - R_ const& kernel()const{ - CGAL_warning_msg(rp!=0,"I should know my kernel"); - return *rp; - } - typedef R_ const& reference_type; - void set_kernel(R_ const&r){rp=&r;} - private: - R_ const* rp; -}; - -//For a second kernel. TODO: find something more elegant -template::value> -struct Store_kernel2 { - Store_kernel2(){} - Store_kernel2(R_ const&){} - enum { kernel2_is_stored = false }; - R_ kernel2()const{return R_();} - typedef R_ reference2_type; - void set_kernel2(R_ const&){} -}; -template -struct Store_kernel2 { - Store_kernel2(){ - //CGAL_warning_msg(true,"I should know my kernel"); - } - Store_kernel2(R_ const& r):rp(&r){} - enum { kernel2_is_stored = true }; - R_ const& kernel2()const{ - CGAL_warning_msg(rp==0,"I should know my kernel"); - return *rp; - } - typedef R_ const& reference2_type; - void set_kernel2(R_ const&r){rp=&r;} - private: - R_ const* rp; -}; -} -#define CGAL_BASE_INIT(X,Y) \ - X():Y(){} \ - X(R_ const&r):Y(r){} -#define CGAL_FUNCTOR_INIT_STORE(X) CGAL_BASE_INIT(X,Store_kernel) -#define CGAL_FUNCTOR_INIT_IGNORE(X) \ - X(){} \ - X(R_ const&){} - -#endif // CGAL_STORE_KERNEL_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h deleted file mode 100644 index 238a2230..00000000 --- a/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h +++ /dev/null @@ -1,306 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_MARCUTILS -#define CGAL_MARCUTILS - -#include - -#if defined(BOOST_MSVC) -# pragma warning(push) -# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I' - // http://lists.boost.org/boost-users/2014/11/83291.php -#endif - -#ifdef CGAL_CXX11 -#include -#include -#define CGAL_FORWARDABLE(T) T&& -#define CGAL_FORWARD(T,t) std::forward(t) -#define CGAL_MOVE(t) std::move(t) -#define CGAL_CONSTEXPR constexpr -#else -#define CGAL_FORWARDABLE(T) T const& -#define CGAL_FORWARD(T,t) t -#define CGAL_MOVE(t) t -#define CGAL_CONSTEXPR -#endif -#include -#include -#include -#include -#include -#include -#include - -#ifdef CGAL_CXX11 -#define CGAL_BOOSTD std:: -#else -#define CGAL_BOOSTD boost:: -#endif - -namespace CGAL { -namespace internal { - BOOST_MPL_HAS_XXX_TRAIT_DEF(type) -} - -template ::value /*false*/> -struct Has_type_different_from : boost::false_type {}; -template -struct Has_type_different_from -: boost::mpl::not_ > {}; - - - template struct Wrap_type { typedef T type; }; - - // tell a function f(a,b,c) that its real argument is a(b,c) - struct Eval_functor {}; - - // forget the first argument. Useful to make something dependant - // (and thus usable in SFINAE), although that's not a great design. - template struct Second_arg { - typedef B type; - }; - - // like std::forward, except for basic types where it does a cast, to - // avoid issues with narrowing conversions -#ifdef CGAL_CXX11 - template inline - typename std::conditional::value&&std::is_arithmetic::type>::value,T,U&&>::type - forward_safe(V&& u) { return std::forward(u); } -#else - template inline U const& forward_safe(U const& u) { - return u; - } -#endif - -#ifdef CGAL_CXX11 - template struct Constructible_from_each; - template struct Constructible_from_each{ - enum { value=std::is_convertible::value&&Constructible_from_each::value }; - }; - template struct Constructible_from_each{ - enum { value=true }; - }; -#else -// currently only used in C++0X code -#endif - - template struct Scale { -#ifndef CGAL_CXX11 - template struct result; - template struct result { - typedef FT type; - }; -#endif - T const& scale; - Scale(T const& t):scale(t){} - template -#ifdef CGAL_CXX11 - auto operator()(FT&& x)const->decltype(scale*std::forward(x)) -#else - FT operator()(FT const& x)const -#endif - { - return scale*CGAL_FORWARD(FT,x); - } - }; - template struct Divide { -#if !defined(CGAL_CXX11) || !defined(BOOST_RESULT_OF_USE_DECLTYPE) - // requires boost > 1.44 - // shouldn't be needed with C++0X - //template struct result; - //template struct result { - // typedef FT type; - //}; - typedef NT result_type; -#endif - T const& scale; - Divide(T const& t):scale(t){} - template -#ifdef CGAL_CXX11 - //FIXME: gcc complains for Gmpq - //auto operator()(FT&& x)const->decltype(Rational_traits().make_rational(std::forward(x),scale)) - NT operator()(FT&& x)const -#else - NT operator()(FT const& x)const -#endif - { - return Rational_traits(). 
- make_rational(CGAL_FORWARD(FT,x),scale); - } - }; - - template struct has_cheap_constructor : boost::is_arithmetic{}; - template struct has_cheap_constructor > { - enum { value=true }; - }; - - // like std::multiplies but allows mixing types - // in C++11 in doesn't need to be a template - template < class Ret > - struct multiplies { - template -#ifdef CGAL_CXX11 - auto operator()(A&&a,B&&b)const->decltype(std::forward(a)*std::forward(b)) -#else - Ret operator()(A const& a, B const& b)const -#endif - { - return CGAL_FORWARD(A,a)*CGAL_FORWARD(B,b); - } - }; - template < class Ret > - struct division { - template -#ifdef CGAL_CXX11 - auto operator()(A&&a,B&&b)const->decltype(std::forward(a)/std::forward(b)) -#else - Ret operator()(A const& a, B const& b)const -#endif - { - return CGAL_FORWARD(A,a)/CGAL_FORWARD(B,b); - } - }; - -#ifdef CGAL_CXX11 - using std::decay; -#else - template struct decay : boost::remove_cv::type> {}; -#endif - - template struct Type_copy_ref { typedef U type; }; - template struct Type_copy_ref { typedef U& type; }; -#ifdef CGAL_CXX11 - template struct Type_copy_ref { typedef U&& type; }; -#endif - template struct Type_copy_cv { typedef U type; }; - template struct Type_copy_cv { typedef U const type; }; - template struct Type_copy_cv { typedef U volatile type; }; - template struct Type_copy_cv { typedef U const volatile type; }; - - template struct Type_copy_cvref : - Type_copy_ref::type,U>::type> {}; - - struct Dereference_functor { - template struct result{}; - template struct result { - typedef typename std::iterator_traits::reference type; - }; - template typename result::type - operator()(It const&i)const{ - return *i; - } - }; - -#ifdef CGAL_CXX11 - template struct Indices{}; - template struct Next_increasing_indices; - template struct Next_increasing_indices > { - typedef Indices type; - }; - template struct N_increasing_indices { - typedef typename Next_increasing_indices::type>::type type; - }; - template<> struct N_increasing_indices<0> { typedef Indices<> type; }; - namespace internal { - template inline typename std::result_of::type - do_call_on_tuple_elements(F&&f, std::tuple&&t, Indices&&) { - return f(std::get(std::move(t))...); - } - } // internal - template - inline typename std::result_of::type - call_on_tuple_elements(F&&f, std::tuple&&t) { - return internal::do_call_on_tuple_elements(std::forward(f),std::move(t), - typename N_increasing_indices::type()); - } -#else -#define CGAL_VAR(Z,N,_) cpp0x::get(t) -#define CGAL_CODE(Z,N,_) template \ - inline Res call_on_tuple_elements(F const&f, \ - cpp0x::tuple const&t) { \ - return f(BOOST_PP_ENUM(N,CGAL_VAR,)); \ - } - template - inline Res call_on_tuple_elements(F const&f, cpp0x::tuple<>) { - return f(); - } -BOOST_PP_REPEAT_FROM_TO(1, 8, CGAL_CODE, _ ) -#undef CGAL_CODE -#undef CGAL_VAR -#endif - - template struct Factory { - typedef A result_type; -#ifdef CGAL_CXX11 - template result_type operator()(U&&...u)const{ - return A(std::forward(u)...); - } -#else - result_type operator()()const{ - return A(); - } -#define CGAL_CODE(Z,N,_) template \ - result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ - return A(BOOST_PP_ENUM_PARAMS(N,u)); \ - } -BOOST_PP_REPEAT_FROM_TO(1, 8, CGAL_CODE, _ ) -#undef CGAL_CODE -#endif - }; -} - -// TODO: make a Cartesian-only variant -// WARNING: do not use the Req* parameters too much, they can cause circular instanciations and are only useful for dispatching. -#define CGAL_STRIP_PAREN_(...) __VA_ARGS__ -#define CGAL_STRIP_PAREN(...) 
CGAL_STRIP_PAREN_ __VA_ARGS__ -// What to do with O? pass it down to other functors or drop it? -#define CGAL_KD_DEFAULT_FUNCTOR(Tg,Name,ReqTyp,ReqFun) \ - template \ - struct Get_functor::value \ - || !Provides_types >::value \ - || !Provides_functors >::value \ - , int, void>::type> \ - { \ - typedef CGAL_STRIP_PAREN_ Name type; \ - typedef K Bound_kernel; \ - } - -// Not used yet, may need some changes. -#define CGAL_KD_DEFAULT_TYPE(Tg,Name,ReqTyp,ReqFun) \ - template \ - struct Get_type::value \ - || !Provides_types >::value \ - || !Provides_functors >::value \ - , int, void>::type> \ - { \ - typedef CGAL_STRIP_PAREN_ Name type; \ - typedef K Bound_kernel; \ - } - -#if defined(BOOST_MSVC) -# pragma warning(pop) -#endif - -#endif diff --git a/src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h b/src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h deleted file mode 100644 index e29ce14f..00000000 --- a/src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h +++ /dev/null @@ -1,620 +0,0 @@ -// Copyright (c) 2002,2011 Utrecht University (The Netherlands). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// -// Author(s) : Hans Tangelder () - -#ifndef CGAL_ORTHOGONAL_INCREMENTAL_NEIGHBOR_SEARCH -#define CGAL_ORTHOGONAL_INCREMENTAL_NEIGHBOR_SEARCH - -#include -#include -#include -#include -#include -#include -#include - -namespace CGAL { - - template ::type, - class Splitter_ = Sliding_midpoint, - class Tree_= Kd_tree > - class Orthogonal_incremental_neighbor_search { - - public: - typedef Splitter_ Splitter; - typedef Tree_ Tree; - typedef Distance_ Distance; - typedef typename SearchTraits::Point_d Point_d; - typedef typename Distance::Query_item Query_item; - typedef typename SearchTraits::FT FT; - typedef typename Tree::Point_d_iterator Point_d_iterator; - typedef typename Tree::Node_const_handle Node_const_handle; - - typedef std::pair Point_with_transformed_distance; - typedef CGAL::cpp11::tuple > Node_with_distance; - typedef std::vector Node_with_distance_vector; - typedef std::vector Point_with_transformed_distance_vector; - - template - struct Object_wrapper - { - T object; - Object_wrapper(const T& t):object(t){} - const T& operator* () const { return object; } - const T* operator-> () const { return &object; } - }; - - class Iterator_implementation { - SearchTraits traits; - public: - - int number_of_neighbours_computed; - int number_of_internal_nodes_visited; - int number_of_leaf_nodes_visited; - int number_of_items_visited; - - private: - - typedef std::vector Distance_vector; - - Distance_vector dists; - - Distance Orthogonal_distance_instance; - - FT multiplication_factor; - - Query_item query_point; - - FT distance_to_root; - - bool search_nearest_neighbour; - - FT rd; - - - class Priority_higher { - public: - - bool search_nearest; - - Priority_higher(bool search_the_nearest_neighbour) - : 
search_nearest(search_the_nearest_neighbour) - {} - - //highest priority is smallest distance - bool - operator() (Node_with_distance* n1, Node_with_distance* n2) const - { - return (search_nearest) ? (CGAL::cpp11::get<1>(*n1) > CGAL::cpp11::get<1>(*n2)) : (CGAL::cpp11::get<1>(*n2) > CGAL::cpp11::get<1>(*n1)); - } - }; - - class Distance_smaller { - - public: - - bool search_nearest; - - Distance_smaller(bool search_the_nearest_neighbour) - : search_nearest(search_the_nearest_neighbour) - {} - - //highest priority is smallest distance - bool operator() (Point_with_transformed_distance* p1, Point_with_transformed_distance* p2) const - { - return (search_nearest) ? (p1->second > p2->second) : (p2->second > p1->second); - } - }; - - - std::priority_queue PriorityQueue; - - public: - std::priority_queue Item_PriorityQueue; - - - public: - - int reference_count; - - - - // constructor - Iterator_implementation(const Tree& tree,const Query_item& q, const Distance& tr, - FT Eps=FT(0.0), bool search_nearest=true) - : traits(tree.traits()),number_of_neighbours_computed(0), number_of_internal_nodes_visited(0), - number_of_leaf_nodes_visited(0), number_of_items_visited(0), - Orthogonal_distance_instance(tr), multiplication_factor(Orthogonal_distance_instance.transformed_distance(FT(1.0)+Eps)), - query_point(q), search_nearest_neighbour(search_nearest), - PriorityQueue(Priority_higher(search_nearest)), Item_PriorityQueue(Distance_smaller(search_nearest)), - reference_count(1) - - - { - if (tree.empty()) return; - - typename SearchTraits::Construct_cartesian_const_iterator_d ccci=traits.construct_cartesian_const_iterator_d_object(); - int dim = static_cast(std::distance(ccci(q), ccci(q,0))); - - dists.resize(dim); - for(int i=0 ; i(*The_Root); - Compute_the_next_nearest_neighbour(); - } - else{ - distance_to_root= - Orthogonal_distance_instance.max_distance_to_rectangle(q, - tree.bounding_box(), dists); - Node_with_distance *The_Root = new Node_with_distance(tree.root(), - distance_to_root, dists); - PriorityQueue.push(The_Root); - - // rd is the distance of the top of the priority queue to q - rd=CGAL::cpp11::get<1>(*The_Root); - Compute_the_next_furthest_neighbour(); - } - - - } - - // * operator - const Point_with_transformed_distance& - operator* () const - { - return *(Item_PriorityQueue.top()); - } - - // prefix operator - Iterator_implementation& - operator++() - { - Delete_the_current_item_top(); - if(search_nearest_neighbour) - Compute_the_next_nearest_neighbour(); - else - Compute_the_next_furthest_neighbour(); - return *this; - } - - // postfix operator - Object_wrapper - operator++(int) - { - Object_wrapper result( *(Item_PriorityQueue.top()) ); - ++*this; - return result; - } - - // Print statistics of the general priority search process. 
- std::ostream& - statistics (std::ostream& s) const { - s << "Orthogonal priority search statistics:" - << std::endl; - s << "Number of internal nodes visited:" - << number_of_internal_nodes_visited << std::endl; - s << "Number of leaf nodes visited:" - << number_of_leaf_nodes_visited << std::endl; - s << "Number of items visited:" - << number_of_items_visited << std::endl; - s << "Number of neighbours computed:" - << number_of_neighbours_computed << std::endl; - return s; - } - - - //destructor - ~Iterator_implementation() - { - while (!PriorityQueue.empty()) { - Node_with_distance* The_top=PriorityQueue.top(); - PriorityQueue.pop(); - delete The_top; - } - while (!Item_PriorityQueue.empty()) { - Point_with_transformed_distance* The_top=Item_PriorityQueue.top(); - Item_PriorityQueue.pop(); - delete The_top; - } - } - - private: - - void - Delete_the_current_item_top() - { - Point_with_transformed_distance* The_item_top=Item_PriorityQueue.top(); - Item_PriorityQueue.pop(); - delete The_item_top; - } - - void - Compute_the_next_nearest_neighbour() - { - // compute the next item - bool next_neighbour_found=false; - if (!(Item_PriorityQueue.empty())) { - next_neighbour_found= - (multiplication_factor*rd > Item_PriorityQueue.top()->second); - } - typename SearchTraits::Construct_cartesian_const_iterator_d construct_it=traits.construct_cartesian_const_iterator_d_object(); - typename SearchTraits::Cartesian_const_iterator_d query_point_it = construct_it(query_point); - // otherwise browse the tree further - while ((!next_neighbour_found) && (!PriorityQueue.empty())) { - Node_with_distance* The_node_top=PriorityQueue.top(); - Node_const_handle N= CGAL::cpp11::get<0>(*The_node_top); - dists = CGAL::cpp11::get<2>(*The_node_top); - PriorityQueue.pop(); - delete The_node_top; - FT copy_rd=rd; - while (!(N->is_leaf())) { // compute new distance - typename Tree::Internal_node_const_handle node = - static_cast(N); - number_of_internal_nodes_visited++; - int new_cut_dim=node->cutting_dimension(); - FT new_rd,dst = dists[new_cut_dim]; - FT val = *(query_point_it + new_cut_dim); - FT diff1 = val - node->upper_low_value(); - FT diff2 = val - node->lower_high_value(); - if (diff1 + diff2 < FT(0.0)) { - new_rd= - Orthogonal_distance_instance.new_distance(copy_rd,dst,diff1,new_cut_dim); - - CGAL_assertion(new_rd >= copy_rd); - dists[new_cut_dim] = diff1; - Node_with_distance *Upper_Child = - new Node_with_distance(node->upper(), new_rd, dists); - PriorityQueue.push(Upper_Child); - dists[new_cut_dim] = dst; - N=node->lower(); - - } - else { // compute new distance - new_rd=Orthogonal_distance_instance.new_distance(copy_rd,dst,diff2,new_cut_dim); - CGAL_assertion(new_rd >= copy_rd); - dists[new_cut_dim] = diff2; - Node_with_distance *Lower_Child = - new Node_with_distance(node->lower(), new_rd, dists); - PriorityQueue.push(Lower_Child); - dists[new_cut_dim] = dst; - N=node->upper(); - } - } - // n is a leaf - typename Tree::Leaf_node_const_handle node = - static_cast(N); - number_of_leaf_nodes_visited++; - if (node->size() > 0) { - for (typename Tree::iterator it=node->begin(); it != node->end(); it++) { - number_of_items_visited++; - FT distance_to_query_point= - Orthogonal_distance_instance.transformed_distance(query_point,*it); - Point_with_transformed_distance *NN_Candidate= - new Point_with_transformed_distance(*it,distance_to_query_point); - Item_PriorityQueue.push(NN_Candidate); - } - // old top of PriorityQueue has been processed, - // hence update rd - - if (!(PriorityQueue.empty())) { - rd = 
CGAL::cpp11::get<1>(*PriorityQueue.top()); - next_neighbour_found = - (multiplication_factor*rd > - Item_PriorityQueue.top()->second); - } - else // priority queue empty => last neighbour found - { - next_neighbour_found=true; - } - - number_of_neighbours_computed++; - } - } // next_neighbour_found or priority queue is empty - // in the latter case also the item priority quee is empty - } - - - void - Compute_the_next_furthest_neighbour() - { - // compute the next item - bool next_neighbour_found=false; - if (!(Item_PriorityQueue.empty())) { - next_neighbour_found= - (rd < multiplication_factor*Item_PriorityQueue.top()->second); - } - typename SearchTraits::Construct_cartesian_const_iterator_d construct_it=traits.construct_cartesian_const_iterator_d_object(); - typename SearchTraits::Cartesian_const_iterator_d query_point_it = construct_it(query_point); - // otherwise browse the tree further - while ((!next_neighbour_found) && (!PriorityQueue.empty())) { - Node_with_distance* The_node_top=PriorityQueue.top(); - Node_const_handle N= CGAL::cpp11::get<0>(*The_node_top); - dists = CGAL::cpp11::get<2>(*The_node_top); - PriorityQueue.pop(); - delete The_node_top; - FT copy_rd=rd; - while (!(N->is_leaf())) { // compute new distance - typename Tree::Internal_node_const_handle node = - static_cast(N); - number_of_internal_nodes_visited++; - int new_cut_dim=node->cutting_dimension(); - FT new_rd,dst = dists[new_cut_dim]; - FT val = *(query_point_it + new_cut_dim); - FT diff1 = val - node->upper_low_value(); - FT diff2 = val - node->lower_high_value(); - if (diff1 + diff2 < FT(0.0)) { - diff1 = val - node->upper_high_value(); - new_rd= - Orthogonal_distance_instance.new_distance(copy_rd,dst,diff1,new_cut_dim); - Node_with_distance *Lower_Child = - new Node_with_distance(node->lower(), copy_rd, dists); - PriorityQueue.push(Lower_Child); - N=node->upper(); - dists[new_cut_dim] = diff1; - copy_rd=new_rd; - - } - else { // compute new distance - diff2 = val - node->lower_low_value(); - new_rd=Orthogonal_distance_instance.new_distance(copy_rd,dst,diff2,new_cut_dim); - Node_with_distance *Upper_Child = - new Node_with_distance(node->upper(), copy_rd, dists); - PriorityQueue.push(Upper_Child); - N=node->lower(); - dists[new_cut_dim] = diff2; - copy_rd=new_rd; - } - } - // n is a leaf - typename Tree::Leaf_node_const_handle node = - static_cast(N); - number_of_leaf_nodes_visited++; - if (node->size() > 0) { - for (typename Tree::iterator it=node->begin(); it != node->end(); it++) { - number_of_items_visited++; - FT distance_to_query_point= - Orthogonal_distance_instance.transformed_distance(query_point,*it); - Point_with_transformed_distance *NN_Candidate= - new Point_with_transformed_distance(*it,distance_to_query_point); - Item_PriorityQueue.push(NN_Candidate); - } - // old top of PriorityQueue has been processed, - // hence update rd - - if (!(PriorityQueue.empty())) { - rd = CGAL::cpp11::get<1>(*PriorityQueue.top()); - next_neighbour_found = - (multiplication_factor*rd < - Item_PriorityQueue.top()->second); - } - else // priority queue empty => last neighbour found - { - next_neighbour_found=true; - } - - number_of_neighbours_computed++; - } - } // next_neighbour_found or priority queue is empty - // in the latter case also the item priority quee is empty - } - }; // class Iterator_implementaion - - - - - - - - - - public: - class iterator; - typedef iterator const_iterator; - - // constructor - Orthogonal_incremental_neighbor_search(const Tree& tree, - const Query_item& q, FT Eps = FT(0.0), - bool 
search_nearest=true, const Distance& tr=Distance()) - : m_tree(tree),m_query(q),m_dist(tr),m_Eps(Eps),m_search_nearest(search_nearest) - {} - - iterator - begin() const - { - return iterator(m_tree,m_query,m_dist,m_Eps,m_search_nearest); - } - - iterator - end() const - { - return iterator(); - } - - std::ostream& - statistics(std::ostream& s) - { - begin()->statistics(s); - return s; - } - - - - - class iterator { - - public: - - typedef std::input_iterator_tag iterator_category; - typedef Point_with_transformed_distance value_type; - typedef Point_with_transformed_distance* pointer; - typedef const Point_with_transformed_distance& reference; - typedef std::size_t size_type; - typedef std::ptrdiff_t difference_type; - typedef int distance_type; - - //class Iterator_implementation; - Iterator_implementation *Ptr_implementation; - - - public: - - // default constructor - iterator() - : Ptr_implementation(0) - {} - - int - the_number_of_items_visited() - { - return Ptr_implementation->number_of_items_visited; - } - - // constructor - iterator(const Tree& tree,const Query_item& q, const Distance& tr=Distance(), FT eps=FT(0.0), - bool search_nearest=true) - : Ptr_implementation(new Iterator_implementation(tree, q, tr, eps, search_nearest)) - {} - - // copy constructor - iterator(const iterator& Iter) - { - Ptr_implementation = Iter.Ptr_implementation; - if (Ptr_implementation != 0) Ptr_implementation->reference_count++; - } - - iterator& operator=(const iterator& Iter) - { - if (Ptr_implementation != Iter.Ptr_implementation){ - if (Ptr_implementation != 0 && --(Ptr_implementation->reference_count)==0) { - delete Ptr_implementation; - } - Ptr_implementation = Iter.Ptr_implementation; - if (Ptr_implementation != 0) Ptr_implementation->reference_count++; - } - return *this; - } - - - const Point_with_transformed_distance& - operator* () const - { - return *(*Ptr_implementation); - } - - // -> operator - const Point_with_transformed_distance* - operator-> () const - { - return &*(*Ptr_implementation); - } - - // prefix operator - iterator& - operator++() - { - ++(*Ptr_implementation); - return *this; - } - - // postfix operator - Object_wrapper - operator++(int) - { - return (*Ptr_implementation)++; - } - - - bool - operator==(const iterator& It) const - { - if ( - ((Ptr_implementation == 0) || - Ptr_implementation->Item_PriorityQueue.empty()) && - ((It.Ptr_implementation == 0) || - It.Ptr_implementation->Item_PriorityQueue.empty()) - ) - return true; - // else - return (Ptr_implementation == It.Ptr_implementation); - } - - bool - operator!=(const iterator& It) const - { - return !(*this == It); - } - - std::ostream& - statistics (std::ostream& s) - { - Ptr_implementation->statistics(s); - return s; - } - - ~iterator() - { - if (Ptr_implementation != 0) { - Ptr_implementation->reference_count--; - if (Ptr_implementation->reference_count==0) { - delete Ptr_implementation; - Ptr_implementation = 0; - } - } - } - - - }; // class iterator - - //data members - const Tree& m_tree; - Query_item m_query; - Distance m_dist; - FT m_Eps; - bool m_search_nearest; - }; // class - - template - void swap (typename Orthogonal_incremental_neighbor_search::iterator& x, - typename Orthogonal_incremental_neighbor_search::iterator& y) - { - typename Orthogonal_incremental_neighbor_search::iterator::Iterator_implementation - *tmp = x.Ptr_implementation; - x.Ptr_implementation = y.Ptr_implementation; - y.Ptr_implementation = tmp; - } - -} // namespace CGAL - -#endif // CGAL_ORTHOGONAL_INCREMENTAL_NEIGHBOR_SEARCH_H diff 
--git a/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h b/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h deleted file mode 100644 index 111c6ac9..00000000 --- a/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h +++ /dev/null @@ -1,1169 +0,0 @@ -// Copyright (c) 2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Clement Jamin - -#ifndef CGAL_REGULAR_TRIANGULATION_H -#define CGAL_REGULAR_TRIANGULATION_H - -#include -#include -#include -#include -#include - -#include - -namespace CGAL { - -template< typename Traits_, typename TDS_ = Default > -class Regular_triangulation -: public Triangulation< - Regular_triangulation_traits_adapter, - typename Default::Get< - TDS_, - Triangulation_data_structure< - typename Regular_triangulation_traits_adapter::Dimension, - Triangulation_vertex >, - Triangulation_full_cell > - > - >::type> -{ - typedef Regular_triangulation_traits_adapter RTTraits; - typedef typename RTTraits::Dimension Maximal_dimension_; - typedef typename Default::Get< - TDS_, - Triangulation_data_structure< - Maximal_dimension_, - Triangulation_vertex, - Triangulation_full_cell - > >::type TDS; - typedef Triangulation Base; - typedef Regular_triangulation Self; - - typedef typename RTTraits::Orientation_d Orientation_d; - typedef typename RTTraits::Power_side_of_power_sphere_d Power_side_of_power_sphere_d; - typedef typename RTTraits::In_flat_power_side_of_power_sphere_d - In_flat_power_side_of_power_sphere_d; - typedef typename RTTraits::Flat_orientation_d Flat_orientation_d; - typedef typename RTTraits::Construct_flat_orientation_d Construct_flat_orientation_d; - -public: // PUBLIC NESTED TYPES - - typedef RTTraits Geom_traits; - typedef typename Base::Triangulation_ds Triangulation_ds; - - typedef typename Base::Vertex Vertex; - typedef typename Base::Full_cell Full_cell; - typedef typename Base::Facet Facet; - typedef typename Base::Face Face; - - typedef Maximal_dimension_ Maximal_dimension; - typedef typename RTTraits::Bare_point_d Bare_point; - typedef typename RTTraits::Weighted_point_d Weighted_point; - - typedef typename Base::Point_const_iterator Point_const_iterator; - typedef typename Base::Vertex_handle Vertex_handle; - typedef typename Base::Vertex_iterator Vertex_iterator; - typedef typename Base::Vertex_const_handle Vertex_const_handle; - typedef typename Base::Vertex_const_iterator Vertex_const_iterator; - - typedef typename Base::Full_cell_handle Full_cell_handle; - typedef typename Base::Full_cell_iterator Full_cell_iterator; - typedef typename Base::Full_cell_const_handle Full_cell_const_handle; - typedef typename Base::Full_cell_const_iterator Full_cell_const_iterator; - typedef typename Base::Finite_full_cell_const_iterator - Finite_full_cell_const_iterator; - - typedef typename Base::size_type size_type; - typedef typename Base::difference_type difference_type; - - typedef typename 
Base::Locate_type Locate_type; - - //Tag to distinguish Delaunay from Regular triangulations - typedef Tag_true Weighted_tag; - -protected: // DATA MEMBERS - - -public: - - using typename Base::Rotor; - using Base::maximal_dimension; - using Base::are_incident_full_cells_valid; - using Base::coaffine_orientation_predicate; - using Base::reset_flat_orientation; - using Base::current_dimension; - using Base::geom_traits; - using Base::index_of_covertex; - //using Base::index_of_second_covertex; - using Base::rotate_rotor; - using Base::infinite_vertex; - using Base::insert_in_hole; - using Base::is_infinite; - using Base::locate; - using Base::points_begin; - using Base::set_neighbors; - using Base::new_full_cell; - using Base::number_of_vertices; - using Base::orientation; - using Base::tds; - using Base::reorient_full_cells; - using Base::full_cell; - using Base::full_cells_begin; - using Base::full_cells_end; - using Base::finite_full_cells_begin; - using Base::finite_full_cells_end; - using Base::vertices_begin; - using Base::vertices_end; - -private: - - // Wrapper - struct Power_side_of_power_sphere_for_non_maximal_dim_d - { - boost::optional* fop; - Construct_flat_orientation_d cfo; - In_flat_power_side_of_power_sphere_d ifpt; - - Power_side_of_power_sphere_for_non_maximal_dim_d( - boost::optional& x, - Construct_flat_orientation_d const&y, - In_flat_power_side_of_power_sphere_d const&z) - : fop(&x), cfo(y), ifpt(z) {} - - template - CGAL::Orientation operator()(Iter a, Iter b, const Weighted_point & p)const - { - if(!*fop) - *fop=cfo(a,b); - return ifpt(fop->get(),a,b,p); - } - }; - -public: - -// - - - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS - - Regular_triangulation(int dim, const Geom_traits &k = Geom_traits()) - : Base(dim, k) - { - } - - // With this constructor, - // the user can specify a Flat_orientation_d object to be used for - // orienting simplices of a specific dimension - // (= preset_flat_orientation_.first) - // It it used by the dark triangulations created by DT::remove - Regular_triangulation( - int dim, - const std::pair &preset_flat_orientation, - const Geom_traits &k = Geom_traits()) - : Base(dim, preset_flat_orientation, k) - { - } - - ~Regular_triangulation() {} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS - - // Not Documented - Power_side_of_power_sphere_for_non_maximal_dim_d power_side_of_power_sphere_for_non_maximal_dim_predicate() const - { - return Power_side_of_power_sphere_for_non_maximal_dim_d ( - flat_orientation_, - geom_traits().construct_flat_orientation_d_object(), - geom_traits().in_flat_power_side_of_power_sphere_d_object() - ); - } - - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS - - // Warning: these functions are not correct since they do not restore hidden - // vertices - - Full_cell_handle remove(Vertex_handle); - Full_cell_handle remove(const Weighted_point & p, Full_cell_handle hint = Full_cell_handle()) - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, hint); - if( Base::ON_VERTEX == lt ) - { - return remove(s->vertex(f.index(0))); - } - return Full_cell_handle(); - } - - template< typename ForwardIterator > - void remove(ForwardIterator start, ForwardIterator end) - { - while( start != end ) - remove(*start++); - } - - // Not documented - void remove_decrease_dimension(Vertex_handle); - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS - - 
template< typename ForwardIterator > - std::ptrdiff_t insert(ForwardIterator start, ForwardIterator end) - { - size_type n = number_of_vertices(); - typedef std::vector WP_vec; - WP_vec points(start, end); - - spatial_sort(points.begin(), points.end(), geom_traits()); - - Full_cell_handle hint; - for(typename WP_vec::const_iterator p = points.begin(); p != points.end(); ++p ) - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle c = locate (*p, lt, f, ft, hint); - Vertex_handle v = insert (*p, lt, f, ft, c); - - hint = v == Vertex_handle() ? c : v->full_cell(); - } - return number_of_vertices() - n; - } - - Vertex_handle insert(const Weighted_point &, - Locate_type, - const Face &, - const Facet &, - Full_cell_handle); - - Vertex_handle insert(const Weighted_point & p, - Full_cell_handle start = Full_cell_handle()) - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, start); - return insert(p, lt, f, ft, s); - } - - Vertex_handle insert(const Weighted_point & p, Vertex_handle hint) - { - CGAL_assertion( Vertex_handle() != hint ); - return insert(p, hint->full_cell()); - } - - Vertex_handle insert_outside_affine_hull(const Weighted_point &); - Vertex_handle insert_in_conflicting_cell( - const Weighted_point &, Full_cell_handle, - Vertex_handle only_if_this_vertex_is_in_the_cz = Vertex_handle()); - - Vertex_handle insert_if_in_star(const Weighted_point &, - Vertex_handle, - Locate_type, - const Face &, - const Facet &, - Full_cell_handle); - - Vertex_handle insert_if_in_star( - const Weighted_point & p, Vertex_handle star_center, - Full_cell_handle start = Full_cell_handle()) - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, start); - return insert_if_in_star(p, star_center, lt, f, ft, s); - } - - Vertex_handle insert_if_in_star( - const Weighted_point & p, Vertex_handle star_center, - Vertex_handle hint) - { - CGAL_assertion( Vertex_handle() != hint ); - return insert_if_in_star(p, star_center, hint->full_cell()); - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES - - bool is_in_conflict(const Weighted_point &, Full_cell_const_handle) const; - - template< class OrientationPredicate > - Oriented_side perturbed_power_side_of_power_sphere(const Weighted_point &, - Full_cell_const_handle, const OrientationPredicate &) const; - - template< typename OutputIterator > - Facet compute_conflict_zone(const Weighted_point &, Full_cell_handle, OutputIterator) const; - - template < typename OrientationPredicate, typename PowerTestPredicate > - class Conflict_predicate - { - const Self & rt_; - const Weighted_point & p_; - OrientationPredicate ori_; - PowerTestPredicate power_side_of_power_sphere_; - int cur_dim_; - public: - Conflict_predicate( - const Self & rt, - const Weighted_point & p, - const OrientationPredicate & ori, - const PowerTestPredicate & power_side_of_power_sphere) - : rt_(rt), p_(p), ori_(ori), power_side_of_power_sphere_(power_side_of_power_sphere), cur_dim_(rt.current_dimension()) {} - - inline - bool operator()(Full_cell_const_handle s) const - { - bool ok; - if( ! 
rt_.is_infinite(s) ) - { - Oriented_side power_side_of_power_sphere = power_side_of_power_sphere_(rt_.points_begin(s), rt_.points_begin(s) + cur_dim_ + 1, p_); - if( ON_POSITIVE_SIDE == power_side_of_power_sphere ) - ok = true; - else if( ON_NEGATIVE_SIDE == power_side_of_power_sphere ) - ok = false; - else - ok = ON_POSITIVE_SIDE == rt_.perturbed_power_side_of_power_sphere(p_, s, ori_); - } - else - { - typedef typename Full_cell::Vertex_handle_const_iterator VHCI; - typedef Substitute_point_in_vertex_iterator F; - F spivi(rt_.infinite_vertex(), &p_); - - Orientation o = ori_( - boost::make_transform_iterator(s->vertices_begin(), spivi), - boost::make_transform_iterator(s->vertices_begin() + cur_dim_ + 1, - spivi)); - - if( POSITIVE == o ) - ok = true; - else if( o == NEGATIVE ) - ok = false; - else - ok = (*this)(s->neighbor( s->index( rt_.infinite_vertex() ) )); - } - return ok; - } - }; - - template < typename ConflictPredicate > - class Conflict_traversal_predicate - { - const Self & rt_; - const ConflictPredicate & pred_; - public: - Conflict_traversal_predicate(const Self & rt, const ConflictPredicate & pred) - : rt_(rt), pred_(pred) - {} - inline - bool operator()(const Facet & f) const - { - return pred_(rt_.full_cell(f)->neighbor(rt_.index_of_covertex(f))); - } - }; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY - - bool is_valid(bool verbose = false, int level = 0) const; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MISC - - std::size_t number_of_hidden_vertices() const - { - return m_hidden_points.size(); - } - -private: - - template - bool - does_cell_range_contain_vertex(InputIterator cz_begin, InputIterator cz_end, - Vertex_handle vh) const - { - // Check all vertices - while(cz_begin != cz_end) - { - Full_cell_handle fch = *cz_begin; - for (int i = 0 ; i <= current_dimension() ; ++i) - { - if (fch->vertex(i) == vh) - return true; - } - ++cz_begin; - } - return false; - } - - template - void - process_conflict_zone(InputIterator cz_begin, InputIterator cz_end, - OutputIterator vertices_out) const - { - // Get all vertices - while(cz_begin != cz_end) - { - Full_cell_handle fch = *cz_begin; - for (int i = 0 ; i <= current_dimension() ; ++i) - { - Vertex_handle vh = fch->vertex(i); - if (vh->full_cell() != Full_cell_handle()) - { - (*vertices_out++) = vh; - vh->set_full_cell(Full_cell_handle()); - } - } - ++cz_begin; - } - } - - - template - void - process_cz_vertices_after_insertion(InputIterator vertices_begin, - InputIterator vertices_end) - { - // Get all vertices - while(vertices_begin != vertices_end) - { - Vertex_handle vh = *vertices_begin; - if (vh->full_cell() == Full_cell_handle()) - { - m_hidden_points.push_back(vh->point()); - tds().delete_vertex(vh); - } - ++vertices_begin; - } - } - -private: - // Some internal types to shorten notation - using typename Base::Coaffine_orientation_d; - using Base::flat_orientation_; - typedef Conflict_predicate - Conflict_pred_in_subspace; - typedef Conflict_predicate - Conflict_pred_in_fullspace; - typedef Conflict_traversal_predicate - Conflict_traversal_pred_in_subspace; - typedef Conflict_traversal_predicate - Conflict_traversal_pred_in_fullspace; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MEMBER VARIABLES - std::vector m_hidden_points; - -}; // class Regular_triangulation - - -// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = -// FUNCTIONS THAT ARE MEMBER METHODS: - -// - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - REMOVALS - - -// Warning: this function is not correct since it does not restore hidden -// vertices -template< typename Traits, typename TDS > -typename Regular_triangulation::Full_cell_handle -Regular_triangulation -::remove( Vertex_handle v ) -{ - CGAL_precondition( ! is_infinite(v) ); - CGAL_expensive_precondition( is_vertex(v) ); - - // THE CASE cur_dim == 0 - if( 0 == current_dimension() ) - { - remove_decrease_dimension(v); - return Full_cell_handle(); - } - else if( 1 == current_dimension() ) - { // THE CASE cur_dim == 1 - if( 2 == number_of_vertices() ) - { - remove_decrease_dimension(v); - return Full_cell_handle(); - } - Full_cell_handle left = v->full_cell(); - if( 0 == left->index(v) ) - left = left->neighbor(1); - CGAL_assertion( 1 == left->index(v) ); - Full_cell_handle right = left->neighbor(0); - tds().associate_vertex_with_full_cell(left, 1, right->vertex(1)); - set_neighbors(left, 0, right->neighbor(0), right->mirror_index(0)); - tds().delete_vertex(v); - tds().delete_full_cell(right); - return left; - } - - // THE CASE cur_dim >= 2 - // Gather the finite vertices sharing an edge with |v| - typedef typename Base::template Full_cell_set Simplices; - Simplices simps; - std::back_insert_iterator out(simps); - tds().incident_full_cells(v, out); - typedef std::set Vertex_set; - Vertex_set verts; - Vertex_handle vh; - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - for( int i = 0; i <= current_dimension(); ++i ) - { - vh = (*it)->vertex(i); - if( is_infinite(vh) ) - continue; - if( vh == v ) - continue; - verts.insert(vh); - } - - // After gathering finite neighboring vertices, create their Dark Delaunay triangulation - typedef Triangulation_vertex Dark_vertex_base; - typedef Triangulation_full_cell< - Geom_traits, - internal::Triangulation::Dark_full_cell_data > Dark_full_cell_base; - typedef Triangulation_data_structure Dark_tds; - typedef Regular_triangulation Dark_triangulation; - typedef typename Dark_triangulation::Face Dark_face; - typedef typename Dark_triangulation::Facet Dark_facet; - typedef typename Dark_triangulation::Vertex_handle Dark_v_handle; - typedef typename Dark_triangulation::Full_cell_handle Dark_s_handle; - - // If flat_orientation_ is defined, we give it the Dark triangulation - // so that the orientation it uses for "current_dimension()"-simplices is - // coherent with the global triangulation - Dark_triangulation dark_side( - maximal_dimension(), - flat_orientation_ ? - std::pair(current_dimension(), flat_orientation_.get_ptr()) - : std::pair(std::numeric_limits::max(), NULL) ); - - Dark_s_handle dark_s; - Dark_v_handle dark_v; - typedef std::map Vertex_map; - Vertex_map light_to_dark; - typename Vertex_set::iterator vit = verts.begin(); - while( vit != verts.end() ) - { - dark_v = dark_side.insert((*vit)->point(), dark_s); - dark_s = dark_v->full_cell(); - dark_v->data() = *vit; - light_to_dark[*vit] = dark_v; - ++vit; - } - - if( dark_side.current_dimension() != current_dimension() ) - { - CGAL_assertion( dark_side.current_dimension() + 1 == current_dimension() ); - // Here, the finite neighbors of |v| span a affine subspace of - // dimension one less than the current dimension. Two cases are possible: - if( (size_type)(verts.size() + 1) == number_of_vertices() ) - { - remove_decrease_dimension(v); - return Full_cell_handle(); - } - else - { // |v| is strictly outside the convex hull of the rest of the points. 
This is an - // easy case: first, modify the finite full_cells, then, delete the infinite ones. - // We don't even need the Dark triangulation. - Simplices infinite_simps; - { - Simplices finite_simps; - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - if( is_infinite(*it) ) - infinite_simps.push_back(*it); - else - finite_simps.push_back(*it); - simps.swap(finite_simps); - } // now, simps only contains finite simplices - // First, modify the finite full_cells: - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - { - int v_idx = (*it)->index(v); - tds().associate_vertex_with_full_cell(*it, v_idx, infinite_vertex()); - } - // Make the handles to infinite full cells searchable - infinite_simps.make_searchable(); - // Then, modify the neighboring relation - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - { - for( int i = 0 ; i <= current_dimension(); ++i ) - { - if (is_infinite((*it)->vertex(i))) - continue; - (*it)->vertex(i)->set_full_cell(*it); - Full_cell_handle n = (*it)->neighbor(i); - // Was |n| a finite full cell prior to removing |v| ? - if( ! infinite_simps.contains(n) ) - continue; - int n_idx = n->index(v); - set_neighbors(*it, i, n->neighbor(n_idx), n->neighbor(n_idx)->index(n)); - } - } - Full_cell_handle ret_s; - // Then, we delete the infinite full_cells - for( typename Simplices::iterator it = infinite_simps.begin(); it != infinite_simps.end(); ++it ) - tds().delete_full_cell(*it); - tds().delete_vertex(v); - return simps.front(); - } - } - else // From here on, dark_side.current_dimension() == current_dimension() - { - dark_side.infinite_vertex()->data() = infinite_vertex(); - light_to_dark[infinite_vertex()] = dark_side.infinite_vertex(); - } - - // Now, compute the conflict zone of v->point() in - // the dark side. This is precisely the set of full_cells - // that we have to glue back into the light side. - Dark_face dark_f(dark_side.maximal_dimension()); - Dark_facet dark_ft; - typename Dark_triangulation::Locate_type lt; - dark_s = dark_side.locate(v->point(), lt, dark_f, dark_ft); - CGAL_assertion( lt != Dark_triangulation::ON_VERTEX - && lt != Dark_triangulation::OUTSIDE_AFFINE_HULL ); - - // |ret_s| is the full_cell that we return - Dark_s_handle dark_ret_s = dark_s; - Full_cell_handle ret_s; - - typedef typename Base::template Full_cell_set Dark_full_cells; - Dark_full_cells conflict_zone; - std::back_insert_iterator dark_out(conflict_zone); - - dark_ft = dark_side.compute_conflict_zone(v->point(), dark_s, dark_out); - // Make the dark simplices in the conflict zone searchable - conflict_zone.make_searchable(); - - // THE FOLLOWING SHOULD MAYBE GO IN TDS. - // Here is the plan: - // 1. Pick any Facet from boundary of the light zone - // 2. Find corresponding Facet on boundary of dark zone - // 3. stitch. - - // 1. Build a facet on the boudary of the light zone: - Full_cell_handle light_s = *simps.begin(); - Facet light_ft(light_s, light_s->index(v)); - - // 2. 
Find corresponding Dark_facet on boundary of the dark zone - Dark_full_cells dark_incident_s; - for( int i = 0; i <= current_dimension(); ++i ) - { - if( index_of_covertex(light_ft) == i ) - continue; - Dark_v_handle dark_v = light_to_dark[full_cell(light_ft)->vertex(i)]; - dark_incident_s.clear(); - dark_out = std::back_inserter(dark_incident_s); - dark_side.tds().incident_full_cells(dark_v, dark_out); - for(typename Dark_full_cells::iterator it = dark_incident_s.begin(); - it != dark_incident_s.end(); - ++it) - { - (*it)->data().count_ += 1; - } - } - - for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it ) - { - if( current_dimension() != (*it)->data().count_ ) - continue; - if( ! conflict_zone.contains(*it) ) - continue; - // We found a full_cell incident to the dark facet corresponding to the light facet |light_ft| - int ft_idx = 0; - while( light_s->has_vertex( (*it)->vertex(ft_idx)->data() ) ) - ++ft_idx; - dark_ft = Dark_facet(*it, ft_idx); - break; - } - // Pre-3. Now, we are ready to traverse both boundary and do the stiching. - - // But first, we create the new full_cells in the light triangulation, - // with as much adjacency information as possible. - - // Create new full_cells with vertices - for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) - { - Full_cell_handle new_s = new_full_cell(); - (*it)->data().light_copy_ = new_s; - for( int i = 0; i <= current_dimension(); ++i ) - tds().associate_vertex_with_full_cell(new_s, i, (*it)->vertex(i)->data()); - if( dark_ret_s == *it ) - ret_s = new_s; - } - - // Setup adjacencies inside the hole - for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) - { - Full_cell_handle new_s = (*it)->data().light_copy_; - for( int i = 0; i <= current_dimension(); ++i ) - if( conflict_zone.contains((*it)->neighbor(i)) ) - tds().set_neighbors(new_s, i, (*it)->neighbor(i)->data().light_copy_, (*it)->mirror_index(i)); - } - - // 3. Stitch - simps.make_searchable(); - typedef std::queue > Queue; - Queue q; - q.push(std::make_pair(light_ft, dark_ft)); - dark_s = dark_side.full_cell(dark_ft); - int dark_i = dark_side.index_of_covertex(dark_ft); - // mark dark_ft as visited: - // TODO try by marking with Dark_v_handle (vertex) - dark_s->neighbor(dark_i)->set_neighbor(dark_s->mirror_index(dark_i), Dark_s_handle()); - while( ! 
q.empty() ) - { - std::pair p = q.front(); - q.pop(); - light_ft = p.first; - dark_ft = p.second; - light_s = full_cell(light_ft); - int light_i = index_of_covertex(light_ft); - dark_s = dark_side.full_cell(dark_ft); - int dark_i = dark_side.index_of_covertex(dark_ft); - Full_cell_handle light_n = light_s->neighbor(light_i); - set_neighbors(dark_s->data().light_copy_, dark_i, light_n, light_s->mirror_index(light_i)); - for( int di = 0; di <= current_dimension(); ++di ) - { - if( di == dark_i ) - continue; - int li = light_s->index(dark_s->vertex(di)->data()); - Rotor light_r(light_s, li, light_i); - typename Dark_triangulation::Rotor dark_r(dark_s, di, dark_i); - - while( simps.contains(cpp11::get<0>(light_r)->neighbor(cpp11::get<1>(light_r))) ) - light_r = rotate_rotor(light_r); - - while( conflict_zone.contains(cpp11::get<0>(dark_r)->neighbor(cpp11::get<1>(dark_r))) ) - dark_r = dark_side.rotate_rotor(dark_r); - - Dark_s_handle dark_ns = cpp11::get<0>(dark_r); - int dark_ni = cpp11::get<1>(dark_r); - Full_cell_handle light_ns = cpp11::get<0>(light_r); - int light_ni = cpp11::get<1>(light_r); - // mark dark_r as visited: - // TODO try by marking with Dark_v_handle (vertex) - Dark_s_handle outside = dark_ns->neighbor(dark_ni); - Dark_v_handle mirror = dark_ns->mirror_vertex(dark_ni, current_dimension()); - int dn = outside->index(mirror); - if( Dark_s_handle() == outside->neighbor(dn) ) - continue; - outside->set_neighbor(dn, Dark_s_handle()); - q.push(std::make_pair(Facet(light_ns, light_ni), Dark_facet(dark_ns, dark_ni))); - } - } - tds().delete_full_cells(simps.begin(), simps.end()); - tds().delete_vertex(v); - return ret_s; -} - -template< typename Traits, typename TDS > -void -Regular_triangulation -::remove_decrease_dimension(Vertex_handle v) -{ - CGAL_precondition( current_dimension() >= 0 ); - tds().remove_decrease_dimension(v, infinite_vertex()); - // reset the predicates: - reset_flat_orientation(); - if( 1 <= current_dimension() ) - { - Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); - int inf_v_index = inf_v_cell->index(infinite_vertex()); - Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); - Orientation o = orientation(s); - CGAL_assertion( ZERO != o ); - if( NEGATIVE == o ) - reorient_full_cells(); - } -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS - -template< typename Traits, typename TDS > -typename Regular_triangulation::Vertex_handle -Regular_triangulation -::insert(const Weighted_point & p, Locate_type lt, const Face & f, const Facet & ft, Full_cell_handle s) -{ - switch( lt ) - { - case Base::OUTSIDE_AFFINE_HULL: - return insert_outside_affine_hull(p); - break; - case Base::ON_VERTEX: - { - Vertex_handle v = s->vertex(f.index(0)); - typename RTTraits::Compute_weight_d pw = - geom_traits().compute_weight_d_object(); - - if (pw(p) == pw(v->point())) - return v; - // If dim == 0 and the new point has a bigger weight, - // we just replace the point, and the former point gets hidden - else if (current_dimension() == 0) - { - if (pw(p) > pw(v->point())) - { - m_hidden_points.push_back(v->point()); - v->set_point(p); - return v; - } - // Otherwise, the new point is hidden - else - { - m_hidden_points.push_back(p); - return Vertex_handle(); - } - } - // Otherwise, we apply the "normal" algorithm - - // !NO break here! - } - default: - return insert_in_conflicting_cell(p, s); - } -} - -/* -Inserts the point `p` in the regular triangulation. Returns a handle to the -newly created vertex at that position. 
-\pre The point `p` -must lie outside the affine hull of the regular triangulation. This implies that -`rt`.`current_dimension()` must be smaller than `rt`.`maximal_dimension()`. -*/ -template< typename Traits, typename TDS > -typename Regular_triangulation::Vertex_handle -Regular_triangulation -::insert_outside_affine_hull(const Weighted_point & p) -{ - // we don't use Base::insert_outside_affine_hull(...) because here, we - // also need to reset the side_of_oriented_subsphere functor. - CGAL_precondition( current_dimension() < maximal_dimension() ); - Vertex_handle v = tds().insert_increase_dimension(infinite_vertex()); - // reset the predicates: - reset_flat_orientation(); - v->set_point(p); - if( current_dimension() >= 1 ) - { - Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); - int inf_v_index = inf_v_cell->index(infinite_vertex()); - Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); - Orientation o = orientation(s); - CGAL_assertion( ZERO != o ); - if( NEGATIVE == o ) - reorient_full_cells(); - - // We just inserted the second finite point and the right infinite - // cell is like : (inf_v, v), but we want it to be (v, inf_v) to be - // consistent with the rest of the cells - if (current_dimension() == 1) - { - // Is "inf_v_cell" the right infinite cell? Then inf_v_index should be 1 - if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 - && inf_v_index == 0) - { - inf_v_cell->swap_vertices(current_dimension() - 1, current_dimension()); - } - else - { - inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2); - inf_v_index = inf_v_cell->index(infinite_vertex()); - // Is "inf_v_cell" the right infinite cell? Then inf_v_index should be 1 - if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 - && inf_v_index == 0) - { - inf_v_cell->swap_vertices(current_dimension() - 1, current_dimension()); - } - } - } - } - return v; -} - -template< typename Traits, typename TDS > -typename Regular_triangulation::Vertex_handle -Regular_triangulation -::insert_if_in_star(const Weighted_point & p, - Vertex_handle star_center, - Locate_type lt, - const Face & f, - const Facet & ft, - Full_cell_handle s) -{ - switch( lt ) - { - case Base::OUTSIDE_AFFINE_HULL: - return insert_outside_affine_hull(p); - break; - case Base::ON_VERTEX: - { - Vertex_handle v = s->vertex(f.index(0)); - typename RTTraits::Compute_weight_d pw = - geom_traits().compute_weight_d_object(); - if (pw(p) == pw(v->point())) - return v; - // If dim == 0 and the new point has a bigger weight, - // we replace the point - else if (current_dimension() == 0) - { - if (pw(p) > pw(v->point())) - v->set_point(p); - else - return v; - } - // Otherwise, we apply the "normal" algorithm - - // !NO break here! - } - default: - return insert_in_conflicting_cell(p, s, star_center); - } - - return Vertex_handle(); -} - -/* -[Undocumented function] - -Inserts the point `p` in the regular triangulation. `p` must be -in conflict with the second parameter `c`, which is used as a -starting point for `compute_conflict_zone`. -The function is faster than the standard `insert` function since -it does not need to call `locate`. - -If this insertion creates a vertex, this vertex is returned. - -If `p` coincides with an existing vertex and has a greater weight, -then the existing weighted point becomes hidden and `p` replaces it as vertex -of the triangulation. - -If `p` coincides with an already existing vertex (both point and -weights being equal), then this vertex is returned and the triangulation -remains unchanged. 
- -Otherwise if `p` does not appear as a vertex of the triangulation, -then it is stored as a hidden point and this method returns the default -constructed handle. - -\pre The point `p` must be in conflict with the full cell `c`. -*/ - -template< typename Traits, typename TDS > -typename Regular_triangulation::Vertex_handle -Regular_triangulation -::insert_in_conflicting_cell(const Weighted_point & p, - Full_cell_handle s, - Vertex_handle only_if_this_vertex_is_in_the_cz) -{ - typedef std::vector Full_cell_h_vector; - - bool in_conflict = is_in_conflict(p, s); - - // If p is not in conflict with s, then p is hidden - // => we don't insert it - if (!in_conflict) - { - m_hidden_points.push_back(p); - return Vertex_handle(); - } - else - { - Full_cell_h_vector cs; // for storing conflicting full_cells. - cs.reserve(64); - std::back_insert_iterator out(cs); - Facet ft = compute_conflict_zone(p, s, out); - - // Check if the CZ contains "only_if_this_vertex_is_in_the_cz" - if (only_if_this_vertex_is_in_the_cz != Vertex_handle() - && !does_cell_range_contain_vertex(cs.begin(), cs.end(), - only_if_this_vertex_is_in_the_cz)) - { - return Vertex_handle(); - } - - // Otherwise, proceed with the insertion - std::vector cz_vertices; - cz_vertices.reserve(64); - process_conflict_zone(cs.begin(), cs.end(), - std::back_inserter(cz_vertices)); - - Vertex_handle ret = insert_in_hole(p, cs.begin(), cs.end(), ft); - - process_cz_vertices_after_insertion(cz_vertices.begin(), cz_vertices.end()); - - return ret; - } -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES - -// NOT DOCUMENTED -template< typename Traits, typename TDS > -template< typename OrientationPred > -Oriented_side -Regular_triangulation -::perturbed_power_side_of_power_sphere(const Weighted_point & p, Full_cell_const_handle s, - const OrientationPred & ori) const -{ - CGAL_precondition_msg( ! is_infinite(s), "full cell must be finite"); - CGAL_expensive_precondition( POSITIVE == orientation(s) ); - typedef std::vector Points; - Points points(current_dimension() + 2); - int i(0); - for( ; i <= current_dimension(); ++i ) - points[i] = &(s->vertex(i)->point()); - points[i] = &p; - std::sort(points.begin(), points.end(), - internal::Triangulation::Compare_points_for_perturbation(*this)); - typename Points::const_reverse_iterator cut_pt = points.rbegin(); - Points test_points; - while( cut_pt != points.rend() ) - { - if( &p == *cut_pt ) - // because the full_cell "s" is assumed to be positively oriented - return ON_NEGATIVE_SIDE; // we consider |p| to lie outside the sphere - test_points.clear(); - Point_const_iterator spit = points_begin(s); - int adjust_sign = -1; - for( i = 0; i < current_dimension(); ++i ) - { - if( &(*spit) == *cut_pt ) - { - ++spit; - adjust_sign = (((current_dimension() + i) % 2) == 0) ? 
-1 : +1; - } - test_points.push_back(&(*spit)); - ++spit; - } - test_points.push_back(&p); - - typedef typename CGAL::Iterator_project< - typename Points::iterator, - internal::Triangulation::Point_from_pointer, - const Weighted_point &, const Weighted_point * - > Point_pointer_iterator; - - Orientation ori_value = ori( - Point_pointer_iterator(test_points.begin()), - Point_pointer_iterator(test_points.end())); - - if( ZERO != ori_value ) - return Oriented_side( - adjust_sign * ori_value ); - - ++cut_pt; - } - CGAL_assertion(false); // we should never reach here - return ON_NEGATIVE_SIDE; -} - -template< typename Traits, typename TDS > -bool -Regular_triangulation -::is_in_conflict(const Weighted_point & p, Full_cell_const_handle s) const -{ - CGAL_precondition( 1 <= current_dimension() ); - if( current_dimension() < maximal_dimension() ) - { - Conflict_pred_in_subspace c( - *this, p, - coaffine_orientation_predicate(), - power_side_of_power_sphere_for_non_maximal_dim_predicate()); - return c(s); - } - else - { - Orientation_d ori = geom_traits().orientation_d_object(); - Power_side_of_power_sphere_d side = geom_traits().power_side_of_power_sphere_d_object(); - Conflict_pred_in_fullspace c(*this, p, ori, side); - return c(s); - } -} - -template< typename Traits, typename TDS > -template< typename OutputIterator > -typename Regular_triangulation::Facet -Regular_triangulation -::compute_conflict_zone(const Weighted_point & p, Full_cell_handle s, OutputIterator out) const -{ - CGAL_precondition( 1 <= current_dimension() ); - if( current_dimension() < maximal_dimension() ) - { - Conflict_pred_in_subspace c( - *this, p, - coaffine_orientation_predicate(), - power_side_of_power_sphere_for_non_maximal_dim_predicate()); - Conflict_traversal_pred_in_subspace tp(*this, c); - return tds().gather_full_cells(s, tp, out); - } - else - { - Orientation_d ori = geom_traits().orientation_d_object(); - Power_side_of_power_sphere_d side = geom_traits().power_side_of_power_sphere_d_object(); - Conflict_pred_in_fullspace c(*this, p, ori, side); - Conflict_traversal_pred_in_fullspace tp(*this, c); - return tds().gather_full_cells(s, tp, out); - } -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY - -template< typename Traits, typename TDS > -bool -Regular_triangulation -::is_valid(bool verbose, int level) const -{ - if (!Base::is_valid(verbose, level)) - return false; - - int dim = current_dimension(); - if (dim == maximal_dimension()) - { - for (Finite_full_cell_const_iterator cit = finite_full_cells_begin() ; - cit != finite_full_cells_end() ; ++cit ) - { - Full_cell_const_handle ch = cit.base(); - for(int i = 0; i < dim+1 ; ++i ) - { - // If the i-th neighbor is not an infinite cell - Vertex_handle opposite_vh = - ch->neighbor(i)->vertex(ch->neighbor(i)->index(ch)); - if (!is_infinite(opposite_vh)) - { - Power_side_of_power_sphere_d side = - geom_traits().power_side_of_power_sphere_d_object(); - if (side(Point_const_iterator(ch->vertices_begin()), - Point_const_iterator(ch->vertices_end()), - opposite_vh->point()) == ON_POSITIVE_SIDE) - { - if (verbose) - CGAL_warning_msg(false, "Non-empty sphere"); - return false; - } - } - } - } - } - return true; -} - -} //namespace CGAL - -#endif //CGAL_REGULAR_TRIANGULATION_H diff --git a/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h b/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h deleted file mode 100644 index 78bb95a6..00000000 --- 
a/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright (c) 2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Clement Jamin - -#ifndef CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H -#define CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H - -#include - -#include - -namespace CGAL { - -// Wrapper class to make a model of `RegularTriangulationTraits` easily usable -// by the `Regular_triangulation` class. By using this class: -// - Point_d (used by `Triangulation` and the TDS) becomes a weighted point -// - Predicates and functors such as Less_coordinate_d or Orientation_d -// can be called using weighted points instead of bare points (this is -// needed because `Weighted_point_d` is not convertible to `Point_d`) -// This way, `Triangulation` works perfectly well with weighted points. - -template -class Regular_triangulation_traits_adapter - : public K -{ -public: - typedef K Base; - - // Required by TriangulationTraits - typedef typename K::Dimension Dimension; - typedef typename K::FT FT; - typedef typename K::Flat_orientation_d Flat_orientation_d; - typedef typename K::Weighted_point_d Point_d; - - // Required by RegularTriangulationTraits - typedef typename K::Point_d Bare_point_d; - typedef typename K::Weighted_point_d Weighted_point_d; - typedef typename K::Construct_point_d Construct_point_d; - typedef typename K::Compute_weight_d Compute_weight_d; - typedef typename K::Power_side_of_power_sphere_d Power_side_of_power_sphere_d; - typedef typename K::In_flat_power_side_of_power_sphere_d - In_flat_power_side_of_power_sphere_d; - - //=========================================================================== - // Custom types - //=========================================================================== - - // Required by SpatialSortingTraits_d - class Less_coordinate_d - { - const K &m_kernel; - - public: - typedef bool result_type; - - Less_coordinate_d(const K &kernel) - : m_kernel(kernel) {} - - result_type operator()( - Weighted_point_d const& p, Weighted_point_d const& q, int i) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.less_coordinate_d_object() (cp(p), cp(q), i); - } - }; - - //=========================================================================== - - // Required by TriangulationTraits - class Orientation_d - { - const K &m_kernel; - - public: - typedef Orientation result_type; - - Orientation_d(const K &kernel) - : m_kernel(kernel) {} - - template - result_type operator()(ForwardIterator start, ForwardIterator end) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.orientation_d_object() ( - boost::make_transform_iterator(start, cp), - boost::make_transform_iterator(end, cp) - ); - } - }; - - //=========================================================================== - - // Required by 
TriangulationTraits - class Construct_flat_orientation_d - { - const K &m_kernel; - - public: - typedef Flat_orientation_d result_type; - - Construct_flat_orientation_d(const K &kernel) - : m_kernel(kernel) {} - - template - result_type operator()(ForwardIterator start, ForwardIterator end) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.construct_flat_orientation_d_object() ( - boost::make_transform_iterator(start, cp), - boost::make_transform_iterator(end, cp) - ); - } - }; - - - //=========================================================================== - - // Required by TriangulationTraits - class In_flat_orientation_d - { - const K &m_kernel; - - public: - typedef Orientation result_type; - - In_flat_orientation_d(const K &kernel) - : m_kernel(kernel) {} - - template - result_type operator()(Flat_orientation_d orient, - ForwardIterator start, ForwardIterator end) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.in_flat_orientation_d_object() ( - orient, - boost::make_transform_iterator(start, cp), - boost::make_transform_iterator(end, cp) - ); - } - }; - - //=========================================================================== - - // Required by TriangulationTraits - class Contained_in_affine_hull_d - { - const K &m_kernel; - - public: - typedef bool result_type; - - Contained_in_affine_hull_d(const K &kernel) - : m_kernel(kernel) {} - - template - result_type operator()(ForwardIterator start, ForwardIterator end, - const Weighted_point_d & p) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.contained_in_affine_hull_d_object() ( - boost::make_transform_iterator(start, cp), - boost::make_transform_iterator(end, cp), - cp(p) - ); - } - }; - - //=========================================================================== - - // Required by TriangulationTraits - class Compare_lexicographically_d - { - const K &m_kernel; - - public: - typedef Comparison_result result_type; - - Compare_lexicographically_d(const K &kernel) - : m_kernel(kernel) {} - - result_type operator()( - const Weighted_point_d & p, const Weighted_point_d & q) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.compare_lexicographically_d_object()(cp(p), cp(q)); - } - }; - - //=========================================================================== - - // Only for Triangulation_off_ostream.h (undocumented) - class Compute_coordinate_d - { - const K &m_kernel; - - public: - typedef FT result_type; - - Compute_coordinate_d(const K &kernel) - : m_kernel(kernel) {} - - result_type operator()( - const Weighted_point_d & p, const int i) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.compute_coordinate_d_object()(cp(p), i); - } - }; - - //=========================================================================== - - // To satisfy SpatialSortingTraits_d - // and also for Triangulation_off_ostream.h (undocumented) - class Point_dimension_d - { - const K &m_kernel; - - public: - typedef int result_type; - - Point_dimension_d(const K &kernel) - : m_kernel(kernel) {} - - result_type operator()( - const Weighted_point_d & p) const - { - Construct_point_d cp = m_kernel.construct_point_d_object(); - return m_kernel.point_dimension_d_object()(cp(p)); - } - }; - - //=========================================================================== - // Object creation - 
//=========================================================================== - - Less_coordinate_d less_coordinate_d_object() const - { - return Less_coordinate_d(*this); - } - Contained_in_affine_hull_d contained_in_affine_hull_d_object() const - { - return Contained_in_affine_hull_d(*this); - } - Orientation_d orientation_d_object() const - { - return Orientation_d(*this); - } - Construct_flat_orientation_d construct_flat_orientation_d_object() const - { - return Construct_flat_orientation_d(*this); - } - In_flat_orientation_d in_flat_orientation_d_object() const - { - return In_flat_orientation_d(*this); - } - Compare_lexicographically_d compare_lexicographically_d_object() const - { - return Compare_lexicographically_d(*this); - } - Compute_coordinate_d compute_coordinate_d_object() const - { - return Compute_coordinate_d(*this); - } - Point_dimension_d point_dimension_d_object() const - { - return Point_dimension_d(*this); - } -}; - - -} //namespace CGAL - -#endif // CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H diff --git a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h deleted file mode 100644 index 9a6030e5..00000000 --- a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H -#define CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H - -#include -#include -#include - -#include - -namespace CGAL { - -// POLICY TAG - -struct TDS_full_cell_default_storage_policy {}; // stores no additional data. Uses XOR trick. 
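The "XOR trick" named in the comment above is what lets this default policy avoid storing per-facet mirror data: the mirror vertex of facet i can be recomputed from vertex pointers alone, exactly as mirror_vertex() does in the TFC_data specialization below. A minimal, self-contained sketch of that identity, assuming plain raw pointers and a fixed 2-dimensional cell rather than the CGAL handle types:

// Illustrative sketch only (simplified types, not part of the CGAL header).
#include <cstdint>
#include <cassert>

struct Vertex {};

int main() {
  // Cell C = {a, b, c}; its neighbor N opposite C[0] shares b and c and owns m.
  Vertex a, b, c, m;
  Vertex* C[3] = {&a, &b, &c};
  Vertex* N[3] = {&m, &b, &c};

  // XOR of a cell's vertex pointers, as in xor_of_vertices().
  auto xor_of = [](Vertex* const* v) {
    std::uintptr_t r = 0;
    for (int i = 0; i < 3; ++i) r ^= reinterpret_cast<std::uintptr_t>(v[i]);
    return r;
  };

  // The shared vertices cancel out, leaving ptr(a) ^ ptr(m); one more XOR with
  // ptr(a) isolates the mirror vertex, with no extra storage per facet.
  std::uintptr_t mirror =
      xor_of(C) ^ xor_of(N) ^ reinterpret_cast<std::uintptr_t>(C[0]);
  assert(reinterpret_cast<Vertex*>(mirror) == &m);
  return 0;
}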
- -template< typename V, typename S, typename D, typename StoragePolicy > -struct TFC_data; // TFC = Triangulation Full Cell - -template< typename Vertex_handle, typename Full_cell_handle, typename Dimen > -struct TFC_data< Vertex_handle, Full_cell_handle, Dimen, TDS_full_cell_default_storage_policy > -{ - typedef typename internal::Dimen_plus_one::type Dimen_plus; - typedef typename internal::S_or_D_array< Vertex_handle, Dimen_plus, true > Vertex_handle_array; - typedef typename internal::S_or_D_array< Full_cell_handle, Dimen_plus > Full_cell_handle_array; - - Vertex_handle_array vertices_; - Full_cell_handle_array neighbors_; - - TFC_data(const int dmax) - : vertices_(dmax+1), neighbors_(dmax+1) - {} - void* for_compact_container() const { return vertices_.for_compact_container(); } - void* & for_compact_container() { return vertices_.for_compact_container(); } - int dimension() const { return ( vertices_.size() - 1 ); } - void set_mirror_index(const int, const int) {} -#ifdef BOOST_NO_INT64_T - typedef std::ptrdiff_t Xor_type; -#else - typedef boost::int_least64_t Xor_type; -#endif - Xor_type xor_of_vertices(const int cur_dim) const - { - Xor_type result(0); - for( int i = 0; i <= cur_dim; ++i ) - result ^= reinterpret_cast(&(*vertices_[i])); - return result; - } - // ASSUMES |*this| is indeed a neighbor of neighbor(i): - // NOT correct when the hole (in insert_in_hole) is doubly covered. - int mirror_index(const int i) const - { - int index = 0; - Full_cell_handle n = neighbors_[i]; - Full_cell_handle o = n->neighbor(index); - while( &(o->combinatorics_) != this ) - o = n->neighbor(++index); - return index; - } - Vertex_handle mirror_vertex(const int i, const int cur_dim) const - { - Xor_type opp_vertex = xor_of_vertices(cur_dim) - ^ neighbors_[i]->xor_of_vertices(cur_dim) - ^ reinterpret_cast(&(*vertices_[i])); - Vertex_handle mirror; - typedef typename Vertex_handle::pointer pointer; - // mirror.set_pointer(reinterpret_cast(opp_vertex)); - mirror = Compact_container - ::s_iterator_to(*(reinterpret_cast(opp_vertex))); - return mirror; - } - void swap_vertices(const int d1, const int d2) - { - std::swap(vertices_[d1], vertices_[d2]); - std::swap(neighbors_[d1], neighbors_[d2]); - } -}; - -} //namespace CGAL - -#endif // CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H diff --git a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h deleted file mode 100644 index 095dfe68..00000000 --- a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H -#define CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H - -#include - -namespace CGAL { - -// POLICY TAGS - -struct TDS_full_cell_mirror_storage_policy {}; // Stores the mirror index of all vertices. - -template< typename Vertex_handle, typename Full_cell_handle, typename Maximal_dimension > -struct TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_mirror_storage_policy > -: public TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_default_storage_policy > -{ - typedef TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_default_storage_policy > Base; - typedef typename Base::Vertex_handle_array Vertex_handle_array; - typedef typename Base::Full_cell_handle_array Full_cell_handle_array; - typedef typename internal::S_or_D_array< int, typename Base::Dimen_plus > Int_array; - -private: - Int_array mirror_vertices_; - -public: - TFC_data(const int dmax) - : Base(dmax), mirror_vertices_(dmax+1) - {} - - void set_mirror_index(const int i, const int index) - { - mirror_vertices_[i] = index; - } - int mirror_index(const int i) const - { - return mirror_vertices_[i]; - } - Vertex_handle mirror_vertex(const int i, const int) const - { - return Base::neighbors_[i]->vertex(mirror_index(i)); - } - void swap_vertices(const int d1, const int d2) - { - Base::swap_vertices(d1, d2); - std::swap(mirror_vertices_[d1], mirror_vertices_[d2]); - Base::neighbors_[d1]->set_mirror_index(mirror_vertices_[d1], d1); - Base::neighbors_[d2]->set_mirror_index(mirror_vertices_[d2], d2); - } -}; - -} //namespace CGAL - -#endif // CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation.h b/src/common/include/gudhi_patches/CGAL/Triangulation.h deleted file mode 100644 index 906df92e..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation.h +++ /dev/null @@ -1,1424 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_H -#define CGAL_TRIANGULATION_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -namespace CGAL { - -// Iterator which iterates over vertex_handle's, but returns a point when -// dereferenced. 
If the current -// vertex_handle vh == vh_where_point_should_be_substituted, it returns -// "subtitute_point", otherwise, it returns vh->point() -template -class Substitute_point_in_vertex_iterator -{ - typedef typename std::iterator_traits::value_type Vertex_handle; - typedef typename Vertex_handle::value_type Vertex; - typedef typename Vertex::Point Point; - -public: - typedef Point const& result_type; // For result_of - - Substitute_point_in_vertex_iterator( - Vertex_handle vh_where_point_should_be_substituted, - Point const *subtitute_point) - : vh_where_point_should_be_substituted_(vh_where_point_should_be_substituted) - , subtitute_point_(subtitute_point) - {} - - result_type operator()(Vertex_handle vh) const - { - if (vh == vh_where_point_should_be_substituted_) - return *subtitute_point_; - else - return vh->point(); - } - -private: - Vertex_handle vh_where_point_should_be_substituted_; - Point const *subtitute_point_; - -}; - - -template < class TriangulationTraits, class TDS_ = Default > -class Triangulation -{ - typedef typename TriangulationTraits::Dimension Maximal_dimension_; - typedef typename Default::Get, - Triangulation_full_cell > - >::type TDS; - typedef Triangulation Self; - -protected: - typedef typename TriangulationTraits::Flat_orientation_d Flat_orientation_d; - typedef typename TriangulationTraits::Construct_flat_orientation_d Construct_flat_orientation_d; - typedef typename TriangulationTraits::In_flat_orientation_d In_flat_orientation_d; - - // Wrapper - struct Coaffine_orientation_d - { - boost::optional* fop; - Construct_flat_orientation_d cfo; - In_flat_orientation_d ifo; - - Coaffine_orientation_d( - boost::optional& x, - Construct_flat_orientation_d const&y, - In_flat_orientation_d const&z) - : fop(&x), cfo(y), ifo(z) {} - - template - CGAL::Orientation operator()(Iter a, Iter b) const - { - if (*fop) - return ifo(fop->get(),a,b); - *fop = cfo(a,b); - CGAL_assertion(ifo(fop->get(),a,b) == CGAL::POSITIVE); - return CGAL::POSITIVE; - } - }; - - void reset_flat_orientation() - { - if (current_dimension() == preset_flat_orientation_.first) - { - CGAL_assertion(preset_flat_orientation_.second != NULL); - flat_orientation_ = *preset_flat_orientation_.second; - } - else - flat_orientation_ = boost::none; - } - - typedef typename TriangulationTraits::Orientation_d - Orientation_d; - -public: - - typedef TriangulationTraits Geom_traits; - typedef TDS Triangulation_ds; - - typedef typename TDS::Vertex Vertex; - typedef typename TDS::Full_cell Full_cell; - typedef typename TDS::Facet Facet; - typedef typename TDS::Face Face; - - typedef Maximal_dimension_ Maximal_dimension; - typedef typename Geom_traits::Point_d Point; - - typedef typename TDS::Vertex_handle Vertex_handle; - typedef typename TDS::Vertex_iterator Vertex_iterator; - typedef typename TDS::Vertex_const_handle Vertex_const_handle; - typedef typename TDS::Vertex_const_iterator Vertex_const_iterator; - - typedef typename TDS::Full_cell_handle Full_cell_handle; - typedef typename TDS::Full_cell_iterator Full_cell_iterator; - typedef typename TDS::Full_cell_const_handle Full_cell_const_handle; - typedef typename TDS::Full_cell_const_iterator Full_cell_const_iterator; - - typedef typename TDS::Facet_iterator Facet_iterator; - - typedef typename TDS::size_type size_type; - typedef typename TDS::difference_type difference_type; - - /// The type of location a new point is found lying on - enum Locate_type - { - ON_VERTEX = 0 // simplex of dimension 0 - , IN_FACE = 1 // simplex of dimension in [ 1, 
|current_dimension()| - 2 ] - , IN_FACET = 2 // simplex of dimension |current_dimension()| - 1 - , IN_FULL_CELL = 3 /// simplex of dimension |current_dimension()| - , OUTSIDE_CONVEX_HULL = 4 - , OUTSIDE_AFFINE_HULL = 5 - }; - - // Finite elements iterators - - class Finiteness_predicate; - - typedef boost::filter_iterator - Finite_vertex_iterator; - typedef boost::filter_iterator - Finite_vertex_const_iterator; - typedef boost::filter_iterator - Finite_full_cell_iterator; - typedef boost::filter_iterator - Finite_full_cell_const_iterator; - typedef boost::filter_iterator - Finite_facet_iterator; - -protected: // DATA MEMBERS - - Triangulation_ds tds_; - const Geom_traits kernel_; - Vertex_handle infinity_; - mutable std::vector orientations_; - mutable boost::optional flat_orientation_; - // The user can specify a Flat_orientation_d object to be used for - // orienting simplices of a specific dimension - // (= preset_flat_orientation_.first) - // preset_flat_orientation_.first = numeric_limits::max() otherwise) - std::pair preset_flat_orientation_; - // for stochastic walk in the locate() function: - mutable Random rng_; -#ifdef CGAL_TRIANGULATION_STATISTICS - mutable unsigned long walk_size_; -#endif - -protected: // HELPER FUNCTIONS - - typedef CGAL::Iterator_project< - typename Full_cell::Vertex_handle_const_iterator, - internal::Triangulation::Point_from_vertex_handle - > Point_const_iterator; - - Point_const_iterator points_begin(Full_cell_const_handle c) const - { return Point_const_iterator(c->vertices_begin()); } - Point_const_iterator points_end(Full_cell_const_handle c) const - { return Point_const_iterator(c->vertices_end()); } - Point_const_iterator points_begin(Full_cell_handle c) const - { return Point_const_iterator(c->vertices_begin()); } - Point_const_iterator points_end(Full_cell_handle c) const - { return Point_const_iterator(c->vertices_end()); } - -public: - - // FACETS OPERATIONS - - Full_cell_handle full_cell(const Facet & f) const - { - return tds().full_cell(f); - } - - int index_of_covertex(const Facet & f) const - { - return tds().index_of_covertex(f); - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - UTILITIES - - // A co-dimension 2 sub-simplex. called a Rotor because we can rotate - // the two "covertices" around the sub-simplex. Useful for traversing the - // boundary of a hole. NOT DOCUMENTED - typedef cpp11::tuple Rotor; - - // Commented out because it was causing "internal compiler error" in MSVC - /*Full_cell_handle full_cell(const Rotor & r) const // NOT DOCUMENTED - { - return cpp11::get<0>(r); - } - int index_of_covertex(const Rotor & r) const // NOT DOCUMENTED - { - return cpp11::get<1>(r); - } - int index_of_second_covertex(const Rotor & r) const // NOT DOCUMENTED - { - return cpp11::get<2>(r); - }*/ - Rotor rotate_rotor(Rotor & r) // NOT DOCUMENTED... 
- { - int opposite = cpp11::get<0>(r)->mirror_index(cpp11::get<1>(r)); - Full_cell_handle s = cpp11::get<0>(r)->neighbor(cpp11::get<1>(r)); - int new_second = s->index(cpp11::get<0>(r)->vertex(cpp11::get<2>(r))); - return Rotor(s, new_second, opposite); - } - - // - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS - - Triangulation(int dim, const Geom_traits &k = Geom_traits()) - : tds_(dim) - , kernel_(k) - , infinity_() - , preset_flat_orientation_((std::numeric_limits::max)(), - (Flat_orientation_d*) NULL) - , rng_((long)0) -#ifdef CGAL_TRIANGULATION_STATISTICS - ,walk_size_(0) -#endif - { - clear(); - } - - // With this constructor, - // the user can specify a Flat_orientation_d object to be used for - // orienting simplices of a specific dimension - // (= preset_flat_orientation_.first) - // It it used for by dark triangulations created by DT::remove - Triangulation( - int dim, - const std::pair &preset_flat_orientation, - const Geom_traits k = Geom_traits()) - : tds_(dim) - , kernel_(k) - , infinity_() - , preset_flat_orientation_(preset_flat_orientation) - , rng_((long)0) -#ifdef CGAL_TRIANGULATION_STATISTICS - ,walk_size_(0) -#endif - { - clear(); - } - - Triangulation(const Triangulation & t2) - : tds_(t2.tds_) - , kernel_(t2.kernel_) - , infinity_() - , preset_flat_orientation_((std::numeric_limits::max)(), - (Flat_orientation_d*) NULL) - , rng_(t2.rng_) -#ifdef CGAL_TRIANGULATION_STATISTICS - ,walk_size_(t2.walk_size_) -#endif - { - // We find the vertex at infinity by scanning the vertices of both - // triangulations. This works because Compact_container garantees that - // the vertices in the copy (*this) are stored in the same order as in - // the original triangulation (t2) - infinity_ = vertices_begin(); - Vertex_const_iterator inf2 = t2.vertices_begin(); - while( inf2 != t2.infinite_vertex() ) - { - ++infinity_; - ++inf2; - } - // A full_cell has at most 1 + maximal_dimension() facets: - orientations_.resize(1 + maximal_dimension()); - // Our coaffine orientation predicates HAS state member variables - reset_flat_orientation(); - } - - ~Triangulation() {} - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS FUNCTIONS - - /* These three function are no longer needed since we do not use them anymore - in the Delaunay_triangulation::remove. *But*, they may reappear in the future - if we manage to passe the information that flags/TDS_data is available or not - for marking simplices in Delaunay_triangulation::remove. This would be useful - to make it a little faster, instead of binary searching if a simplex is marked - or not... 
- // NOT DOCUMENTED -- - bool get_visited(Full_cell_handle s) const - { - return tds().get_visited(s); - } - // NOT DOCUMENTED -- - bool get_visited(Full_cell_const_handle s) const - { - return tds().get_visited(s); - } - - // NOT DOCUMENTED -- - void set_visited(Full_cell_handle s, bool b) const - { - tds().set_visited(s, b); - } */ - - Coaffine_orientation_d coaffine_orientation_predicate() const - { - return Coaffine_orientation_d ( - flat_orientation_, - geom_traits().construct_flat_orientation_d_object(), - geom_traits().in_flat_orientation_d_object() - ); - } - - const Triangulation_ds & tds() const - { - return tds_; - } - - Triangulation_ds & tds() - { - return tds_; - } - - const Geom_traits & geom_traits() const - { - return kernel_; - } - - int maximal_dimension() const { return tds().maximal_dimension(); } - int current_dimension() const { return tds().current_dimension(); } - - bool empty() const - { - return current_dimension() == -1; - } - - size_type number_of_vertices() const - { - return tds().number_of_vertices() - 1; - } - - size_type number_of_full_cells() const - { - return tds().number_of_full_cells(); - } - - Vertex_handle infinite_vertex() const - { - return infinity_; - } - - Full_cell_handle infinite_full_cell() const - { - CGAL_assertion(infinite_vertex()->full_cell()->has_vertex(infinite_vertex())); - return infinite_vertex()->full_cell(); - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - NON CONSTANT-TIME ACCESS FUNCTIONS - - size_type number_of_finite_full_cells() const - { - Full_cell_const_iterator s = full_cells_begin(); - size_type result = number_of_full_cells(); - for( ; s != full_cells_end(); ++s ) - { - if( is_infinite(s) ) - --result; - } - return result; - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TRAVERSAL - - Vertex_iterator vertices_begin() { return tds().vertices_begin(); } - Vertex_iterator vertices_end() { return tds().vertices_end(); } - - Vertex_const_iterator vertices_begin() const { return tds().vertices_begin(); } - Vertex_const_iterator vertices_end() const { return tds().vertices_end(); } - - Finite_vertex_iterator finite_vertices_begin() - { return Finite_vertex_iterator(Finiteness_predicate(*this), vertices_begin(), vertices_end()); } - Finite_vertex_iterator finite_vertices_end() - { return Finite_vertex_iterator(Finiteness_predicate(*this), vertices_end(), vertices_end()); } - Finite_vertex_const_iterator finite_vertices_begin() const - { return Finite_vertex_const_iterator(Finiteness_predicate(*this), vertices_begin(), vertices_end()); } - Finite_vertex_const_iterator finite_vertices_end() const - { return Finite_vertex_const_iterator(Finiteness_predicate(*this), vertices_end(), vertices_end()); } - - Full_cell_iterator full_cells_begin() { return tds().full_cells_begin(); } - Full_cell_iterator full_cells_end() { return tds().full_cells_end(); } - - Full_cell_const_iterator full_cells_begin() const { return tds().full_cells_begin(); } - Full_cell_const_iterator full_cells_end() const { return tds().full_cells_end(); } - - Finite_full_cell_iterator finite_full_cells_begin() - { return Finite_full_cell_iterator(Finiteness_predicate(*this), full_cells_begin(), full_cells_end()); } - Finite_full_cell_iterator finite_full_cells_end() - { return Finite_full_cell_iterator(Finiteness_predicate(*this), full_cells_end(), full_cells_end()); } - Finite_full_cell_const_iterator finite_full_cells_begin() const - { return Finite_full_cell_const_iterator(Finiteness_predicate(*this), full_cells_begin(), 
full_cells_end()); } - Finite_full_cell_const_iterator finite_full_cells_end() const - { return Finite_full_cell_const_iterator(Finiteness_predicate(*this), full_cells_end(), full_cells_end()); } - - Facet_iterator facets_begin() { return tds().facets_begin(); } - Facet_iterator facets_end() { return tds().facets_end(); } - Facet_iterator finite_facets_begin() - { return Finite_facet_iterator(Finiteness_predicate(*this), facets_begin(), facets_end()); } - Facet_iterator finite_facets_end() - { return Finite_facet_iterator(Finiteness_predicate(*this), facets_end(), facets_end()); } - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SOME PREDICATE FUNCTORS - - class Finiteness_predicate - { - const Self & t_; - public: - Finiteness_predicate(const Self & t) : t_(t) {} - template < class T > - bool operator()(const T & t) const - { - return ! t_.is_infinite(t); - } - }; - - class Point_equality_predicate - { - const Point & o_; - public: - Point_equality_predicate(const Point & o) : o_(o) {} - bool operator()(const Point & o) const { return (o == o_ );} - }; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SIMPLE QUERIES -/* - bool is_vertex(const Point & p, Vertex_handle & v, Full_cell_handle hint = Full_cell_handle()) const - { - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, hint); - if( ON_VERTEX == lt ) - { - v = s->vertex(f.index(0)); - return true; - } - return false; - } - - bool is_vertex(Vertex_const_handle v) const - { - return tds().is_vertex(v); - } - - bool is_full_cell(Full_cell_const_handle s) const - { - return tds().is_full_cell(s); - } -*/ - - bool is_infinite(Vertex_const_handle v) const - { - CGAL_precondition(Vertex_const_handle() != v); - return (infinite_vertex() == v); - } - - bool is_infinite(const Vertex & v) const /* internal use, not documented */ - { - return (&(*infinite_vertex()) == &v); - } - - bool is_infinite(Full_cell_const_handle s) const - { - CGAL_precondition(Full_cell_const_handle() != s); - return is_infinite(*s); - } - bool is_infinite(const Full_cell & s) const /* internal use, not documented */ - { - for(int i = 0; i <= current_dimension(); ++i) - if( is_infinite(s.vertex(i)) ) - return true; - return false; - } - bool is_infinite(const Facet & ft) const - { - Full_cell_const_handle s = full_cell(ft); - CGAL_precondition(s != Full_cell_const_handle()); - if( is_infinite(s) ) - return (s->vertex(index_of_covertex(ft)) != infinite_vertex()); - return false; - } - - bool is_infinite(const Face & f) const - { - Full_cell_const_handle s = f.full_cell(); - CGAL_precondition(s != Full_cell_const_handle()); - if( is_infinite(s) ) - { - Vertex_handle v; - for( int i(0); i<= f.face_dimension(); ++i) - if ( is_infinite( f.vertex(i) )) return true; - } - return false; - } - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ELEMENT GATHERING - - - template< typename OutputIterator > - OutputIterator incident_full_cells(const Face & f, OutputIterator out) const - { - return tds().incident_full_cells(f, out); - } - template< typename OutputIterator > - OutputIterator incident_full_cells(Vertex_const_handle v, OutputIterator out) const - { - return tds().incident_full_cells(v, out); - } - template< typename OutputIterator > - OutputIterator star(const Face & f, OutputIterator out) const - { - return tds().star(f, out); - } - - template< typename OutputIterator > - OutputIterator incident_faces(Vertex_const_handle v, int d, OutputIterator out) const - { - return 
tds().incident_faces(v, d, out); - } - /* - template< typename OutputIterator, class Comparator > - OutputIterator incident_upper_faces( Vertex_const_handle v, int d, - OutputIterator out, Comparator cmp = Comparator()) - { - return tds().incident_upper_faces(v, d, out, cmp); - } - template< typename OutputIterator > - OutputIterator incident_upper_faces( Vertex_const_handle v, int d, - OutputIterator out) - { // FIXME: uncomment this function, since it uses a comparator specific to - // *geometric* triangulation (taking infinite vertex into account) - internal::Triangulation::Compare_vertices_for_upper_face cmp(*this); - return tds().incident_upper_faces(v, d, out, cmp); - } - */ - Orientation orientation(Full_cell_const_handle s, bool in_is_valid = false) const - { - if( ! in_is_valid ) - CGAL_assertion( ! is_infinite(s) ); - if( 0 == current_dimension() ) - return POSITIVE; - if( current_dimension() == maximal_dimension() ) - { - Orientation_d ori = geom_traits().orientation_d_object(); - return ori(points_begin(s), points_begin(s) + 1 + current_dimension()); - } - else - { - return coaffine_orientation_predicate()(points_begin(s), points_begin(s) + 1 + current_dimension()); - } - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - UPDATE OPERATIONS - - void clear() - { - tds_.clear(); - infinity_ = tds().insert_increase_dimension(); - // A full_cell has at most 1 + maximal_dimension() facets: - orientations_.resize(1 + maximal_dimension()); - // Our coaffine orientation predicates HAS state member variables - reset_flat_orientation(); -#ifdef CGAL_TRIANGULATION_STATISTICS - walk_size_ = 0; -#endif - } - - void set_current_dimension(int d) - { - tds().set_current_dimension(d); - } - - Full_cell_handle new_full_cell() - { - return tds().new_full_cell(); - } - - Vertex_handle new_vertex() - { - return tds().new_vertex(); - } - - Vertex_handle new_vertex(const Point & p) - { - return tds().new_vertex(p); - } - - void set_neighbors(Full_cell_handle s, int i, Full_cell_handle s1, int j) - { - tds().set_neighbors(s, i, s1, j); - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY - - bool is_valid(bool = false, int = 0) const; - bool are_incident_full_cells_valid(Vertex_const_handle, bool = false, int = 0) const; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - POINT LOCATION - -protected: - template< typename OrientationPredicate > - Full_cell_handle do_locate(const Point &, Locate_type &, Face &, Facet &, - Full_cell_handle start, - const OrientationPredicate & o) const; -public: - Full_cell_handle locate(const Point &, Locate_type &, Face &, Facet &, - Full_cell_handle start = Full_cell_handle()) const; - Full_cell_handle locate(const Point &, Locate_type &, Face &, Facet &, - Vertex_handle) const; - Full_cell_handle locate(const Point & p, Full_cell_handle s = Full_cell_handle()) const; - Full_cell_handle locate(const Point & p, Vertex_handle v) const; - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS - - Vertex_handle contract_face(const Point &, const Face &); - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - POINT INSERTION - - template< typename ForwardIterator > - size_type insert(ForwardIterator start, ForwardIterator end) - { - size_type n = number_of_vertices(); - std::vector points(start, end); - spatial_sort(points.begin(), points.end(), geom_traits()); - Full_cell_handle hint = Full_cell_handle(); - typename std::vector::const_iterator s = points.begin(); - while( 
s != points.end() ) - { - hint = insert(*s++, hint)->full_cell(); - } - return number_of_vertices() - n; - } - Vertex_handle insert(const Point &, Locate_type, const Face &, const Facet &, Full_cell_handle); - Vertex_handle insert(const Point &, Full_cell_handle start = Full_cell_handle()); - Vertex_handle insert(const Point &, Vertex_handle); - template< typename ForwardIterator > - Vertex_handle insert_in_hole(const Point & p, ForwardIterator start, ForwardIterator end, const Facet & ft) - { - Emptyset_iterator out; - return insert_in_hole(p, start, end, ft, out); - } - template< typename ForwardIterator, typename OutputIterator > - Vertex_handle insert_in_hole(const Point & p, ForwardIterator start, ForwardIterator end, const Facet & ft, - OutputIterator out) - { - Vertex_handle v = tds().insert_in_hole(start, end, ft, out); - v->set_point(p); - return v; - } - Vertex_handle insert_in_face(const Point &, const Face &); - Vertex_handle insert_in_facet(const Point &, const Facet &); - Vertex_handle insert_in_full_cell(const Point &, Full_cell_handle); - Vertex_handle insert_outside_convex_hull_1(const Point &, Full_cell_handle); - Vertex_handle insert_outside_convex_hull(const Point &, Full_cell_handle); - Vertex_handle insert_outside_affine_hull(const Point &); - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - FACET-TRAVERSAL PREDICATES - - template< typename OrientationPredicate > - class Outside_convex_hull_traversal_predicate - { - Triangulation & t_; - const Point & p_; - OrientationPredicate const& ori_; - int cur_dim_; - public: - Outside_convex_hull_traversal_predicate(Triangulation & t, const Point & p, - OrientationPredicate const& ori) - : t_(t), p_(p), ori_(ori), cur_dim_(t.current_dimension()) {} - // FUTURE change parameter to const reference - bool operator()(Facet f) const - { - Full_cell_handle s = t_.full_cell(f); - const int i = t_.index_of_covertex(f); - Full_cell_handle n = s->neighbor(i); - if( ! t_.is_infinite(n) ) - return false; - int inf_v_index = n->index(t_.infinite_vertex()); - n->vertex(inf_v_index)->set_point(p_); - bool ok = (POSITIVE == ori_(t_.points_begin(n), t_.points_begin(n) + cur_dim_ + 1)); - return ok; - } - }; - - // make sure all full_cells have positive orientation - void reorient_full_cells(); - -protected: - // This is used in the |remove(v)| member function to manage sets of Full_cell_handles - template< typename FCH > - struct Full_cell_set : public std::vector - { - typedef std::vector Base_set; - using Base_set::begin; - using Base_set::end; - void make_searchable() - { // sort the full cell handles - std::sort(begin(), end()); - } - bool contains(const FCH & fch) const - { - return std::binary_search(begin(), end(), fch); - } - bool contains_1st_and_not_2nd(const FCH & fst, const FCH & snd) const - { - return ( ! contains(snd) ) && ( contains(fst) ); - } - }; - - void display_all_full_cells__debugging() const - { - std::cerr << "ALL FULL CELLS:" << std::endl; - for (Full_cell_const_iterator cit = full_cells_begin() ; - cit != full_cells_end() ; ++cit ) - { - std::cerr << std::hex << &*cit << ": "; - for (int jj = 0 ; jj <= current_dimension() ; ++jj) - std::cerr << (is_infinite(cit->vertex(jj)) ? 
0xFFFFFFFF : (unsigned int)&*cit->vertex(jj)) << " - "; - std::cerr << std::dec << std::endl; - } - std::cerr << std::endl; - } - - -}; // Triangulation<...> - -// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = - -// CLASS MEMBER FUNCTIONS - -template < class TT, class TDS > -void -Triangulation -::reorient_full_cells() -{ - if( current_dimension() < 1 ) - return; - - Full_cell_iterator sit = full_cells_begin(); - Full_cell_iterator send = full_cells_end(); - for ( ; sit != send ; ++sit) - { - if( ! (is_infinite(sit) && (1 == current_dimension())) ) - { - sit->swap_vertices(current_dimension() - 1, current_dimension()); - } - } -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - THE REMOVAL METHODS - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::contract_face(const Point & p, const Face & f) -{ - CGAL_precondition( ! is_infinite(f) ); - Vertex_handle v = tds().contract_face(f); - v->set_point(p); - CGAL_expensive_postcondition_msg(are_incident_full_cells_valid(v), "new point is not where it should be"); - return v; -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - THE INSERTION METHODS - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert(const Point & p, Locate_type lt, const Face & f, const Facet & ft, Full_cell_handle s) -{ - switch( lt ) - { - case IN_FULL_CELL: - return insert_in_full_cell(p, s); - break; - case OUTSIDE_CONVEX_HULL: - return insert_outside_convex_hull(p, s); - break; - case OUTSIDE_AFFINE_HULL: - return insert_outside_affine_hull(p); - break; - case IN_FACET: - { - return insert_in_facet(p, ft); - break; - } - case IN_FACE: - return insert_in_face(p, f); - break; - case ON_VERTEX: - s->vertex(f.index(0))->set_point(p); - return s->vertex(f.index(0)); - break; - } - CGAL_assertion(false); - return Vertex_handle(); -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert(const Point & p, Full_cell_handle start) -{ - Locate_type lt; - Face f(maximal_dimension()); - Facet ft; - Full_cell_handle s = locate(p, lt, f, ft, start); - return insert(p, lt, f, ft, s); -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert(const Point & p, Vertex_handle v) -{ - if( Vertex_handle() == v ) - v = infinite_vertex(); - return insert(p, v->full_cell()); -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert_in_face(const Point & p, const Face & f) -{ - CGAL_precondition( ! is_infinite(f) ); - Vertex_handle v = tds().insert_in_face(f); - v->set_point(p); - return v; -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert_in_facet(const Point & p, const Facet & ft) -{ - CGAL_precondition( ! is_infinite(ft) ); - Vertex_handle v = tds().insert_in_facet(ft); - v->set_point(p); - return v; -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert_in_full_cell(const Point & p, Full_cell_handle s) -{ - CGAL_precondition( ! is_infinite(s) ); - Vertex_handle v = tds().insert_in_full_cell(s); - v->set_point(p); - return v; -} - -// NOT DOCUMENTED... 
-template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert_outside_convex_hull_1(const Point & p, Full_cell_handle s) -{ - // This is a special case for dimension 1, because in that case, the right - // infinite full_cell is not correctly oriented... (sice its first vertex is the - // infinite one... - CGAL_precondition( is_infinite(s) ); - CGAL_precondition( 1 == current_dimension() ); - Vertex_handle v = tds().insert_in_full_cell(s); - v->set_point(p); - return v; -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert_outside_convex_hull(const Point & p, Full_cell_handle s) -{ - if( 1 == current_dimension() ) - { - return insert_outside_convex_hull_1(p, s); - } - CGAL_precondition( is_infinite(s) ); - CGAL_assertion( current_dimension() >= 2 ); - std::vector simps; - simps.reserve(64); - std::back_insert_iterator > out(simps); - if( current_dimension() < maximal_dimension() ) - { - Coaffine_orientation_d ori = coaffine_orientation_predicate(); - Outside_convex_hull_traversal_predicate - ochtp(*this, p, ori); - tds().gather_full_cells(s, ochtp, out); - } - else - { - Orientation_d ori = geom_traits().orientation_d_object(); - Outside_convex_hull_traversal_predicate - ochtp(*this, p, ori); - tds().gather_full_cells(s, ochtp, out); - } - int inf_v_index = s->index(infinite_vertex()); - Vertex_handle v = insert_in_hole( - p, simps.begin(), simps.end(), Facet(s, inf_v_index)); - return v; -} - -template < class TT, class TDS > -typename Triangulation::Vertex_handle -Triangulation -::insert_outside_affine_hull(const Point & p) -{ - CGAL_precondition( current_dimension() < maximal_dimension() ); - Vertex_handle v = tds().insert_increase_dimension(infinite_vertex()); - // reset the orientation predicate: - reset_flat_orientation(); - v->set_point(p); - if( current_dimension() >= 1 ) - { - Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); - int inf_v_index = inf_v_cell->index(infinite_vertex()); - Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); - Orientation o = orientation(s); - CGAL_assertion( COPLANAR != o ); - if( NEGATIVE == o ) - reorient_full_cells(); - - - // We just inserted the second finite point and the right infinite - // cell is like : (inf_v, v), but we want it to be (v, inf_v) to be - // consistent with the rest of the cells - if (current_dimension() == 1) - { - // Is "inf_v_cell" the right infinite cell? - // Then inf_v_index should be 1 - if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 - && inf_v_index == 0) - { - inf_v_cell->swap_vertices( - current_dimension() - 1, current_dimension()); - } - // Otherwise, let's find the right infinite cell - else - { - inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2); - inf_v_index = inf_v_cell->index(infinite_vertex()); - // Is "inf_v_cell" the right infinite cell? - // Then inf_v_index should be 1 - if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 - && inf_v_index == 0) - { - inf_v_cell->swap_vertices( - current_dimension() - 1, current_dimension()); - } - } - } - } - return v; -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - THE MAIN LOCATE(...) 
FUNCTION - -template < class TT, class TDS > -template< typename OrientationPredicate > -typename Triangulation::Full_cell_handle -Triangulation -::do_locate(const Point & p, // query point - Locate_type & loc_type,// type of result (full_cell, face, vertex) - Face & face,// the face containing the query in its interior (when appropriate) - Facet & facet,// the facet containing the query in its interior (when appropriate) - Full_cell_handle start, // starting full_cell for the walk - OrientationPredicate const& orientation_pred - ) const -{ - const int cur_dim = current_dimension(); - - if( cur_dim == -1 ) - { - loc_type = OUTSIDE_AFFINE_HULL; - return Full_cell_handle(); - } - else if( cur_dim == 0 ) - { - Vertex_handle vit = infinite_full_cell()->neighbor(0)->vertex(0); - if( EQUAL != geom_traits().compare_lexicographically_d_object()(p, vit->point()) ) - { - loc_type = OUTSIDE_AFFINE_HULL; - return Full_cell_handle(); - } - else - { - loc_type = ON_VERTEX; - face.set_full_cell(vit->full_cell()); - face.set_index(0, 0); - return vit->full_cell(); - } - } - - Full_cell_handle s; - - // if we don't know where to start, we start from any bounded full_cell - if( Full_cell_handle() == start ) - { - // THE HACK THAT NOBODY SHOULD DO... BUT DIFFICULT TO WORK AROUND - // THIS... TODO: WORK AROUND IT - Full_cell_handle inf_c = const_cast(this)->infinite_full_cell(); - int inf_v_index = inf_c->index(infinite_vertex()); - s = inf_c->neighbor(inf_v_index); - } - else - { - s = start; - if( is_infinite(s) ) - { - int inf_v_index = s->index(infinite_vertex()); - s = s->neighbor(inf_v_index); - } - } - - // Check if query |p| is outside the affine hull - if( cur_dim < maximal_dimension() ) - { - if( ! geom_traits().contained_in_affine_hull_d_object()( - points_begin(s), - points_begin(s) + current_dimension() + 1, - p) ) - { - loc_type = OUTSIDE_AFFINE_HULL; - return Full_cell_handle(); - } - } - - // we remember the |previous|ly visited full_cell to avoid the evaluation - // of one |orientation| predicate - Full_cell_handle previous = Full_cell_handle(); - bool full_cell_not_found = true; - while(full_cell_not_found) // we walk until we locate the query point |p| - { - #ifdef CGAL_TRIANGULATION_STATISTICS - ++walk_size_; - #endif - // For the remembering stochastic walk, we need to start trying - // with a random index: - int j, i = rng_.get_int(0, cur_dim); - // we check |p| against all the full_cell's hyperplanes in turn - - for(j = 0; j <= cur_dim; ++j, i = (i + 1) % (cur_dim + 1) ) - { - Full_cell_handle next = s->neighbor(i); - if( previous == next ) - { // no need to compute the orientation, we already know it - orientations_[i] = POSITIVE; - continue; // go to next full_cell's facet - } - - Substitute_point_in_vertex_iterator< - typename Full_cell::Vertex_handle_const_iterator> - spivi(s->vertex(i), &p); - - orientations_[i] = orientation_pred( - boost::make_transform_iterator(s->vertices_begin(), spivi), - boost::make_transform_iterator(s->vertices_begin() + cur_dim + 1, - spivi)); - - if( orientations_[i] != NEGATIVE ) - { - // from this facet's point of view, we are inside the - // full_cell or on its boundary, so we continue to next facet - continue; - } - - // At this point, we know that we have to jump to the |next| - // full_cell because orientation_[i] == NEGATIVE - previous = s; - s = next; - if( is_infinite(next) ) - { // we have arrived OUTSIDE the convex hull of the triangulation, - // so we stop the search - full_cell_not_found = false; - loc_type = OUTSIDE_CONVEX_HULL; - 
face.set_full_cell(s); - } - break; - } // end of the 'for' loop - if( ( cur_dim + 1 ) == j ) // we found the full_cell containing |p| - full_cell_not_found = false; - } - // Here, we know in which full_cell |p| is in. - // We now check more precisely where |p| landed: - // vertex, facet, face or full_cell. - if( ! is_infinite(s) ) - { - face.set_full_cell(s); - int num(0); - int verts(0); - for(int i = 0; i < cur_dim; ++i) - { - if( orientations_[i] == COPLANAR ) - { - ++num; - facet = Facet(s, i); - } - else - face.set_index(verts++, i); - } - //-- We could put the if{}else{} below in the loop above, but then we would - // need to test if (verts < cur_dim) many times... we do it only once - // here: - if( orientations_[cur_dim] == COPLANAR ) - { - ++num; - facet = Facet(s, cur_dim); - } - else if( verts < cur_dim ) - face.set_index(verts, cur_dim); - //-- end of remark above // - if( 0 == num ) - { - loc_type = IN_FULL_CELL; - face.clear(); - } - else if( cur_dim == num ) - loc_type = ON_VERTEX; - else if( 1 == num ) - loc_type = IN_FACET; - else - loc_type = IN_FACE; - } - return s; -} - -template < class TT, class TDS > -typename Triangulation::Full_cell_handle -Triangulation -::locate( const Point & p, // query point - Locate_type & loc_type,// type of result (full_cell, face, vertex) - Face & face,// the face containing the query in its interior (when appropriate) - Facet & facet,// the facet containing the query in its interior (when appropriate) - Full_cell_handle start// starting full_cell for the walk - ) const -{ - if( current_dimension() == maximal_dimension() ) - { - Orientation_d ori = geom_traits().orientation_d_object(); - return do_locate(p, loc_type, face, facet, start, ori); - } - else - return do_locate(p, loc_type, face, facet, start, coaffine_orientation_predicate()); -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - the locate(...) variants - -template < class TT, class TDS > -typename Triangulation::Full_cell_handle -Triangulation -::locate( const Point & p, - Locate_type & loc_type, - Face & face, - Facet & facet, - Vertex_handle start) const -{ - if( Vertex_handle() == start ) - start = infinite_vertex(); - return locate(p, loc_type, face, facet, start->full_cell()); -} - -template < class TT, class TDS > -typename Triangulation::Full_cell_handle -Triangulation -::locate(const Point & p, Full_cell_handle s) const -{ - Locate_type lt; - Face face(maximal_dimension()); - Facet facet; - return locate(p, lt, face, facet, s); -} - -template < class TT, class TDS > -typename Triangulation::Full_cell_handle -Triangulation -::locate(const Point & p, Vertex_handle v) const -{ - if( Vertex_handle() != v ) - v = infinite_vertex(); - return this->locate(p, v->full_cell()); -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY - -template < class TT, class TDS > -bool -Triangulation -::is_valid(bool verbose, int level) const -{ - if( ! 
tds().is_valid(verbose, level) ) - return false; - - Full_cell_const_iterator c; - if( current_dimension() < 0 ) - return true; - Orientation o; - for( c = full_cells_begin(); c != full_cells_end(); ++c ) - { - if( is_infinite(c) ) - { - if( current_dimension() > 1 ) - { - int i = c->index( infinite_vertex() ); - Full_cell_handle n = c->neighbor(i); - infinite_vertex()->set_point(n->vertex(c->mirror_index(i))->point()); - o = - orientation(c, true); - } - else - o = POSITIVE; - } - else - o = orientation(c, true); - if( NEGATIVE == o ) - { - if( verbose ) CGAL_warning_msg(false, "full_cell is not correctly oriented"); - return false; - } - if( COPLANAR == o ) - { - if( verbose ) CGAL_warning_msg(false, "full_cell is flat"); - return false; - } - } - return true; -} - -template < class TT, class TDS > -bool Triangulation::are_incident_full_cells_valid(Vertex_const_handle v, bool verbose, int) const -{ - if( current_dimension() <= 0 ) - return true; - typedef std::vector Simps; - Simps simps; - simps.reserve(64); - std::back_insert_iterator out(simps); - incident_full_cells(v, out); - typename Simps::const_iterator sit = simps.begin(); - for( ; sit != simps.end(); ++sit ) - { - if( is_infinite(*sit) ) - continue; - Orientation o = orientation(*sit); - if( NEGATIVE == o ) - { - if( verbose ) CGAL_warning_msg(false, "full_cell is not correctly oriented"); - return false; - } - if( COPLANAR == o ) - { - if( verbose ) CGAL_warning_msg(false, "full_cell is flat"); - return false; - } - } - return true; -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: - -template < class TT, class TDS > -std::istream & -operator>>(std::istream & is, Triangulation & tr) - // reads : - // - the dimensions (maximal and current) - // - the number of finite vertices - // - the non combinatorial information on vertices (point, etc) - // - the number of full_cells - // - the full_cells by the indices of their vertices in the preceding list - // of vertices, plus the non combinatorial information on each full_cell - // - the neighbors of each full_cell by their index in the preceding list -{ - typedef Triangulation T; - typedef typename T::Vertex_handle Vertex_handle; - - // read current dimension and number of vertices - size_t n; - int cd; - if( is_ascii(is) ) - is >> cd >> n; - else - { - read(is, cd); - read(is, n, io_Read_write()); - } - - CGAL_assertion_msg( cd <= tr.maximal_dimension(), "input Triangulation has too high dimension"); - - tr.clear(); - tr.set_current_dimension(cd); - - if( n == 0 ) - return is; - - std::vector vertices; - vertices.resize(n+1); - vertices[0] = tr.infinite_vertex(); - is >> (*vertices[0]); - - // read the vertices: - size_t i(1); - while( i <= n ) - { - vertices[i] = tr.new_vertex(); - is >> (*vertices[i]); // read a vertex - ++i; - } - - // now, read the combinatorial information - return tr.tds().read_full_cells(is, vertices); -} - -template < class TT, class TDS > -std::ostream & -operator<<(std::ostream & os, const Triangulation & tr) - // writes : - // - the dimensions (maximal and current) - // - the number of finite vertices - // - the non combinatorial information on vertices (point, etc) - // - the number of full_cells - // - the full_cells by the indices of their vertices in the preceding list - // of vertices, plus the non combinatorial information on each full_cell - // - the neighbors of each full_cell by their index in the preceding list -{ - typedef Triangulation T; - typedef typename 
T::Vertex_const_handle Vertex_handle; - typedef typename T::Vertex_const_iterator Vertex_iterator; - - // outputs dimensions and number of vertices - size_t n = tr.number_of_vertices(); - if( is_ascii(os) ) - os << tr.current_dimension() << std::endl << n << std::endl; - else - { - write(os, tr.current_dimension()); - write(os, n, io_Read_write()); - } - - if( n == 0 ) - return os; - - size_t i(0); - // write the vertices - std::map index_of_vertex; - - // infinite vertex has index 0 (among all the vertices) - index_of_vertex[tr.infinite_vertex()] = i++; - os << *tr.infinite_vertex(); - for( Vertex_iterator it = tr.vertices_begin(); it != tr.vertices_end(); ++it ) - { - if( tr.is_infinite(it) ) - continue; - os << *it; // write the vertex - index_of_vertex[it] = i++; - } - CGAL_assertion( i == n+1 ); - - // output the combinatorial information - return tr.tds().write_full_cells(os, index_of_vertex); -} - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h b/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h deleted file mode 100644 index 2493c712..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h +++ /dev/null @@ -1,1603 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_DATA_STRUCTURE_H -#define CGAL_TRIANGULATION_DATA_STRUCTURE_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -namespace CGAL { - -template< class Dimen, - class Vb = Default, - class Fcb = Default > -class Triangulation_data_structure -{ - typedef Triangulation_data_structure Self; - typedef typename Default::Get >::type V_base; - typedef typename Default::Get >::type FC_base; - -public: - typedef typename V_base::template Rebind_TDS::Other Vertex; /* Concept */ - typedef typename FC_base::template Rebind_TDS::Other Full_cell; /* Concept */ - - // Tools to change the Vertex and Cell types of the TDS. 
- template < typename Vb2 > - struct Rebind_vertex { - typedef Triangulation_data_structure Other; - }; - - template < typename Fcb2 > - struct Rebind_full_cell { - typedef Triangulation_data_structure Other; - }; - - - - // we want to store an object of this class in every Full_cell: - class Full_cell_data - { - unsigned char bits_; - public: - Full_cell_data() : bits_(0) {} - Full_cell_data(const Full_cell_data & fcd) : bits_(fcd.bits_) {} - - void clear() { bits_ = 0; } - void mark_visited() { bits_ = 1; } - void clear_visited() { bits_ = 0; } - - bool is_clear() const { return bits_ == 0; } - bool is_visited() const { return bits_ == 1; } - // WARNING: if we use more bits and several bits can be set at once, - // then make sure to use bitwise operation above, instead of direct - // affectation. - }; - -protected: - typedef Compact_container Vertex_container; - typedef Compact_container Full_cell_container; - -public: - typedef Dimen Maximal_dimension; - - typedef typename Vertex_container::size_type size_type; /* Concept */ - typedef typename Vertex_container::difference_type difference_type; /* Concept */ - - typedef typename Vertex_container::iterator Vertex_handle; /* Concept */ - typedef typename Vertex_container::iterator Vertex_iterator; /* Concept */ - typedef typename Vertex_container::const_iterator Vertex_const_handle; - typedef typename Vertex_container::const_iterator Vertex_const_iterator; - - typedef typename Full_cell_container::iterator Full_cell_handle; /* Concept */ - typedef typename Full_cell_container::iterator Full_cell_iterator; /* Concept */ - typedef typename Full_cell_container::const_iterator Full_cell_const_handle; - typedef typename Full_cell_container::const_iterator Full_cell_const_iterator; - - typedef internal::Triangulation:: - Triangulation_ds_facet_iterator Facet_iterator; /* Concept */ - - /* The 2 types defined below, |Facet| and |Rotor| are used when traversing - the boundary `B' of the union of a set of full cells. |Rotor| makes it - easy to rotate around itself, in the search of neighbors in `B' (see - |rotate_rotor| and |insert_in_tagged_hole|) */ - - // A co-dimension 1 sub-simplex. - class Facet /* Concept */ - { - Full_cell_handle full_cell_; - int index_of_covertex_; - public: - Facet() : full_cell_(), index_of_covertex_(0) {} - Facet(Full_cell_handle f, int i) : full_cell_(f), index_of_covertex_(i) {} - Full_cell_handle full_cell() const { return full_cell_; } - int index_of_covertex() const { return index_of_covertex_; } - }; - - // A co-dimension 2 sub-simplex. called a Rotor because we can rotate - // the two "covertices" around the sub-simplex. Useful for traversing the - // boundary of a hole. 
NOT DOCUMENTED - class Rotor : public Facet - { - int index_of_second_covertex_; - public: - Rotor() : Facet(), index_of_second_covertex_(0) {} - Rotor(Full_cell_handle f, int first, int second) : Facet(f, first), index_of_second_covertex_(second) {} - int index_of_second_covertex() const { return index_of_second_covertex_; } - }; - - typedef Triangulation_face Face; /* Concept */ - -protected: // DATA MEMBERS - - int dmax_, dcur_; // dimension of the current triangulation - Vertex_container vertices_; // list of all vertices - Full_cell_container full_cells_; // list of all full cells - -private: - - void clean_dynamic_memory() - { - vertices_.clear(); - full_cells_.clear(); - } - - template < class Dim_tag > - struct get_maximal_dimension - { - static int value(int D) { return D; } - }; - // specialization - template < int D > - struct get_maximal_dimension > - { - static int value(int) { return D; } - }; - -public: - Triangulation_data_structure( int dim=0) /* Concept */ - : dmax_(get_maximal_dimension::value(dim)), dcur_(-2), - vertices_(), full_cells_() - { - CGAL_assertion_msg(dmax_ > 0, "maximal dimension must be positive."); - } - - ~Triangulation_data_structure() - { - clean_dynamic_memory(); - } - - Triangulation_data_structure(const Triangulation_data_structure & tds) - : dmax_(tds.dmax_), dcur_(tds.dcur_), - vertices_(tds.vertices_), full_cells_(tds.full_cells_) - { - typedef std::map V_map; - typedef std::map C_map; - V_map vmap; - C_map cmap; - Vertex_const_iterator vfrom = tds.vertices_begin(); - Vertex_iterator vto = vertices_begin(); - Full_cell_const_iterator cfrom = tds.full_cells_begin(); - Full_cell_iterator cto = full_cells_begin(); - while( vfrom != tds.vertices_end() ) - vmap[vfrom++] = vto++; - while( cfrom != tds.full_cells_end() ) - cmap[cfrom++] = cto++; - cto = full_cells_begin(); - while( cto != full_cells_end() ) - { - for( int i = 0; i <= (std::max)(0, current_dimension()); ++i ) - { - associate_vertex_with_full_cell(cto, i, vmap[cto->vertex(i)]); - cto->set_neighbor(i, cmap[cto->neighbor(i)]); - } - ++cto; - } - } - - // QUERIES - -protected: - - bool check_range(int i) const - { - if( current_dimension() < 0 ) - { - return (0 == i); - } - return ( (0 <= i) && (i <= current_dimension()) ); - } - -public: - - /* returns the current dimension of the full cells in the triangulation. 
*/ - int maximal_dimension() const { return dmax_; } /* Concept */ - int current_dimension() const { return dcur_; } /* Concept */ - - size_type number_of_vertices() const /* Concept */ - { - return this->vertices_.size(); - } - size_type number_of_full_cells() const /* Concept */ - { - return this->full_cells_.size(); - } - - bool empty() const /* Concept */ - { - return current_dimension() == -2; - } - - Vertex_container & vertices() { return vertices_; } - const Vertex_container & vertices() const { return vertices_; } - Full_cell_container & full_cells() { return full_cells_; } - const Full_cell_container & full_cells() const { return full_cells_; } - - Vertex_handle vertex(Full_cell_handle s, int i) const /* Concept */ - { - CGAL_precondition(s != Full_cell_handle() && check_range(i)); - return s->vertex(i); - } - - Vertex_const_handle vertex(Full_cell_const_handle s, int i) const /* Concept */ - { - CGAL_precondition(s != Full_cell_handle() && check_range(i)); - return s->vertex(i); - } - - bool is_vertex(Vertex_const_handle v) const /* Concept */ - { - if( Vertex_const_handle() == v ) - return false; - Vertex_const_iterator vit = vertices_begin(); - while( vit != vertices_end() && ( v != vit ) ) - ++vit; - return v == vit; - } - - bool is_full_cell(Full_cell_const_handle s) const /* Concept */ - { - if( Full_cell_const_handle() == s ) - return false; - Full_cell_const_iterator sit = full_cells_begin(); - while( sit != full_cells_end() && ( s != sit ) ) - ++sit; - return s == sit; - } - - Full_cell_handle full_cell(Vertex_handle v) const /* Concept */ - { - CGAL_precondition(v != Vertex_handle()); - return v->full_cell(); - } - - Full_cell_const_handle full_cell(Vertex_const_handle v) const /* Concept */ - { - CGAL_precondition(Vertex_const_handle() != v); - return v->full_cell(); - } - - Full_cell_handle neighbor(Full_cell_handle s, int i) const /* Concept */ - { - CGAL_precondition(Full_cell_handle() != s && check_range(i)); - return s->neighbor(i); - } - - Full_cell_const_handle neighbor(Full_cell_const_handle s, int i) const/* Concept */ - { - CGAL_precondition(Full_cell_const_handle() != s && check_range(i)); - return s->neighbor(i); - } - - int mirror_index(Full_cell_handle s, int i) const /* Concept */ - { - CGAL_precondition(Full_cell_handle() != s && check_range(i)); - return s->mirror_index(i); - } - - int mirror_index(Full_cell_const_handle s, int i) const - { - CGAL_precondition(Full_cell_const_handle() != s && check_range(i)); /* Concept */ - return s->mirror_index(i); - } - - int mirror_vertex(Full_cell_handle s, int i) const /* Concept */ - { - CGAL_precondition(Full_cell_handle() != s && check_range(i)); - return s->mirror_vertex(i); - } - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - FACETS OPERATIONS - - // works for Face_ = Facet and Face_ = Rotor. - // NOT DOCUMENTED for the Rotor case... - template< typename Face_ > - Full_cell_handle full_cell(const Face_ & f) const /* Concept */ - { - return f.full_cell(); - } - - // works for Face_ = Facet and Face_ = Rotor. - // NOT DOCUMENTED for the Rotor case... - template< class Face_ > - int index_of_covertex(const Face_ & f) const /* Concept */ - { - return f.index_of_covertex(); - } - - // NOT DOCUMENTED - // A Rotor has two covertices - int index_of_second_covertex(const Rotor & f) const - { - return f.index_of_second_covertex(); - } - - // works for Face_ = Facet and Face_ = Rotor. - // NOT DOCUMENTED... 
- template< class Face_ > - bool is_boundary_facet(const Face_ & f) const - { - if( get_visited(neighbor(full_cell(f), index_of_covertex(f))) ) - return false; - if( ! get_visited(full_cell(f)) ) - return false; - return true; - } - - // NOT DOCUMENTED... - Rotor rotate_rotor(Rotor & f) - { - int opposite = mirror_index(full_cell(f), index_of_covertex(f)); - Full_cell_handle s = neighbor(full_cell(f), index_of_covertex(f)); - int new_second = s->index(vertex(full_cell(f), index_of_second_covertex(f))); - return Rotor(s, new_second, opposite); - } - - // NICE UPDATE OPERATIONS - -protected: - void do_insert_increase_dimension(Vertex_handle, Vertex_handle); -public: -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS - - Vertex_handle collapse_face(const Face &); /* Concept */ - void remove_decrease_dimension(Vertex_handle, Vertex_handle); /* Concept */ - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS - - Vertex_handle insert_in_full_cell(Full_cell_handle); /* Concept */ - Vertex_handle insert_in_face(const Face &); /* Concept */ - Vertex_handle insert_in_facet(const Facet &); /* Concept */ - template< typename Forward_iterator > - Vertex_handle insert_in_hole(Forward_iterator, Forward_iterator, Facet); /* Concept */ - template< typename Forward_iterator, typename OutputIterator > - Vertex_handle insert_in_hole(Forward_iterator, Forward_iterator, Facet, OutputIterator); /* Concept */ - - template< typename OutputIterator > - Full_cell_handle insert_in_tagged_hole(Vertex_handle, Facet, OutputIterator); - - Vertex_handle insert_increase_dimension(Vertex_handle=Vertex_handle()); /* Concept */ - -private: - - // Used by insert_in_tagged_hole - struct IITH_task - { - IITH_task( - Facet boundary_facet_, - int index_of_inside_cell_in_outside_cell_, - Full_cell_handle future_neighbor_ = Full_cell_handle(), - int new_cell_index_in_future_neighbor_ = -1, - int index_of_future_neighbor_in_new_cell_ = -1) - : boundary_facet(boundary_facet_), - index_of_inside_cell_in_outside_cell(index_of_inside_cell_in_outside_cell_), - future_neighbor(future_neighbor_), - new_cell_index_in_future_neighbor(new_cell_index_in_future_neighbor_), - index_of_future_neighbor_in_new_cell(index_of_future_neighbor_in_new_cell_) - {} - - // "new_cell" is the cell about to be created - Facet boundary_facet; - int index_of_inside_cell_in_outside_cell; - Full_cell_handle future_neighbor; - int new_cell_index_in_future_neighbor; - int index_of_future_neighbor_in_new_cell; - }; - - // NOT DOCUMENTED - void clear_visited_marks(Full_cell_handle) const; - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - DANGEROUS UPDATE OPERATIONS - -private: - - // NOT DOCUMENTED - template< typename FCH > // FCH = Full_cell_[const_]handle - bool get_visited(FCH c) const - { - return c->tds_data().is_visited(); - } - - // NOT DOCUMENTED - template< typename FCH > // FCH = Full_cell_[const_]handle - void set_visited(FCH c, bool m) const - { - if( m ) - c->tds_data().mark_visited(); - else - c->tds_data().clear_visited(); - } - -public: - - void clear() /* Concept */ - { - clean_dynamic_memory(); - dcur_ = -2; - } - - void set_current_dimension(int d) /* Concept */ - { - CGAL_precondition(-2<=d && d<=maximal_dimension()); - dcur_ = d; - } - - Full_cell_handle new_full_cell(Full_cell_handle s) - { - return full_cells_.emplace(*s); - } - - Full_cell_handle new_full_cell() /* Concept */ - { - return full_cells_.emplace(dmax_); - } - - void delete_full_cell(Full_cell_handle 
s) /* Concept */ - { - CGAL_precondition(Full_cell_handle() != s); - // CGAL_expensive_precondition(is_full_cell(s)); - full_cells_.erase(s); - } - - template< typename Forward_iterator > - void delete_full_cells(Forward_iterator start, Forward_iterator end) /* Concept */ - { - Forward_iterator s = start; - while( s != end ) - full_cells_.erase(*s++); - } - - template< class T > - Vertex_handle new_vertex( const T & t ) - { - return vertices_.emplace(t); - } - - Vertex_handle new_vertex() /* Concept */ - { - return vertices_.emplace(); - } - - void delete_vertex(Vertex_handle v) /* Concept */ - { - CGAL_precondition( Vertex_handle() != v ); - vertices_.erase(v); - } - - void associate_vertex_with_full_cell(Full_cell_handle s, int i, Vertex_handle v) /* Concept */ - { - CGAL_precondition(check_range(i)); - CGAL_precondition(s != Full_cell_handle()); - CGAL_precondition(v != Vertex_handle()); - s->set_vertex(i, v); - v->set_full_cell(s); - } - - void set_neighbors(Full_cell_handle s, int i, Full_cell_handle s1, int j) /* Concept */ - { - CGAL_precondition(check_range(i)); - CGAL_precondition(check_range(j)); - CGAL_precondition(s != Full_cell_handle()); - CGAL_precondition(s1 != Full_cell_handle()); - s->set_neighbor(i, s1); - s1->set_neighbor(j, s); - s->set_mirror_index(i, j); - s1->set_mirror_index(j, i); - } - - // SANITY CHECKS - - bool is_valid(bool = true, int = 0) const; /* Concept */ - - // NOT DOCUMENTED - template< class OutStream> void write_graph(OutStream &); - - Vertex_iterator vertices_begin() { return vertices_.begin(); } /* Concept */ - Vertex_iterator vertices_end() { return vertices_.end(); } /* Concept */ - Full_cell_iterator full_cells_begin() { return full_cells_.begin(); } /* Concept */ - Full_cell_iterator full_cells_end() { return full_cells_.end(); } /* Concept */ - - Vertex_const_iterator vertices_begin() const { return vertices_.begin(); } /* Concept */ - Vertex_const_iterator vertices_end() const { return vertices_.end(); } /* Concept */ - Full_cell_const_iterator full_cells_begin() const { return full_cells_.begin(); } /* Concept */ - Full_cell_const_iterator full_cells_end() const { return full_cells_.end(); } /* Concept */ - - Facet_iterator facets_begin() /* Concept */ - { - if( current_dimension() <= 0 ) - return facets_end(); - return Facet_iterator(*this); - } - Facet_iterator facets_end() /* Concept */ - { - return Facet_iterator(*this, 0); - } - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - FULL CELL GATHERING - - // a traversal predicate for gathering full_cells incident to a given face - // ``incident'' means that the given face is a subface of the full_cell - class Incident_full_cell_traversal_predicate - { - const Face & f_; - int dim_; - const Triangulation_data_structure & tds_; - public: - Incident_full_cell_traversal_predicate(const Triangulation_data_structure & tds, - const Face & f) - : f_(f), tds_(tds) - { - dim_ = f.face_dimension(); - } - bool operator()(const Facet & facet) const - { - Vertex_handle v = tds_.full_cell(facet)->vertex(tds_.index_of_covertex(facet)); - for( int i = 0; i <= dim_; ++i ) - { - if( v == f_.vertex(i) ) - return false; - } - return true; - } - }; - - // a traversal predicate for gathering full_cells having a given face as subface - class Star_traversal_predicate - { - const Face & f_; - int dim_; - const Triangulation_data_structure & tds_; - public: - Star_traversal_predicate(const Triangulation_data_structure & tds, - const Face & f) - : f_(f), tds_(tds) - { - dim_ = f.face_dimension(); - } - bool 
operator()(const Facet & facet) const - { - Full_cell_handle s = tds_.full_cell(facet)->neighbor(tds_.index_of_covertex(facet)); - for( int j = 0; j <= tds_.current_dimension(); ++j ) - { - for( int i = 0; i <= dim_; ++i ) - if( s->vertex(j) == f_.vertex(i) ) - return true; - } - return false; - } - }; - - template< typename TraversalPredicate, typename OutputIterator > - Facet gather_full_cells(Full_cell_handle, TraversalPredicate &, OutputIterator &) const; /* Concept */ - template< typename OutputIterator > - OutputIterator incident_full_cells(const Face &, OutputIterator) const; /* Concept */ - template< typename OutputIterator > - OutputIterator incident_full_cells(Vertex_const_handle, OutputIterator) const; /* Concept */ - template< typename OutputIterator > - OutputIterator star(const Face &, OutputIterator) const; /* Concept */ -#ifndef CGAL_CFG_NO_CPP0X_DEFAULT_TEMPLATE_ARGUMENTS_FOR_FUNCTION_TEMPLATES - template< typename OutputIterator, typename Comparator = std::less > - OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp = Comparator()) - { - return incident_faces(v, dim, out, cmp, true); - } - template< typename OutputIterator, typename Comparator = std::less > - OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, Comparator = Comparator(), bool = false) const; -#else - template< typename OutputIterator, typename Comparator > - OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp = Comparator()) - { - return incident_faces(v, dim, out, cmp, true); - } - template< typename OutputIterator > - OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out) - { - return incident_faces(v, dim, out, std::less(), true); - } - template< typename OutputIterator, typename Comparator > - OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, Comparator = Comparator(), bool = false) const; - template< typename OutputIterator > - OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, - std::less = std::less(), bool = false) const; -#endif - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INPUT / OUTPUT - - std::istream & read_full_cells(std::istream &, const std::vector &); - std::ostream & write_full_cells(std::ostream &, std::map &) const; - -}; // end of ``declaration/definition'' of Triangulation_data_structure<...> - -// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = - -// FUNCTIONS THAT ARE MEMBER FUNCTIONS: - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - THE GATHERING METHODS - -template< class Dim, class Vb, class Fcb > -template< typename OutputIterator > -OutputIterator -Triangulation_data_structure -::incident_full_cells(const Face & f, OutputIterator out) const /* Concept */ -{ - // CGAL_expensive_precondition_msg(is_full_cell(f.full_cell()), "the facet does not belong to the Triangulation"); - Incident_full_cell_traversal_predicate tp(*this, f); - gather_full_cells(f.full_cell(), tp, out); - return out; -} - -template< class Dim, class Vb, class Fcb > -template< typename OutputIterator > -OutputIterator -Triangulation_data_structure -::incident_full_cells(Vertex_const_handle v, OutputIterator out) const /* Concept */ -{ -// CGAL_expensive_precondition(is_vertex(v)); - CGAL_precondition(Vertex_handle() != v); - Face f(v->full_cell()); - f.set_index(0, 
v->full_cell()->index(v)); - return incident_full_cells(f, out); -} - -template< class Dim, class Vb, class Fcb > -template< typename OutputIterator > -OutputIterator -Triangulation_data_structure -::star(const Face & f, OutputIterator out) const /* Concept */ -{ - // CGAL_precondition_msg(is_full_cell(f.full_cell()), "the facet does not belong to the Triangulation"); - Star_traversal_predicate tp(*this, f); - gather_full_cells(f.full_cell(), tp, out); - return out; -} - -template< class Dim, class Vb, class Fcb > -template< typename TraversalPredicate, typename OutputIterator > -typename Triangulation_data_structure::Facet -Triangulation_data_structure -::gather_full_cells(Full_cell_handle start, - TraversalPredicate & tp, - OutputIterator & out) const /* Concept */ -{ - std::queue queue; - set_visited(start, true); - queue.push(start); - const int cur_dim = current_dimension(); - Facet ft; - while( ! queue.empty() ) - { - Full_cell_handle s = queue.front(); - queue.pop(); - *out = s; - ++out; - for( int i = 0; i <= cur_dim; ++i ) - { - Full_cell_handle n = s->neighbor(i); - if( ! get_visited(n) ) - { - set_visited(n, true); - if( tp(Facet(s, i)) ) - queue.push(n); - else - ft = Facet(s, i); - } - } - } - clear_visited_marks(start); - return ft; -} - -#ifdef CGAL_CFG_NO_CPP0X_DEFAULT_TEMPLATE_ARGUMENTS_FOR_FUNCTION_TEMPLATES -template< class Dim, class Vb, class Fcb > -template< typename OutputIterator > -OutputIterator -Triangulation_data_structure -::incident_faces(Vertex_const_handle v, int dim, OutputIterator out, - std::less cmp, bool upper_faces) const -{ - return incident_faces >(v, dim, out, cmp, upper_faces); -} -#endif - -template< class Dim, class Vb, class Fcb > -template< typename OutputIterator, typename Comparator > -OutputIterator -Triangulation_data_structure -::incident_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp, bool upper_faces) const -{ - CGAL_precondition( 0 < dim ); - if( dim >= current_dimension() ) - return out; - typedef std::vector Simplices; - Simplices simps; - simps.reserve(64); - // gather incident full_cells - std::back_insert_iterator sout(simps); - incident_full_cells(v, sout); - // for storing the handles to the vertices of a full_cell - typedef std::vector Vertices; - typedef std::vector Indices; - Vertices vertices(1 + current_dimension()); - Indices sorted_idx(1 + current_dimension()); - // setup Face comparator and Face_set - typedef internal::Triangulation::Compare_faces_with_common_first_vertex - Upper_face_comparator; - Upper_face_comparator ufc(dim); - typedef std::set Face_set; - Face_set face_set(ufc); - for( typename Simplices::const_iterator s = simps.begin(); s != simps.end(); ++s ) - { - int v_idx(0); // the index of |v| in the sorted full_cell - // get the vertices of the full_cell and sort them - for( int i = 0; i <= current_dimension(); ++i ) - vertices[i] = (*s)->vertex(i); - if( upper_faces ) - { - std::sort(vertices.begin(), vertices.end(), cmp); - while( vertices[v_idx] != v ) - ++v_idx; - } - else - { - while( vertices[v_idx] != v ) - ++v_idx; - if( 0 != v_idx ) - std::swap(vertices[0], vertices[v_idx]); - v_idx = 0; - typename Vertices::iterator vbegin(vertices.begin()); - ++vbegin; - std::sort(vbegin, vertices.end(), cmp); - } - if( v_idx + dim > current_dimension() ) - continue; // |v| is too far to the right - // stores the index of the vertices of s in the same order - // as in |vertices|: - for( int i = 0; i <= current_dimension(); ++i ) - sorted_idx[i] = (*s)->index(vertices[i]); - // init state 
for enumerating all candidate faces: - internal::Combination_enumerator f_idx(dim, v_idx + 1, current_dimension()); - Face f(*s); - f.set_index(0, sorted_idx[v_idx]); - while( ! f_idx.end() ) - { - for( int i = 0; i < dim; ++i ) - f.set_index(1 + i, sorted_idx[f_idx[i]]); - face_set.insert(f); // checks if face has already been found - - // compute next sorted face (lexicographic enumeration) - ++f_idx; - } - } - typename Face_set::iterator fit = face_set.begin(); - while( fit != face_set.end() ) - *out++ = *fit++; - return out; -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - THE REMOVAL METHODS - -template -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::collapse_face(const Face & f) /* Concept */ -{ - const int fd = f.face_dimension(); - CGAL_precondition( (1 <= fd ) && (fd < current_dimension())); - std::vector simps; - // save the Face's vertices: - Full_cell s; - for( int i = 0; i <= fd; ++i ) - s.set_vertex(i, f.vertex(i)); - // compute the star of f - simps.reserve(64); - std::back_insert_iterator > out(simps); - star(f, out); - Vertex_handle v = insert_in_hole(simps.begin(), simps.end(), Facet(f.full_cell(), f.index(0))); - for( int i = 0; i <= fd; ++i ) - delete_vertex(s.vertex(i)); - return v; -} - -template -void -Triangulation_data_structure -::remove_decrease_dimension(Vertex_handle v, Vertex_handle star) /* Concept */ -{ - CGAL_assertion( current_dimension() >= -1 ); - if( -1 == current_dimension() ) - { - clear(); - return; - } - else if( 0 == current_dimension() ) - { - delete_full_cell(v->full_cell()); - delete_vertex(v); - star->full_cell()->set_neighbor(0, Full_cell_handle()); - set_current_dimension(-1); - return; - } - else if( 1 == current_dimension() ) - { - Full_cell_handle s = v->full_cell(); - int star_index; - if( s->has_vertex(star, star_index) ) - s = s->neighbor(star_index); - // Here, |star| is not a vertex of |s|, so it's the only finite - // full_cell - Full_cell_handle inf1 = s->neighbor(0); - Full_cell_handle inf2 = s->neighbor(1); - Vertex_handle v2 = s->vertex(1 - s->index(v)); - delete_vertex(v); - delete_full_cell(s); - inf1->set_vertex(1, Vertex_handle()); - inf1->set_vertex(1, Vertex_handle()); - inf2->set_neighbor(1, Full_cell_handle()); - inf2->set_neighbor(1, Full_cell_handle()); - associate_vertex_with_full_cell(inf1, 0, star); - associate_vertex_with_full_cell(inf2, 0, v2); - set_neighbors(inf1, 0, inf2, 0); - set_current_dimension(0); - return; - } - typedef std::vector Simplices; - Simplices simps; - incident_full_cells(v, std::back_inserter(simps)); - for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) - { - int v_idx = (*it)->index(v); - if( ! 
(*it)->has_vertex(star) ) - { - delete_full_cell((*it)->neighbor(v_idx)); - for( int i = 0; i <= current_dimension(); ++i ) - (*it)->vertex(i)->set_full_cell(*it); - } - else - star->set_full_cell(*it); - if( v_idx != current_dimension() ) - { - (*it)->swap_vertices(v_idx, current_dimension()); - (*it)->swap_vertices(current_dimension() - 2, current_dimension() - 1); - } - (*it)->set_vertex(current_dimension(), Vertex_handle()); - (*it)->set_neighbor(current_dimension(), Full_cell_handle()); - } - set_current_dimension(current_dimension()-1); - delete_vertex(v); -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - THE INSERTION METHODS - -template -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::insert_in_full_cell(Full_cell_handle s) /* Concept */ -{ - CGAL_precondition(0 < current_dimension()); - CGAL_precondition(Full_cell_handle() != s); - // CGAL_expensive_precondition(is_full_cell(s)); - - const int cur_dim = current_dimension(); - Vertex_handle v = new_vertex(); - // the full_cell 'fc' is just used to store the handle to all the new full_cells. - Full_cell fc(maximal_dimension()); - for( int i = 1; i <= cur_dim; ++i ) - { - Full_cell_handle new_s = new_full_cell(s); - fc.set_neighbor(i, new_s); - associate_vertex_with_full_cell(new_s, i, v); - s->vertex(i-1)->set_full_cell(new_s); - set_neighbors(new_s, i, neighbor(s, i), mirror_index(s, i)); - } - fc.set_neighbor(0, s); - associate_vertex_with_full_cell(s, 0, v); - for( int i = 0; i <= cur_dim; ++i ) - for( int j = 0; j <= cur_dim; ++j ) - { - if( j == i ) continue; - set_neighbors(fc.neighbor(i), j, fc.neighbor(j), i); - } - return v; -} - -template -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::insert_in_face(const Face & f) /* Concept */ -{ - std::vector simps; - simps.reserve(64); - std::back_insert_iterator > out(simps); - incident_full_cells(f, out); - return insert_in_hole(simps.begin(), simps.end(), Facet(f.full_cell(), f.index(0))); -} -template -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::insert_in_facet(const Facet & ft) /* Concept */ -{ - Full_cell_handle s[2]; - s[0] = full_cell(ft); - int i = index_of_covertex(ft); - s[1] = s[0]->neighbor(i); - i = ( i + 1 ) % current_dimension(); - return insert_in_hole(s, s+2, Facet(s[0], i)); -} - -template -template < typename OutputIterator > -typename Triangulation_data_structure::Full_cell_handle -Triangulation_data_structure -::insert_in_tagged_hole(Vertex_handle v, Facet f, - OutputIterator new_full_cells) -{ - CGAL_assertion_msg(is_boundary_facet(f), "starting facet should be on the hole boundary"); - - const int cur_dim = current_dimension(); - Full_cell_handle new_s; - - std::queue task_queue; - task_queue.push( - IITH_task(f, mirror_index(full_cell(f), index_of_covertex(f))) ); - - while (!task_queue.empty()) - { - IITH_task task = task_queue.front(); - task_queue.pop(); - - Full_cell_handle old_s = full_cell(task.boundary_facet); - const int facet_index = index_of_covertex(task.boundary_facet); - - Full_cell_handle outside_neighbor = neighbor(old_s, facet_index); - // Here, "new_s" might actually be a new cell, but it might also be "old_s" - // if it has not been treated already in the meantime - new_s = neighbor(outside_neighbor, task.index_of_inside_cell_in_outside_cell); - // If the cell has not been treated yet - if (old_s == new_s) - { - new_s = new_full_cell(); - - int i(0); 
- for ( ; i < facet_index ; ++i) - associate_vertex_with_full_cell(new_s, i, old_s->vertex(i)); - ++i; // skip facet_index - for ( ; i <= cur_dim ; ++i) - associate_vertex_with_full_cell(new_s, i, old_s->vertex(i)); - associate_vertex_with_full_cell(new_s, facet_index, v); - set_neighbors(new_s, - facet_index, - outside_neighbor, - mirror_index(old_s, facet_index)); - - // add the new full_cell to the list of new full_cells - *new_full_cells++ = new_s; - - // check all of |Facet f|'s neighbors - for (i = 0 ; i <= cur_dim ; ++i) - { - if (facet_index == i) - continue; - // we define a |Rotor| because it makes it easy to rotate around - // in a self contained fashion. The corresponding potential - // boundary facet is Facet(full_cell(rot), index_of_covertex(rot)) - Rotor rot(old_s, i, facet_index); - // |rot| on line above, stands for Candidate Facet - while (!is_boundary_facet(rot)) - rot = rotate_rotor(rot); - - // we did find the |i|-th neighbor of Facet(old_s, facet_index)... - // has it already been extruded to center point |v| ? - Full_cell_handle inside = full_cell(rot); - Full_cell_handle outside = neighbor(inside, index_of_covertex(rot)); - // "m" is the vertex of outside which is not on the boundary - Vertex_handle m = inside->mirror_vertex(index_of_covertex(rot), current_dimension()); // CJTODO: use mirror_index? - // "index" is the index of m in "outside" - int index = outside->index(m); - // new_neighbor is the inside cell which is registered as the neighbor - // of the outside cell => it's either a newly created inside cell or an - // old inside cell which we are about to delete - Full_cell_handle new_neighbor = outside->neighbor(index); - - // Is new_neighbor still the old neighbor? - if (new_neighbor == inside) - { - task_queue.push(IITH_task( - Facet(inside, index_of_covertex(rot)), // boundary facet - index, // index_of_inside_cell_in_outside_cell - new_s, // future_neighbor - i, // new_cell_index_in_future_neighbor - index_of_second_covertex(rot) // index_of_future_neighbor_in_new_cell - )); - } - } - } - - // If there is some neighbor stories to fix - if (task.future_neighbor != Full_cell_handle()) - { - // now the new neighboring full_cell exists, we link both - set_neighbors(new_s, - task.index_of_future_neighbor_in_new_cell, - task.future_neighbor, - task.new_cell_index_in_future_neighbor); - } - } - - return new_s; -} - -template< class Dim, class Vb, class Fcb > -template< typename Forward_iterator, typename OutputIterator > -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::insert_in_hole(Forward_iterator start, Forward_iterator end, Facet f, - OutputIterator out) /* Concept */ -{ - CGAL_expensive_precondition( - ( std::distance(start, end) == 1 ) - || ( current_dimension() > 1 ) ); - Forward_iterator sit = start; - while( end != sit ) - set_visited(*sit++, true); - Vertex_handle v = new_vertex(); - insert_in_tagged_hole(v, f, out); - delete_full_cells(start, end); - return v; -} - -template< class Dim, class Vb, class Fcb > -template< typename Forward_iterator > -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::insert_in_hole(Forward_iterator start, Forward_iterator end, Facet f) /* Concept */ -{ - Emptyset_iterator out; - return insert_in_hole(start, end, f, out); -} - -template -void -Triangulation_data_structure -::clear_visited_marks(Full_cell_handle start) const // NOT DOCUMENTED -{ - CGAL_precondition(start != Full_cell_handle()); - - std::queue queue; - set_visited(start, false); - 
queue.push(start); - const int cur_dim = current_dimension(); - while( ! queue.empty() ) - { - Full_cell_handle s = queue.front(); - queue.pop(); - for( int i = 0; i <= cur_dim; ++i ) - { - if( get_visited(s->neighbor(i)) ) - { - set_visited(s->neighbor(i), false); - queue.push(s->neighbor(i)); - } - } - } -} - -template -void Triangulation_data_structure -::do_insert_increase_dimension(Vertex_handle x, Vertex_handle star) -{ - Full_cell_handle start = full_cells_begin(); - Full_cell_handle swap_me; - const int cur_dim = current_dimension(); - for( Full_cell_iterator S = full_cells_begin(); S != full_cells_end(); ++S ) - { - if( Vertex_handle() != S->vertex(cur_dim) ) - continue; - set_visited(S, true); - // extends full_cell |S| to include the new vertex as the - // current_dimension()-th vertex - associate_vertex_with_full_cell(S, cur_dim, x); - if( ! S->has_vertex(star) ) - { // S is bounded, we create its unbounded "twin" full_cell - Full_cell_handle S_new = new_full_cell(); - set_neighbors(S, cur_dim, S_new, 0); - associate_vertex_with_full_cell(S_new, 0, star); - // here, we could be clever so as to get consistent orientation - for( int k = 1; k <= cur_dim; ++k ) - associate_vertex_with_full_cell(S_new, k, vertex(S, k - 1)); - } - } - // now we setup the neighbors - set_visited(start, false); - std::queue queue; - queue.push(start); - while( ! queue.empty() ) - { - Full_cell_handle S = queue.front(); - queue.pop(); - // here, the first visit above ensured that all neighbors exist now. - // Now we need to connect them with adjacency relation - int star_index; - if( S->has_vertex(star, star_index) ) - { - set_neighbors( S, cur_dim, neighbor(neighbor(S, star_index), cur_dim), - // this is tricky :-) : - mirror_index(S, star_index) + 1); - } - else - { - Full_cell_handle S_new = neighbor(S, cur_dim); - for( int k = 0 ; k < cur_dim ; ++k ) - { - Full_cell_handle S_opp = neighbor(S, k); - if( ! S_opp->has_vertex(star) ) - set_neighbors(S_new, k + 1, neighbor(S_opp, cur_dim), mirror_index(S, k) + 1); - // neighbor of S_new opposite to v is S_new' - // the vertex opposite to v remains the same but ... 
- // remember the shifting of the vertices one step to the right - } - } - for( int k = 0 ; k < cur_dim ; ++k ) - if( get_visited(neighbor(S, k)) ) - { - set_visited(neighbor(S, k), false); - queue.push(neighbor(S, k)); - } - } - if( ( ( cur_dim % 2 ) == 0 ) && ( cur_dim > 1 ) ) - { - for( Full_cell_iterator S = full_cells_begin(); S != full_cells_end(); ++S ) - { - if( x != S->vertex(cur_dim) ) - S->swap_vertices(cur_dim - 1, cur_dim); - } - } - if( Full_cell_handle() != swap_me ) - swap_me->swap_vertices(1, 2); -} - -template -typename Triangulation_data_structure::Vertex_handle -Triangulation_data_structure -::insert_increase_dimension(Vertex_handle star) /* Concept */ -{ - const int prev_cur_dim = current_dimension(); - CGAL_precondition(prev_cur_dim < maximal_dimension()); - if( -2 != current_dimension() ) - { - CGAL_precondition( Vertex_handle() != star ); - CGAL_expensive_precondition(is_vertex(star)); - } - - set_current_dimension(prev_cur_dim + 1); - Vertex_handle v = new_vertex(); - switch( prev_cur_dim ) - { - case -2: - { // insertion of the first vertex - // ( geometrically : infinite vertex ) - Full_cell_handle s = new_full_cell(); - associate_vertex_with_full_cell(s, 0, v); - break; - } - case -1: - { // insertion of the second vertex - // ( geometrically : first finite vertex ) - //we create a triangulation of the 0-sphere, with - // vertices |star| and |v| - Full_cell_handle infinite_full_cell = star->full_cell(); - Full_cell_handle finite_full_cell = new_full_cell(); - associate_vertex_with_full_cell(finite_full_cell, 0, v); - set_neighbors(infinite_full_cell, 0, finite_full_cell, 0); - break; - } - default: - do_insert_increase_dimension(v, star); - break; - } - return v; -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY CHECKS - -template -bool Triangulation_data_structure -::is_valid(bool verbose, int /* level */) const /* Concept */ -{ - Full_cell_const_handle s, t; - Vertex_const_handle v; - int i, j, k; - - if( current_dimension() == -2 ) - { - if( ! vertices_.empty() || ! full_cells_.empty() ) - { - if( verbose ) CGAL_warning_msg(false, "current dimension is -2 but there are vertices or full_cells"); - return false; - } - } - - if( current_dimension() == -1 ) - { - if ( (number_of_vertices() != 1) || (number_of_full_cells() != 1) ) - { - if( verbose ) CGAL_warning_msg(false, "current dimension is -1 but there isn't one vertex and one full_cell"); - return false; - } - } - - for( v = vertices_begin(); v != vertices_end(); ++v ) - { - if( ! v->is_valid(verbose) ) - return false; - } - - // FUTURE: for each vertex v, gather incident full_cells. then, check that - // any full_cell containing v is among those gathered full_cells... - - if( current_dimension() < 0 ) - return true; - - for( s = full_cells_begin(); s != full_cells_end(); ++s ) - { - if( ! 
s->is_valid(verbose) ) - return false; - // check that the full cell has no duplicate vertices - for( i = 0; i <= current_dimension(); ++i ) - for( j = i + 1; j <= current_dimension(); ++j ) - if( vertex(s,i) == vertex(s,j) ) - { - CGAL_warning_msg(false, "a full_cell has two equal vertices"); - return false; - } - } - - for( s = full_cells_begin(); s != full_cells_end(); ++s ) - { - for( i = 0; i <= current_dimension(); ++i ) - if( (t = neighbor(s,i)) != Full_cell_const_handle() ) - { - int l = mirror_index(s,i); - if( s != neighbor(t,l) || i != mirror_index(t,l) ) - { - if( verbose ) CGAL_warning_msg(false, "neighbor relation is not symmetric"); - return false; - } - for( j = 0; j <= current_dimension(); ++j ) - if( j != i ) - { - // j must also occur as a vertex of t - for( k = 0; k <= current_dimension() && ( vertex(s,j) != vertex(t,k) || k == l); ++k ) - ; - if( k > current_dimension() ) - { - if( verbose ) CGAL_warning_msg(false, "too few shared vertices between neighbors full_cells."); - return false; - } - } - } - else - { - if( verbose ) CGAL_warning_msg(false, "full_cell has a NULL neighbor"); - return false; - } - } - return true; -} - -// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// - - - - - - - - - - - - - - - - - - - - - - - - INPUT / OUTPUT - -// NOT DOCUMENTED -template -template -void Triangulation_data_structure -::write_graph(OutStream & os) -{ - std::vector > edges; - os << number_of_vertices() + 1; // add the vertex at infinity - int count(1); - for( Vertex_iterator vit = vertices_begin(); vit != vertices_end(); ++vit ) - vit->idx_ = count++; - edges.resize(number_of_vertices()+1); - for( Full_cell_iterator sit = full_cells_begin(); sit != full_cells_end(); ++sit ) - { - int v1 = 0; - while( v1 < current_dimension() ) - { - int v2 = v1 + 1; - while( v2 <= current_dimension() ) - { - int i1, i2; - if( Vertex_handle() != sit-> vertex(v1) ) - i1 = sit->vertex(v1)->idx_; - else - i1 = 0; - if( Vertex_handle() != sit-> vertex(v2) ) - i2 = sit->vertex(v2)->idx_; - else - i2 = 0; - edges[i1].insert(i2); - edges[i2].insert(i1); - ++v2; - } - ++v1; - } - } - for( std::size_t i = 0; i < edges.size(); ++i ) - { - os << std::endl << edges[i].size(); - for( std::set::const_iterator nit = edges[i].begin(); - nit != edges[i].end(); ++nit ) - { - os << ' ' << (*nit); - } - } -} - -// NOT DOCUMENTED... 
-template -std::istream & -Triangulation_data_structure -::read_full_cells(std::istream & is, const std::vector & vertices) -{ - std::size_t m; // number of full_cells - int index; - const int cd = current_dimension(); - if( is_ascii(is) ) - is >> m; - else - read(is, m, io_Read_write()); - - std::vector full_cells; - full_cells.reserve(m); - // read the vertices of each full_cell - std::size_t i = 0; - while( i < m ) - { - Full_cell_handle s = new_full_cell(); - full_cells.push_back(s); - for( int j = 0; j <= cd; ++j ) - { - if( is_ascii(is) ) - is >> index; - else - read(is, index); - s->set_vertex(j, vertices[index]); - } - // read other non-combinatorial information for the full_cells - is >> (*s); - ++i; - } - - // read the neighbors of each full_cell - i = 0; - if( is_ascii(is) ) - while( i < m ) - { - for( int j = 0; j <= cd; ++j ) - { - is >> index; - full_cells[i]->set_neighbor(j, full_cells[index]); - } - ++i; - } - else - while( i < m ) - { - for( int j = 0; j <= cd; ++j ) - { - read(is, index); - full_cells[i]->set_neighbor(j, full_cells[index]); - } - ++i; - } - - // compute the mirror indices - for( i = 0; i < m; ++i ) - { - Full_cell_handle s = full_cells[i]; - for( int j = 0; j <= cd; ++j ) - { - if( -1 != s->mirror_index(j) ) - continue; - Full_cell_handle n = s->neighbor(j); - int k = 0; - Full_cell_handle nn = n->neighbor(k); - while( s != nn ) - nn = n->neighbor(++k); - s->set_mirror_index(j,k); - n->set_mirror_index(k,j); - } - } - return is; -} - -// NOT DOCUMENTED... -template -std::ostream & -Triangulation_data_structure -::write_full_cells(std::ostream & os, std::map & index_of_vertex) const -{ - std::map index_of_full_cell; - - std::size_t m = number_of_full_cells(); - - if( is_ascii(os) ) - os << std::endl << m; - else - write(os, m, io_Read_write()); - - const int cur_dim = current_dimension(); - // write the vertex indices of each full_cell - int i = 0; - for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it ) - { - index_of_full_cell[it] = i++; - if( is_ascii(os) ) - os << std::endl; - for( int j = 0; j <= cur_dim; ++j ) - { - if( is_ascii(os) ) - os << ' ' << index_of_vertex[it->vertex(j)]; - else - write(os, index_of_vertex[it->vertex(j)]); - } - // write other non-combinatorial information for the full_cells - os << (*it); - } - - CGAL_assertion( (std::size_t) i == m ); - - // write the neighbors of each full_cell - if( is_ascii(os) ) - for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it ) - { - os << std::endl; - for( int j = 0; j <= cur_dim; ++j ) - os << ' ' << index_of_full_cell[it->neighbor(j)]; - } - else - for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it ) - { - for( int j = 0; j <= cur_dim; ++j ) - write(os, index_of_full_cell[it->neighbor(j)]); - } - - return os; -} - -// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = - -// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: - -template -std::istream & -operator>>(std::istream & is, Triangulation_data_structure & tr) - // reads : - // - the dimensions (maximal and current) - // - the number of finite vertices - // - the non combinatorial information on vertices (point, etc) - // - the number of full_cells - // - the full_cells by the indices of their vertices in the preceding list - // of vertices, plus the non combinatorial information on each full_cell - // - the neighbors of each full_cell by their index in the preceding list -{ - typedef Triangulation_data_structure TDS; - 
typedef typename TDS::Vertex_handle Vertex_handle; - - // read current dimension and number of vertices - std::size_t n; - int cd; - if( is_ascii(is) ) - is >> cd >> n; - else - { - read(is, cd); - read(is, n, io_Read_write()); - } - - CGAL_assertion_msg( cd <= tr.maximal_dimension(), "input Triangulation_data_structure has too high dimension"); - - tr.clear(); - tr.set_current_dimension(cd); - - if( n == 0 ) - return is; - - std::vector vertices; - vertices.resize(n); - - // read the vertices: - std::size_t i(0); - while( i < n ) - { - vertices[i] = tr.new_vertex(); - is >> (*vertices[i]); // read a vertex - ++i; - } - - // now, read the combinatorial information - return tr.read_full_cells(is, vertices); -} - -template -std::ostream & -operator<<(std::ostream & os, const Triangulation_data_structure & tr) - // writes : - // - the dimensions (maximal and current) - // - the number of finite vertices - // - the non combinatorial information on vertices (point, etc) - // - the number of full cells - // - the full cells by the indices of their vertices in the preceding list - // of vertices, plus the non combinatorial information on each full_cell - // - the neighbors of each full_cell by their index in the preceding list -{ - typedef Triangulation_data_structure TDS; - typedef typename TDS::Vertex_const_handle Vertex_handle; - typedef typename TDS::Vertex_const_iterator Vertex_iterator; - - // outputs dimension and number of vertices - std::size_t n = tr.number_of_vertices(); - if( is_ascii(os) ) - os << tr.current_dimension() << std::endl << n; - else - { - write(os, tr.current_dimension()); - write(os, n, io_Read_write()); - } - - if( n == 0 ) - return os; - - // write the vertices - std::map index_of_vertex; - int i = 0; - for( Vertex_iterator it = tr.vertices_begin(); it != tr.vertices_end(); ++it, ++i ) - { - os << *it; // write the vertex - if (is_ascii(os)) - os << std::endl; - index_of_vertex[it] = i; - } - CGAL_assertion( (std::size_t) i == n ); - - // output the combinatorial information - return tr.write_full_cells(os, index_of_vertex); -} - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_DATA_STRUCTURE_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h deleted file mode 100644 index 541a6a85..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_DS_FULL_CELL_H -#define CGAL_TRIANGULATION_DS_FULL_CELL_H - -#include -#include -#include -#include -#include -#include - -namespace CGAL { - -template< class TDS = void, typename FullCellStoragePolicy = Default > -class Triangulation_ds_full_cell -{ - typedef typename Default::Get::type - Storage_policy; - typedef Triangulation_ds_full_cell Self; - typedef typename TDS::Maximal_dimension Maximal_dimension; - -public: - typedef TDS Triangulation_data_structure; - typedef typename TDS::Face Face; - typedef typename TDS::Vertex_handle Vertex_handle; /* Concept */ - typedef typename TDS::Vertex_const_handle Vertex_const_handle; - typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */ - typedef typename TDS::Full_cell_const_handle Full_cell_const_handle; - typedef typename TDS::Full_cell_data TDS_data; /* data that the TDS wants to be stored here */ - template< typename TDS2 > - struct Rebind_TDS /* Concept */ - { - typedef Triangulation_ds_full_cell Other; - }; - -private: // STORAGE - typedef TFC_data< Vertex_handle, Full_cell_handle, - Maximal_dimension, Storage_policy > Combinatorics; - friend struct TFC_data< Vertex_handle, Full_cell_handle, - Maximal_dimension, Storage_policy >; - // array of vertices - typedef typename Combinatorics::Vertex_handle_array Vertex_handle_array; - // neighbor simplices - typedef typename Combinatorics::Full_cell_handle_array Full_cell_handle_array; - - // NOT DOCUMENTED... - typename Combinatorics::Xor_type xor_of_vertices(const int cur_dim) const - { - return combinatorics_.xor_of_vertices(cur_dim); - } - -public: - typedef typename Vertex_handle_array::const_iterator Vertex_handle_const_iterator; - typedef Vertex_handle_const_iterator Vertex_handle_iterator; /* Concept */ - - Triangulation_ds_full_cell(const int dmax) /* Concept */ - : combinatorics_(dmax), tds_data_() - { - CGAL_assertion( dmax > 0 ); - for( int i = 0; i <= dmax; ++i ) - { - set_neighbor(i, Full_cell_handle()); - set_vertex(i, Vertex_handle()); - set_mirror_index(i, -1); - } - } - - Triangulation_ds_full_cell(const Triangulation_ds_full_cell & s) /* Concept */ - : combinatorics_(s.combinatorics_), tds_data_(s.tds_data_) - {} - - ~Triangulation_ds_full_cell() {} - - int maximal_dimension() const /* Concept */ - { - return static_cast(vertices().size() - 1); - } - - Vertex_handle_const_iterator vertices_begin() const /* Concept */ - { - return vertices().begin(); - } - - Vertex_handle_const_iterator vertices_end() const /* Concept */ - { - return vertices().end(); - } - - Vertex_handle vertex(const int i) const /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - return vertices()[i]; - } - - Full_cell_handle neighbor(const int i) const /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - return neighbors()[i]; - } - - int mirror_index(const int i) const /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - return combinatorics_.mirror_index(i); - } - - // Advanced... - Vertex_handle mirror_vertex(const int i, const int cur_dim) const /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - return combinatorics_.mirror_vertex(i, cur_dim); - } - - int index(Full_cell_const_handle s) const /* Concept */ - { - // WE ASSUME THE FULL CELL WE ARE LOOKING FOR INDEED EXISTS ! 
- CGAL_precondition(has_neighbor(s)); - int index(0); - while( neighbor(index) != s ) - ++index; - return index; - } - - int index(Vertex_const_handle v) const /* Concept */ - { - // WE ASSUME THE VERTEX WE ARE LOOKING FOR INDEED EXISTS ! - CGAL_precondition(has_vertex(v)); - int index(0); - while( vertex(index) != v ) - ++index; - return index; - } - - void set_vertex(const int i, Vertex_handle v) /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - vertices()[i] = v; - } - - void set_neighbor(const int i, Full_cell_handle s) /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - neighbors()[i] = s; - } - - void set_mirror_index(const int i, const int index) /* Concept */ - { - CGAL_precondition(0<=i && i<=maximal_dimension()); - combinatorics_.set_mirror_index(i, index); - } - - bool has_vertex(Vertex_const_handle v) const /* Concept */ - { - int index; - return has_vertex(v, index); - } - - bool has_vertex(Vertex_const_handle v, int & index) const /* Concept */ - { - const int d = maximal_dimension(); - index = 0; - while( (index <= d) && (vertex(index) != v) ) - ++index; - return (index <= d); - } - - bool has_neighbor(Full_cell_const_handle s) const /* Concept */ - { - int index; - return has_neighbor(s, index); - } - - bool has_neighbor(Full_cell_const_handle s, int & index) const /* Concept */ - { - const int d = maximal_dimension(); - index = 0; - while( (index <= d) && (neighbor(index) != s) ) - ++index; - return (index <= d); - } - - void swap_vertices(const int d1, const int d2) /* Concept */ - { - CGAL_precondition(0 <= d1 && d1<=maximal_dimension()); - CGAL_precondition(0 <= d2 && d2<=maximal_dimension()); - combinatorics_.swap_vertices(d1, d2); - } - - const TDS_data & tds_data() const { return tds_data_; } /* Concept */ - TDS_data & tds_data() { return tds_data_; } /* Concept */ - - void* for_compact_container() const { return combinatorics_.for_compact_container(); } - void* & for_compact_container() { return combinatorics_.for_compact_container(); } - - bool is_valid(bool verbose = false, int = 0) const /* Concept */ - { - const int d = maximal_dimension(); - int i(0); - // test that the non-null Vertex_handles come first, before all null ones - while( i <= d && vertex(i) != Vertex_handle() ) ++i; - while( i <= d && vertex(i) == Vertex_handle() ) ++i; - if( i <= d ) - { - if( verbose ) CGAL_warning_msg(false, "full cell has garbage handles to vertices."); - return false; - } - for( i = 0; i <= d; ++i ) - { - if( Vertex_handle() == vertex(i) ) - break; // there are no more vertices - Full_cell_handle n(neighbor(i)); - if( Full_cell_handle() != n ) - { - int mirror_idx(mirror_index(i)); - if( n->neighbor(mirror_idx) == Full_cell_handle() ) - { - if( verbose ) CGAL_warning_msg(false, "neighbor has no back-neighbor."); - return false; - } - if( &(*(n->neighbor(mirror_idx))) != this ) - { - if( verbose ) CGAL_warning_msg(false, "neighbor does not point back to correct full cell."); - return false; - } - } - } - return true; - } - -private: - // access to data members: - Full_cell_handle_array & neighbors() {return combinatorics_.neighbors_; } - const Full_cell_handle_array & neighbors() const {return combinatorics_.neighbors_; } - Vertex_handle_array & vertices() {return combinatorics_.vertices_; } - const Vertex_handle_array & vertices() const {return combinatorics_.vertices_; } - - // DATA MEMBERS - Combinatorics combinatorics_; - mutable TDS_data tds_data_; -}; - -// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: - -template < typename TDS, 
typename SSP > -std::ostream & -operator<<(std::ostream & O, const Triangulation_ds_full_cell &) /* Concept */ -{ - /*if( is_ascii(O) ) - { - // os << '\n'; - } - else {}*/ - return O; -} - -template < typename TDS, typename SSP > -std::istream & -operator>>(std::istream & I, Triangulation_ds_full_cell &) /* Concept */ -{ - /*if( is_ascii(I) ) - {} - else {}*/ - return I; -} - -// Special case: specialization when template parameter is void. - -// we must declare it for each possible full_cell storage policy because : -// (GCC error:) default template arguments may not be used in partial specializations -template< typename StoragePolicy > -class Triangulation_ds_full_cell -{ -public: - typedef internal::Triangulation::Dummy_TDS TDS; - typedef TDS Triangulation_data_structure; - typedef TDS::Vertex_handle Vertex_handle; - typedef TDS::Vertex_const_handle Vertex_const_handle; - typedef TDS::Full_cell_handle Full_cell_handle; - typedef TDS::Full_cell_const_handle Full_cell_const_handle; - typedef TDS::Vertex_handle_const_iterator Vertex_handle_const_iterator; - typedef TDS::Full_cell_data TDS_data; - template - struct Rebind_TDS - { - typedef Triangulation_ds_full_cell Other; - }; - Vertex_handle_const_iterator vertices_begin(); - Vertex_handle_const_iterator vertices_end(); -}; - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_DS_FULL_CELL_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h deleted file mode 100644 index 381b97e1..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
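For readers skimming this hunk: the is_valid() routine above relies on the mirror-index invariant, namely that if n is the i-th neighbor of a cell c, then n stores c back in its own neighbor array at position c.mirror_index(i). The following standalone C++ sketch, with hypothetical names Cell and glue that do not exist in CGAL or GUDHI, illustrates that invariant outside the library:

#include <cassert>
#include <vector>

struct Cell {
  std::vector<Cell*> neighbor;      // neighbor[i] is the cell across the facet opposite vertex i
  std::vector<int>   mirror_index;  // position of this cell in neighbor[i]'s own neighbor array
  explicit Cell(int d) : neighbor(d + 1, nullptr), mirror_index(d + 1, -1) {}
};

// Record that a and b are adjacent across facet i of a and facet j of b,
// keeping the two back-pointers consistent.
void glue(Cell& a, int i, Cell& b, int j) {
  a.neighbor[i] = &b;  a.mirror_index[i] = j;
  b.neighbor[j] = &a;  b.mirror_index[j] = i;
}

int main() {
  Cell c(2), n(2);                  // two full cells of dimension 2
  glue(c, 0, n, 1);
  // The consistency check performed by is_valid() above:
  assert(c.neighbor[0]->neighbor[c.mirror_index[0]] == &c);
  return 0;
}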
-// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_DS_VERTEX_H -#define CGAL_TRIANGULATION_DS_VERTEX_H - -#include -#include - -namespace CGAL { - -/* The template parameter TDS must be a model of the concept - * 'TriangulationDataStructure' that stores vertices of type - * 'Triangulation_ds_vertex' - */ -template< class TDS = void > -class Triangulation_ds_vertex -{ - typedef Triangulation_ds_vertex Self; - -public: - typedef TDS Triangulation_data_structure; - typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */ - - template - struct Rebind_TDS /* Concept */ - { - typedef Triangulation_ds_vertex Other; - }; - -protected: // DATA MEMBERS - Full_cell_handle full_cell_; // A handle to an incident full_cell - -public: - // Constructs a vertex with incident full_cell 's' - Triangulation_ds_vertex(Full_cell_handle s) : full_cell_(s) /* Concept */ - { - CGAL_assertion( Full_cell_handle() != s ); - } - // Constructs a vertex with no incident full_cell - Triangulation_ds_vertex() : full_cell_() {} /* Concept */ - - ~Triangulation_ds_vertex() {} - - /// Set 's' as an incident full_cell - void set_full_cell(Full_cell_handle s) /* Concept */ - { - full_cell_ = s; - } - - /// Returns a full_cell incident to the vertex - Full_cell_handle full_cell() const /* Concept */ - { - return full_cell_; - } - - bool is_valid(bool verbose = false, int /* level */ = 0) const /* Concept */ - { - if( Full_cell_handle() == full_cell() ) - { - if( verbose ) - CGAL_warning_msg(false, "vertex has no incident full cell."); - return false; - } - bool found(false); - // These two typename below are OK because TDS fullfils the - // TriangulationDataStructure concept. - typename TDS::Full_cell::Vertex_handle_iterator vit(full_cell()->vertices_begin()); - typedef typename TDS::Vertex_handle Vertex_handle; - while( vit != full_cell()->vertices_end() ) - { - if( Vertex_handle() == *vit ) - break; // The full cell has no more vertices - if( this == &(**vit) ) - { - found = true; - break; - } - ++vit; - } - if( ! found ) - { - if( verbose ) - CGAL_warning_msg(false, "vertex's adjacent full cell does not contain that vertex."); - return false; - } - return true; - } - -public: // FOR MEMORY MANAGEMENT - - void* for_compact_container() const { return full_cell_.for_compact_container(); } - void* & for_compact_container() { return full_cell_.for_compact_container(); } - -}; // end of Triangulation_ds_vertex - -// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: - -template < class TDS > -std::istream & -operator>>(std::istream & is, Triangulation_ds_vertex &) /* Concept */ -{ - /*if( is_ascii(is) ) - {} - else {}*/ - return is; -} - -template< class TDS > -std::ostream & -operator<<(std::ostream & os, const Triangulation_ds_vertex &) /* Concept */ -{ - /*if( is_ascii(os) ) - { - os << '\n'; - } - else {}*/ - return os; -} - -// Special case: specialization when template parameter is void. 
- -template<> -class Triangulation_ds_vertex -{ -public: - typedef internal::Triangulation::Dummy_TDS Triangulation_data_structure; - typedef Triangulation_data_structure::Full_cell_handle Full_cell_handle; /* Concept */ - template - struct Rebind_TDS /* Concept */ - { - typedef Triangulation_ds_vertex Other; - }; -}; - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_DS_VERTEX_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_face.h b/src/common/include/gudhi_patches/CGAL/Triangulation_face.h deleted file mode 100644 index bc9c1781..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation_face.h +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_FACE_H -#define CGAL_TRIANGULATION_FACE_H - -#include -#include - -namespace CGAL { - -template< typename TDS > -class Triangulation_face -{ - typedef typename internal::Dimen_plus_one::type Dimen_plus; -public: - typedef TDS Triangulation_data_structure; - typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */ - typedef typename TDS::Vertex_handle Vertex_handle; /* Concept */ - typedef internal::S_or_D_array Indices; - -protected: - Full_cell_handle full_cell_; - Indices indices_; - -public: - explicit Triangulation_face(Full_cell_handle s) /* Concept */ - : full_cell_(s), indices_(s->maximal_dimension()+2) - { - CGAL_assertion( Full_cell_handle() != s ); - clear(); - } - - explicit Triangulation_face(const int maximal_dim) /* Concept */ - : full_cell_(), indices_(maximal_dim+2) - { - clear(); - } - - Triangulation_face(const Triangulation_face & f) /* Concept */ - : full_cell_(f.full_cell_), indices_(f.indices_) - {} - - int face_dimension() const /* Concept */ - { - int i(0); - while( -1 != indices_[i] ) ++i; - return (i-1); - } - - Full_cell_handle full_cell() const /* Concept */ - { - return full_cell_; - } - - int index(const int i) const /* Concept */ - { - CGAL_precondition( (0 <= i) && (i <= face_dimension()) ); - return indices_[i]; - } - - Vertex_handle vertex(const int i) const /* Concept */ - { - int j = index(i); - if( j == -1 ) - return Vertex_handle(); - return full_cell()->vertex(j); - } - -// - - - - - - - - - - - - - - - - - - UPDATE FUNCTIONS - - void clear() /* Concept */ - { - const std::size_t d = indices_.size(); - for(std::size_t i = 0; i < d; ++i ) - indices_[i] = -1; - } - - void set_full_cell(Full_cell_handle s) /* Concept */ - { - CGAL_precondition( Full_cell_handle() != s ); - full_cell_ = s; - } - - void set_index(const int i, const int idx) /* Concept */ - { - CGAL_precondition( (0 <= i) && ((size_t)i+1 < indices_.size()) ); - CGAL_precondition( (0 <= idx) && ((size_t)idx < indices_.size()) ); - indices_[i] = idx; - } -}; - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_FACE_H diff --git 
a/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h b/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h deleted file mode 100644 index a0c5246f..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_SIMPLEX_H -#define CGAL_TRIANGULATION_SIMPLEX_H - -#include -#include -#include -#include - -namespace CGAL { - -struct No_full_cell_data {}; - -template< class TriangulationTraits, typename Data_ = No_full_cell_data, class TDSFullCell = Default > -class Triangulation_full_cell : public Default::Get >::type -{ - // The default type for TDSFullCell is Triangulation_ds_full_cell<> : - typedef typename Default::Get >::type - Base; - typedef Triangulation_full_cell Self; -public: - typedef Data_ Data; - typedef typename Base::Vertex_handle Vertex_handle; - typedef typename Base::Vertex_const_handle Vertex_const_handle; - typedef typename Base::Vertex_handle_const_iterator Vertex_handle_const_iterator; - typedef typename Base::Full_cell_const_handle Full_cell_const_handle; - typedef typename TriangulationTraits::Point_d Point; - typedef typename TriangulationTraits::Point_d Point_d; - -private: // DATA MEMBERS - Data data_; - -public: - - using Base::vertices_begin; - using Base::vertices_end; - - template< class TDS2 > - struct Rebind_TDS - { - typedef typename Base::template Rebind_TDS::Other TDSFullCell2; - typedef Triangulation_full_cell Other; - }; - - Triangulation_full_cell(const int d) - : Base(d), data_() {} - - Triangulation_full_cell(const Self & s) - : Base(s), data_(s.data_) {} - - const Data & data() const - { - return data_; - } - - Data & data() - { - return data_; - } - - struct Point_from_vertex_handle - { - typedef Vertex_handle argument_type; - typedef Point result_type; - result_type & operator()(argument_type & x) const - { - return x->point(); - } - const result_type & operator()(const argument_type & x) const - { - return x->point(); - } - }; - -protected: - - typedef CGAL::Iterator_project< - Vertex_handle_const_iterator, - internal::Triangulation::Point_from_vertex_handle - > Point_const_iterator; - - Point_const_iterator points_begin() const - { return Point_const_iterator(Base::vertices_begin()); } - Point_const_iterator points_end() const - { return Point_const_iterator(Base::vertices_end()); } -}; - -// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: - -inline -std::istream & -operator>>(std::istream & is, No_full_cell_data &) -{ - return is; -} - -inline -std::ostream & -operator<<(std::ostream & os, const No_full_cell_data &) -{ - return os; -} - -template < typename TDS, typename Data, typename SSP > -std::ostream & -operator<<(std::ostream & O, const Triangulation_full_cell & s) -{ - /*if( is_ascii(O) ) - { - // os << '\n'; - } - else {}*/ - O << 
s.data(); - return O; -} - -template < typename TDS, typename Data, typename SSP > -std::istream & -operator>>(std::istream & I, Triangulation_full_cell & s) -{ - /*if( is_ascii(I) ) - {} - else {}*/ - I >> s.data(); - return I; -} - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_SIMPLEX_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h b/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h deleted file mode 100644 index f364717f..00000000 --- a/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_TRIANGULATION_VERTEX_H -#define CGAL_TRIANGULATION_VERTEX_H - -#include -#include - -namespace CGAL { - -struct No_vertex_data {}; - -template< class TriangulationTraits, typename Data_ = No_vertex_data, class TDSVertex = Default > -class Triangulation_vertex : public Default::Get >::type -{ - // The default type for TDSVertex is Triangulation_ds_vertex<> : - typedef typename Default::Get >::type - Base; - typedef Triangulation_vertex Self; -public: - typedef Data_ Data; - typedef typename TriangulationTraits::Point_d Point; - typedef typename TriangulationTraits::Point_d Point_d; - typedef typename Base::Full_cell_handle Full_cell_handle; - - template - struct Rebind_TDS - { - typedef typename Base::template Rebind_TDS::Other TDSVertex2; - typedef Triangulation_vertex Other; - }; - -private: // DATA MEMBERS - Point point_; - Data data_; - -public: - template< typename T > - Triangulation_vertex(Full_cell_handle s, const Point & p, const T & t) - : Base(s), point_(p), data_(t) {} - Triangulation_vertex(Full_cell_handle s, const Point & p) - : Base(s), point_(p), data_() {} - template< typename T > - Triangulation_vertex(const Point & p, const T & t) - : Base(), point_(p), data_(t) {} - Triangulation_vertex(const Point & p) - : Base(), point_(p), data_() {} - Triangulation_vertex() : Base(), point_(), data_() {} - - ~Triangulation_vertex() {} - - /// Set the position in space of the vertex to 'p' - void set_point(const Point & p) - { - point_ = p; - } - - /// Returns the position in space of the vertex - const Point & point() const - { - return point_; - } - - const Data & data() const - { - return data_; - } - - Data & data() - { - return data_; - } - -}; // end of Triangulation_vertex - -// NON CLASS-MEMBER FUNCTIONS - -inline -std::istream & -operator>>(std::istream & is, No_vertex_data &) -{ - return is; -} - -inline -std::ostream & -operator<<(std::ostream & os, const No_vertex_data &) -{ - return os; -} - -template < class A, typename Data, class B > -std::istream & -operator>>(std::istream & is, Triangulation_vertex & v) -{ - is >> v.point(); - return (is >> v.data()); -} - -template< class A, typename Data, class B > -std::ostream & -operator<<(std::ostream & os, const 
Triangulation_vertex & v) -{ - os << v.point(); - os << v.data(); - return os; -} - -} //namespace CGAL - -#endif // CGAL_TRIANGULATION_VERTEX_H diff --git a/src/common/include/gudhi_patches/CGAL/argument_swaps.h b/src/common/include/gudhi_patches/CGAL/argument_swaps.h deleted file mode 100644 index aa16f29b..00000000 --- a/src/common/include/gudhi_patches/CGAL/argument_swaps.h +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_ARGUMENT_SWAPS_H -#define CGAL_ARGUMENT_SWAPS_H - -#include -#include - -#ifndef CGAL_CXX11 -#include -#include -#endif - -namespace CGAL { - -#ifdef CGAL_CXX11 - -namespace internal { - -template struct Apply_to_last_then_rest_; - -template -struct Apply_to_last_then_rest_ { - typedef typename Apply_to_last_then_rest_::result_type result_type; - inline result_type operator()(F&&f,T&&t,U&&...u)const{ - return Apply_to_last_then_rest_()( - std::forward(f), - std::forward(u)..., - std::forward(t)); - } -}; - -template -struct Apply_to_last_then_rest_<0,F,T,U...> { - typedef decltype(std::declval()(std::declval(), std::declval()...)) result_type; - inline result_type operator()(F&&f,T&&t,U&&...u)const{ - return std::forward(f)(std::forward(t), std::forward(u)...); - } -}; - -} // namespace internal - - -struct Apply_to_last_then_rest { - template inline - typename internal::Apply_to_last_then_rest_::result_type - operator()(F&&f,T&&t,U&&...u)const{ - return internal::Apply_to_last_then_rest_()( - std::forward(f), - std::forward(t), - std::forward(u)...); - } -}; - -#else // CGAL_CXX11 - -struct Apply_to_last_then_rest { -#define CGAL_CODE(Z,N,_) template \ - typename boost::result_of::type \ - operator()(F const&f, BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t), T const&t) const { \ - return f(t,BOOST_PP_ENUM_PARAMS(N,t)); \ - } - BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) -#undef CGAL_CODE -}; - -#endif // CGAL_CXX11 - -} // namespace CGAL - -#endif // CGAL_ARGUMENT_SWAPS_H diff --git a/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h b/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h deleted file mode 100644 index e1bad64e..00000000 --- a/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_DETVEC_H -#define CGAL_DETVEC_H -#include -#include - -namespace CGAL { - // TODO: determine whether it is better to pass them by lines or columns. - - template inline - NT determinant_of_vectors(Vector const&a, Vector const&b){ - return determinant(a[0],a[1],b[0],b[1]); - } - template inline - typename Sgn::result_type - sign_of_determinant_of_vectors(Vector const&a, Vector const&b){ - return sign_of_determinant(a[0],a[1],b[0],b[1]); - } - - template - NT determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c){ - return determinant(a[0],a[1],a[2],b[0],b[1],b[2],c[0],c[1],c[2]); - } - template - typename Sgn::result_type - sign_of_determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c){ - return sign_of_determinant(a[0],a[1],a[2],b[0],b[1],b[2],c[0],c[1],c[2]); - } - - template - NT determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c, Vector const&d){ - return determinant( - a[0],a[1],a[2],a[3], - b[0],b[1],b[2],b[3], - c[0],c[1],c[2],c[3], - d[0],d[1],d[2],d[3]); - } - template - typename Sgn::result_type - sign_of_determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c, Vector const&d){ - return sign_of_determinant( - a[0],a[1],a[2],a[3], - b[0],b[1],b[2],b[3], - c[0],c[1],c[2],c[3], - d[0],d[1],d[2],d[3]); - } - - template - NT determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e){ - return determinant( - a[0],a[1],a[2],a[3],a[4], - b[0],b[1],b[2],b[3],b[4], - c[0],c[1],c[2],c[3],c[4], - d[0],d[1],d[2],d[3],d[4], - e[0],e[1],e[2],e[3],e[4]); - } - template - typename Sgn::result_type - sign_of_determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e){ - return sign_of_determinant( - a[0],a[1],a[2],a[3],a[4], - b[0],b[1],b[2],b[3],b[4], - c[0],c[1],c[2],c[3],c[4], - d[0],d[1],d[2],d[3],d[4], - e[0],e[1],e[2],e[3],e[4]); - } - - template - NT determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f){ - return determinant( - a[0],a[1],a[2],a[3],a[4],a[5], - b[0],b[1],b[2],b[3],b[4],b[5], - c[0],c[1],c[2],c[3],c[4],c[5], - d[0],d[1],d[2],d[3],d[4],d[5], - e[0],e[1],e[2],e[3],e[4],e[5], - f[0],f[1],f[2],f[3],f[4],f[5]); - } - template - typename Sgn::result_type - sign_of_determinant_of_vectors(Vector const&a, Vector const&b, - Vector const&c, Vector const&d, Vector const&e, Vector const&f){ - return sign_of_determinant( - a[0],a[1],a[2],a[3],a[4],a[5], - b[0],b[1],b[2],b[3],b[4],b[5], - c[0],c[1],c[2],c[3],c[4],c[5], - d[0],d[1],d[2],d[3],d[4],d[5], - e[0],e[1],e[2],e[3],e[4],e[5], - f[0],f[1],f[2],f[3],f[4],f[5]); - } - -} -#endif diff --git a/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h b/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h deleted file mode 100644 index f411e827..00000000 --- a/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). 
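The two- and three-vector overloads above forward to CGAL's determinant helpers on the listed coordinates. As a standalone illustration (the helpers det2 and det3 below are hypothetical and not part of CGAL), the same quantities can be computed directly with the usual cofactor expansion:

#include <array>
#include <cassert>

// Determinant of the matrix whose rows are the given coordinate vectors.
template <class NT, class Vector>
NT det2(Vector const& a, Vector const& b) {
  return a[0] * b[1] - a[1] * b[0];
}

template <class NT, class Vector>
NT det3(Vector const& a, Vector const& b, Vector const& c) {
  return a[0] * (b[1] * c[2] - b[2] * c[1])
       - a[1] * (b[0] * c[2] - b[2] * c[0])
       + a[2] * (b[0] * c[1] - b[1] * c[0]);
}

int main() {
  std::array<double, 2> a{{3, 1}}, b{{2, 4}};
  assert(det2<double>(a, b) == 10);                      // 3*4 - 1*2
  std::array<double, 3> u{{1, 0, 0}}, v{{0, 1, 0}}, w{{0, 0, 1}};
  assert(det3<double>(u, v, w) == 1);                    // identity matrix
  return 0;
}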
-// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_INTERNAL_COMBINATION_ENUMERATOR_H -#define CGAL_INTERNAL_COMBINATION_ENUMERATOR_H - -#include -#include - -namespace CGAL { - -namespace internal { - -class Combination_enumerator -{ - // types and member data - typedef std::vector Combination; - Combination combi_; - const int k_; - const int min_; - const int max_; - const int max_at_pos_0_; - -public: - - // For generating all the combinations of |k| distinct elements in the - // interval [min, max] (both included) - Combination_enumerator(const int k, const int min, const int max) - : combi_(k), k_(k), min_(min), max_(max), max_at_pos_0_(max + 1 - k) - { - CGAL_assertion_msg( min <= max, "min is larger than max"); - CGAL_assertion_msg( 1 <= k && k <= ( max - min + 1 ), "wrong value of k"); - init(); - } - - Combination_enumerator(const Combination_enumerator & c) - : combi_(c.combi_), k_(c.k_), min_(c.min_), max_(c.max_), max_at_pos_0_(c.max_at_pos_0_) - {} - - int number_of_elements() - { - return k_; - } - - void init() - { - combi_.resize(k_); - for( int i = 0; i < k_; ++i ) - element(i) = min_ + i; - } - - bool end() const - { - return ( element(0) > max_at_pos_0_ ); - } - - int element(const int i) const - { - CGAL_assertion( 0 <= i && i < k_ ); - return combi_[i]; - } - - int & element(const int i) - { - CGAL_assertion( 0 <= i && i < k_ ); - return combi_[i]; - } - - int operator[](const int i) const - { - return element(i); - } - - int & operator[](const int i) - { - return element(i); - } - - void operator++() - { - int i = k_ - 1; - int max_at_pos_i(max_); - while( ( i >= 0 ) && ( element(i) >= max_at_pos_i ) ) - { - --i; - --max_at_pos_i; - } - if( -1 == i ) - { - if( element(0) == max_at_pos_0_ ) - ++element(0); // mark then end of the enumeration with an impossible value - // Note than when we have arrived at the end of the enumeration, applying - // operator++() again does not change anything, so it is safe to - // apply it too many times. - } - else - { - ++element(i); - for( int j = i + 1; j < k_; ++j ) - element(j) = element(i) + j - i; - } - } - - Combination_enumerator operator++(int) - { - Combination_enumerator tmp(*this); - ++(*this); - return tmp; - } - - // - - - - - - - - - - - - - - - - - - - - - - - - - - - TESTING -#if 0 - void test() - { - std::cerr << '\n'; - while( ! end() ) - { - std::cerr << '\n'; - for( int i = 0; i < k_; ++i ) - std::cerr << element(i) << ' '; - ++(*this); - } - init(); - } -#endif -}; - -} // end of namespace internal - -} // end of namespace CGAL - -#endif // CGAL_INTERNAL_COMBINATION_ENUMERATOR_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h b/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h deleted file mode 100644 index ee6195d9..00000000 --- a/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). 
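The enumerator above visits the combinations in lexicographic order: operator++ advances the rightmost entry that can still grow and resets the entries after it, and the end of the enumeration is marked by pushing element(0) past its largest legal value. A minimal standalone sketch of that scheme (plain loops, not the CGAL::internal class itself) for k = 2 over {1, ..., 4}:

#include <iostream>
#include <vector>

int main() {
  const int k = 2, vmin = 1, vmax = 4;               // choose k values in [vmin, vmax]
  std::vector<int> c(k);
  for (int i = 0; i < k; ++i) c[i] = vmin + i;       // init(): smallest combination
  const int max_at_pos_0 = vmax + 1 - k;             // same bound as end() above
  while (c[0] <= max_at_pos_0) {
    for (int x : c) std::cout << x << ' ';           // prints 1 2, 1 3, 1 4, 2 3, 2 4, 3 4
    std::cout << '\n';
    int i = k - 1, max_at_pos_i = vmax;
    while (i >= 0 && c[i] >= max_at_pos_i) { --i; --max_at_pos_i; }
    if (i < 0) {
      ++c[0];                                        // impossible value marks the end
    } else {
      ++c[i];
      for (int j = i + 1; j < k; ++j) c[j] = c[i] + j - i;
    }
  }
  return 0;
}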
-// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H -#define CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H - -#include -#include -#include -#include - -namespace CGAL { - -namespace internal { - -// Utility for adding one to an Dimension_tag: - -template -struct Dimen_plus_one; - -template<> -struct Dimen_plus_one -{ - typedef Dynamic_dimension_tag type; -}; - -template -struct Dimen_plus_one > -{ - typedef Dimension_tag type; -}; - -// A SMALL CONTAINER UTILITY FOR DYNAMIC/STATIC MEMORY MANAGEMENT - -// stores an array of static or dynamic size, depending on template parameter . - -template< typename Containee, typename D, bool WithCompactContainerHelper = false> - struct S_or_D_array; // S = static, D = dynamic - -// The case of static size: -template< typename Containee, int D, bool WithCompactContainerHelper > -struct S_or_D_array< Containee, Dimension_tag< D >, WithCompactContainerHelper > -: public array -{ - typedef array Base; - S_or_D_array(const int) - : Base() - {} - S_or_D_array(const int, const Containee & c) - : Base() - { - assign(c); - } - void* for_compact_container() const - { - return (*this)[0].for_compact_container(); - } - void* & for_compact_container() - { - return (*this)[0].for_compact_container(); - } -}; - -// The case of dynamic size -template< typename Containee > -struct S_or_D_array< Containee, Dynamic_dimension_tag, false > -: public std::vector -{ - typedef std::vector Base; - // TODO: maybe we should use some "small-vector-optimized" class. - S_or_D_array(const int d) - : Base(d) - {} - S_or_D_array(const int d, const Containee & c) - : Base(d, c) - {} -}; - -// The case of dynamic size with for_compact_container -template< typename Containee > -struct S_or_D_array< Containee, Dynamic_dimension_tag, true > -: public std::vector -{ - typedef std::vector Base; - S_or_D_array(const int d) - : Base(d), fcc_(NULL) - {} - S_or_D_array(const int d, const Containee & c) - : Base(d, c), fcc_(NULL) - {} - void* fcc_; - void* for_compact_container() const { return fcc_; } - void* & for_compact_container() { return fcc_; } -}; - -} // end of namespace internal - -} // end of namespace CGAL - -#endif // CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h deleted file mode 100644 index b3a0ec98..00000000 --- a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. 
-// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H -#define CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H - -namespace CGAL { - -namespace internal { -namespace Triangulation { - -struct Dummy_TDS -{ - struct Vertex {}; - struct Vertex_handle {}; - struct Vertex_iterator {}; - struct Vertex_const_handle {}; - struct Vertex_const_iterator {}; - struct Full_cell {}; - struct Full_cell_handle {}; - struct Full_cell_iterator {}; - struct Full_cell_const_handle {}; - struct Full_cell_const_iterator {}; - struct Vertex_handle_const_iterator {}; - struct Full_cell_data {}; -}; - -} // namespace Triangulation -} // namespace internal - -} //namespace CGAL - -#endif // CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h deleted file mode 100644 index 7e360026..00000000 --- a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus (Well... `copy, paste and hack' of Monique Teillaud's work) - -#ifndef CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H -#define CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H - -namespace CGAL { - -namespace internal { -namespace Triangulation { - -template< typename TDS > -class Triangulation_ds_facet_iterator -{ - typedef typename TDS::Full_cell_handle Full_cell_handle; - typedef typename TDS::Facet Facet; - - typedef Facet value_type; - typedef const Facet * pointer; - typedef const Facet & reference; - typedef std::size_t size_type; - typedef std::ptrdiff_t difference_type; - typedef std::bidirectional_iterator_tag iterator_category; - - typedef Triangulation_ds_facet_iterator Facet_iterator; - - TDS & tds_; - Facet ft_; - const int cur_dim_; - -public: - Triangulation_ds_facet_iterator(TDS & tds) - : tds_(tds), ft_(tds.full_cells_begin(), 0), cur_dim_(tds.current_dimension()) - { - CGAL_assertion( cur_dim_ > 0 ); - while( ! 
canonical() ) - raw_increment(); - } - - Triangulation_ds_facet_iterator(TDS & tds, int) - : tds_(tds), ft_(tds.full_cells_end(), 0), cur_dim_(tds.current_dimension()) - { - CGAL_assertion( cur_dim_ > 0 ); - CGAL_assertion( canonical() ); - } - - Facet_iterator & operator++() - { - increment(); - return (*this); - } - - Facet_iterator operator++(int) - { - Facet_iterator tmp(*this); - increment(); - return tmp; - } - - Facet_iterator & operator--() - { - decrement(); - return (*this); - } - - Facet_iterator operator--(int) - { - Facet_iterator tmp(*this); - decrement(); - return tmp; - } - - bool operator==(const Facet_iterator & fi) const - { - return (&tds_ == &fi.tds_) && - (tds_.index_of_covertex(ft_) == fi.tds_.index_of_covertex(fi.ft_)) && - (tds_.full_cell(ft_) == fi.tds_.full_cell(fi.ft_)); - } - - bool operator!=(const Facet_iterator & fi) const - { - return !(*this == fi); - } - - reference operator*() const - { - return ft_; - } - - pointer operator->() const - { - return &ft_; - } - -private: - bool canonical() - { - if( tds_.full_cells_end() == tds_.full_cell(ft_) ) - return ( 0 == tds_.index_of_covertex(ft_) ); - return ( tds_.full_cell(ft_) < - tds_.full_cell(ft_)->neighbor(tds_.index_of_covertex(ft_)) ); - } - - void raw_decrement() - { - int i = tds_.index_of_covertex(ft_); - if( i == 0 ) - ft_ = Facet(--tds_.full_cell(ft_), cur_dim_); - else - ft_ = Facet(tds_.full_cell(ft_), i - 1); - } - - void raw_increment() - { - int i = tds_.index_of_covertex(ft_); - if( i == cur_dim_ ) - ft_ = Facet(++tds_.full_cell(ft_), 0); - else - ft_ = Facet(tds_.full_cell(ft_), i + 1); - } - - void decrement() - { - do { raw_decrement(); } while( ! canonical() ); - } - - void increment() - { - do { raw_increment(); } while( ! canonical() ); - } -}; - -} // namespace Triangulation -} // namespace internal - -} //namespace CGAL - -#endif // CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h deleted file mode 100644 index a1ffc775..00000000 --- a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). -// All rights reserved. -// -// This file is part of CGAL (www.cgal.org). -// You can redistribute it and/or modify it under the terms of the GNU -// General Public License as published by the Free Software Foundation, -// either version 3 of the License, or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
-// -// $URL$ -// $Id$ -// -// Author(s) : Samuel Hornus - -#ifndef CGAL_INTERNAL_TRIANGULATION_UTILITIES_H -#define CGAL_INTERNAL_TRIANGULATION_UTILITIES_H - -#include - -namespace CGAL { - -namespace internal { -namespace Triangulation { - -template< class TDS > -struct Dark_full_cell_data -{ - typedef typename TDS::Full_cell_handle Full_cell_handle; - Full_cell_handle light_copy_; - int count_; - Dark_full_cell_data() : light_copy_(), count_(0) {} -}; - -template< class TDS > -struct Compare_faces_with_common_first_vertex -{ - typedef typename TDS::Face Face; - - const int d_; - -public: - - Compare_faces_with_common_first_vertex(const int d) - : d_(d) - { - CGAL_assertion( 0 < d ); - } - - explicit Compare_faces_with_common_first_vertex(); - - bool operator()(const Face & left, const Face & right) const - { - CGAL_assertion( d_ == left.face_dimension() ); - CGAL_assertion( d_ == right.face_dimension() ); - for( int i = 1; i <= d_; ++i ) - { - if( left.vertex(i) < right.vertex(i) ) - return true; - if( right.vertex(i) < left.vertex(i) ) - return false; - } - return false; - } -}; - -template< class T > -struct Compare_vertices_for_upper_face -{ - typedef typename T::Vertex_const_handle VCH; - - const T & t_; - -public: - - Compare_vertices_for_upper_face(const T & t) - : t_(t) - {} - - explicit Compare_vertices_for_upper_face(); - - bool operator()(const VCH & left, const VCH & right) const - { - if( left == right ) - return false; - if( t_.is_infinite(left) ) - return true; - if( t_.is_infinite(right) ) - return false; - return left < right; - } -}; - -template< class T > -struct Compare_points_for_perturbation -{ - typedef typename T::Geom_traits::Point_d Point; - - const T & t_; - -public: - - Compare_points_for_perturbation(const T & t) - : t_(t) - {} - - explicit Compare_points_for_perturbation(); - - bool operator()(const Point * left, const Point * right) const - { - return (SMALLER == t_.geom_traits().compare_lexicographically_d_object()(*left, *right)); - } -}; - -template< class T > -struct Point_from_pointer -{ - typedef const typename T::Geom_traits::Point_d * argument_type; - typedef const typename T::Geom_traits::Point_d result_type; - result_type & operator()(argument_type & x) const - { - return (*x); - } - const result_type & operator()(const argument_type & x) const - { - return (*x); - } -}; - -template< typename Vertex_handle, typename Point > -struct Point_from_vertex_handle -{ - typedef Vertex_handle argument_type; - typedef Point result_type; - result_type & operator()(argument_type & x) const - { - return x->point(); - } - const result_type & operator()(const argument_type & x) const - { - return x->point(); - } -}; - -} // namespace Triangulation -} // namespace internal - -} //namespace CGAL - -#endif // CGAL_INTERNAL_TRIANGULATION_UTILITIES_H diff --git a/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h b/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h deleted file mode 100644 index 110bb4be..00000000 --- a/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. 
-// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_ITERATOR_FROM_INDICES_H -#define CGAL_ITERATOR_FROM_INDICES_H -#include -#include -namespace CGAL { -template -struct Default_coordinate_access { - typedef Ref_ result_type; - template Ref_ operator()(T const& t, std::ptrdiff_t i)const{ - return t[i]; - } -}; - -//TODO: default type for Value_: typename same_cv::type::value_type>::type -template ()[0]) -#else - Value_& -#endif - , class Coord_access = Default_coordinate_access - > -class Iterator_from_indices -: public boost::iterator_facade, - Value_, std::bidirectional_iterator_tag, Ref_> -{ - friend class boost::iterator_core_access; - //FIXME: use int to save space - //TODO: use a tuple to save space when Coord_access is empty - typedef std::ptrdiff_t index_t; - Container_* cont; - index_t index; - Coord_access ca; - void increment(){ ++index; } - void decrement(){ --index; } - void advance(std::ptrdiff_t n){ index+=n; } - ptrdiff_t distance_to(Iterator_from_indices const& other)const{ - return other.index-index; - } - bool equal(Iterator_from_indices const& other)const{ - return index==other.index; - } - Ref_ dereference()const{ - //FIXME: use the functor properly - //Uh, and what did I mean by that? - return ca(*cont,index); - } - public: - Iterator_from_indices(Container_& cont_,std::size_t n) - : cont(&cont_), index(n) {} - template - Iterator_from_indices(Container_& cont_,std::size_t n,T const&t) - : cont(&cont_), index(n), ca(t) {} -}; -} -#endif // CGAL_ITERATOR_FROM_INDICES_H diff --git a/src/common/include/gudhi_patches/CGAL/transforming_iterator.h b/src/common/include/gudhi_patches/CGAL/transforming_iterator.h deleted file mode 100644 index 15ea19a5..00000000 --- a/src/common/include/gudhi_patches/CGAL/transforming_iterator.h +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_TRANSFORMING_ITERATOR_H -#define CGAL_TRANSFORMING_ITERATOR_H -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// Inspired by the boost version, but more compact and -// without any iterator_category games. 
- -namespace CGAL { -namespace internal { - -// non-empty case -template::value> struct Functor_as_base { - Functor_as_base(){} - Functor_as_base(T const& t):f(t){} - //template Functor_as_base(Functor_as_base const&g):f(g.functor()){} - T const& functor()const{return f;} - T & functor() {return f;} - private: - T f; -}; - -// empty case -template struct Functor_as_base : public T { - Functor_as_base(){} - Functor_as_base(T const& t):T(t){} - //template Functor_as_base(Functor_as_base const&g):T(g.functor()){} - T const& functor()const{return *this;} - T & functor() {return *this;} -}; - -template -class transforming_iterator_helper -{ - typedef std::iterator_traits Iter_traits; - typedef typename Iter_traits::reference Iter_ref; - typedef typename Default::Get()(std::declval())) -#else - typename boost::result_of::type - // should be reference instead of value_type -#endif - >::type reference_; - - typedef typename Default::Get::type>::type>::type value_type; - - // Crappy heuristic. If we have *it that returns a Weighted_point and F that returns a reference to the Point contained in the Weighted_point it takes as argument, we do NOT want the transformed iterator to return a reference to the temporary *it. On the other hand, if *it returns an int n, and F returns a reference to array[n] it is not so good to lose the reference. This probably should be done elsewhere and should at least be made optional... - typedef typename boost::mpl::if_< - boost::mpl::or_, - boost::is_integral >, - reference_, value_type>::type reference; - - public: - typedef boost::iterator_adaptor< - Derived, - Iter, - value_type, - typename Iter_traits::iterator_category, - reference - > type; -}; -} - -template -class transforming_iterator -: public internal::transforming_iterator_helper,F,Iter,Ref,Val>::type, -private internal::Functor_as_base -{ - friend class boost::iterator_core_access; - typedef typename internal::transforming_iterator_helper::type Base; - typedef internal::Functor_as_base Functor_base; - typename Base::reference dereference()const{ - return functor()(*this->base_reference()); - } - public: - using Functor_base::functor; - transforming_iterator(){} - explicit transforming_iterator(Iter i,F const& f=F()) - :Base(i),Functor_base(f){} - template - transforming_iterator( - transforming_iterator const&i, - typename boost::enable_if_convertible::type* = 0, - typename boost::enable_if_convertible::type* = 0) - : Base(i.base()),Functor_base(i.functor()) {} - -}; - -template inline -transforming_iterator make_transforming_iterator(Iter i, F const&f=F()) { - return transforming_iterator(i,f); -} - -} - -#endif // CGAL_TRANSFORMING_ITERATOR_H diff --git a/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h b/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h deleted file mode 100644 index 48dac132..00000000 --- a/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. 
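As the header comment above says, this transforming_iterator is a compact take on Boost's transform_iterator: dereferencing applies the stored functor to the underlying element. A standalone sketch using the Boost original rather than the removed CGAL header (the functor Square is hypothetical, introduced only for this illustration):

#include <boost/iterator/transform_iterator.hpp>
#include <iostream>
#include <vector>

// result_type keeps older boost::result_of protocols happy even without decltype.
struct Square {
  typedef int result_type;
  int operator()(int x) const { return x * x; }
};

int main() {
  std::vector<int> v;
  v.push_back(1); v.push_back(2); v.push_back(3);
  // Dereferencing the wrapped iterator applies Square to the element.
  auto first = boost::make_transform_iterator(v.begin(), Square());
  auto last  = boost::make_transform_iterator(v.end(),   Square());
  for (; first != last; ++first)
    std::cout << *first << ' ';                      // prints: 1 4 9
  std::cout << '\n';
  return 0;
}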
-// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_TRANSFORMING_PAIR_ITERATOR_H -#define CGAL_TRANSFORMING_PAIR_ITERATOR_H -// Should be a combination of transform_iterator and zip_iterator, -// but boost's iterator_category games are a pain. - -#include -#include -#include - - - - -namespace CGAL { -namespace internal { -template ::value> -struct Min_category { - CGAL_static_assertion((boost::is_convertible::value)); - typedef Cat1 type; -}; - -template -struct Min_category { - typedef Cat2 type; -}; - - -template -class transforming_pair_iterator_helper -{ - typedef typename Min_category< - typename std::iterator_traits::iterator_category, - typename std::iterator_traits::iterator_category> - ::type iterator_category; - - typedef typename Default::Get()(std::declval::reference>(),std::declval::reference>())) -#else - typename boost::result_of::value_type,typename std::iterator_traits::value_type)>::type - // should be reference instead of value_type -#endif - >::type reference; - - typedef typename Default::Get::type>::type>::type value_type; - - public: - typedef boost::iterator_facade< - Derived, - value_type, - iterator_category, - reference - // expect ptrdiff_t is good enough for difference - > type; -}; -} - -template -class transforming_pair_iterator -: public internal::transforming_pair_iterator_helper,F,It1,It2,Ref,Val>::type, -private internal::Functor_as_base -{ - It1 iter1; It2 iter2; - friend class boost::iterator_core_access; - typedef typename internal::transforming_pair_iterator_helper::type Base; - typedef internal::Functor_as_base Functor_base; - typename Base::reference dereference()const{ - return functor()(*iter1,*iter2); - } - bool equal(transforming_pair_iterator const&i)const{ - bool b=(iter1==i.iter1); - CGAL_assertion(b==(iter2==i.iter2)); - //FIXME: or do we want only one driving iterator - return b; - } - void increment(){ ++iter1; ++iter2; } - void decrement(){ --iter1; --iter2; } - void advance(std::ptrdiff_t n){ - std::advance(iter1,n); - std::advance(iter2,n); - } - std::ptrdiff_t distance_to(transforming_pair_iterator const&i)const{ - std::ptrdiff_t dist=std::distance(iter1,i.iter1); - CGAL_assertion(dist==std::distance(iter2,i.iter2)); - return dist; - } - public: - using Functor_base::functor; - transforming_pair_iterator(){} - explicit transforming_pair_iterator(It1 i1,It2 i2,F const& f=F()) - :Functor_base(f),iter1(i1),iter2(i2){} - template - transforming_pair_iterator( - transforming_pair_iterator const&i, - typename boost::enable_if_convertible::type* = 0, - typename boost::enable_if_convertible::type* = 0, - typename boost::enable_if_convertible::type* = 0) - : Functor_base(i.functor()),iter1(i.iter1),iter2(i.iter2) {} - -}; - -template inline -transforming_pair_iterator make_transforming_pair_iterator(It1 i1, It2 i2, F const&f=F()) { - return transforming_pair_iterator(i1,i2,f); -} - -} - -#endif // CGAL_TRANSFORMING_PAIR_ITERATOR_H diff --git a/src/common/include/gudhi_patches/CGAL/typeset.h b/src/common/include/gudhi_patches/CGAL/typeset.h deleted file mode 100644 index d4e24281..00000000 --- a/src/common/include/gudhi_patches/CGAL/typeset.h +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright (c) 2014 -// INRIA Saclay-Ile de France (France) -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General 
Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Marc Glisse - -#ifndef CGAL_TYPESET_H -#define CGAL_TYPESET_H -#include -#ifdef CGAL_CXX11 -#include -#else -#include -#endif - -// Sometimes using tuple just to list types is overkill (takes forever to -// instantiate). - -namespace CGAL { -#ifdef CGAL_CXX11 - template struct typeset; - template struct typeset { - typedef H head; - typedef typeset tail; - typedef typeset type; - template using contains = typename - std::conditional< - std::is_same::value, - std::true_type, - typename tail::template contains - >::type; - template using add = typename - std::conditional< - contains::value, - typeset, - typeset - >::type; - }; - template<> struct typeset<> { - typedef typeset type; - template using contains = std::false_type; - template using add = typeset; - }; -#else - template struct typeset; - template, void, typeset >::type > - struct typeset { - typedef typeset type; - typedef H head; - typedef T tail; - template struct contains : - boost::mpl::if_,boost::true_type,typename tail::template contains >::type - {}; - template struct add; - //boost::mpl::if_,typeset,typeset >::type - }; - template<> struct typeset<> { - typedef typeset type; - template struct contains : boost::false_type {}; - template struct add : CGAL::typeset {}; - }; - - template - template - struct typeset::add : typeset::type> {}; - template - template - struct typeset::add : typeset {}; -#endif - - template struct typeset_union_ : - typeset_union_::type, typename T2::tail> - {}; - template struct typeset_union_ > : T {}; - - template - struct typeset_intersection_ { - typedef typename T1::head H; - typedef typename typeset_intersection_::type U; - typedef typename -#ifdef CGAL_CXX11 - std::conditional::value, -#else - boost::mpl::if_, -#endif - typename U::template add::type, U>::type type; - }; - template - struct typeset_intersection_,T> : typeset<> {}; - -#ifdef CGAL_CXX11 - template - using typeset_union = typename typeset_union_::type; - template - using typeset_intersection = typename typeset_intersection_::type; -#else - template - struct typeset_union : typeset_union_::type {}; - template - struct typeset_intersection : typeset_intersection_::type {}; -#endif -} -#endif diff --git a/src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt b/src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt deleted file mode 100644 index 5b9581a0..00000000 --- a/src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt +++ /dev/null @@ -1,82 +0,0 @@ -CGAL/Regular_triangulation_traits_adapter.h -CGAL/Triangulation_ds_vertex.h -CGAL/Triangulation_data_structure.h -CGAL/transforming_pair_iterator.h -CGAL/NewKernel_d/static_int.h -CGAL/NewKernel_d/Cartesian_LA_functors.h -CGAL/NewKernel_d/Cartesian_change_FT.h -CGAL/NewKernel_d/Wrapper/Vector_d.h -CGAL/NewKernel_d/Wrapper/Hyperplane_d.h -CGAL/NewKernel_d/Wrapper/Ref_count_obj.h -CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h -CGAL/NewKernel_d/Wrapper/Point_d.h -CGAL/NewKernel_d/Wrapper/Segment_d.h -CGAL/NewKernel_d/Wrapper/Weighted_point_d.h 
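In the C++11 branch of the typeset above, membership is an alias that resolves to std::true_type or std::false_type by recursing over the parameter pack, and add is a no-op when the type is already present. A standalone toy version of that membership recursion (named Tset to make clear it is not the CGAL header):

#include <type_traits>

template <class... Ts> struct Tset;

template <class H, class... Ts>
struct Tset<H, Ts...> {
  // true_type if X is the head or occurs somewhere in the tail.
  template <class X>
  using contains = typename std::conditional<
      std::is_same<H, X>::value,
      std::true_type,
      typename Tset<Ts...>::template contains<X> >::type;
};

template <>
struct Tset<> {
  template <class X> using contains = std::false_type;  // empty set contains nothing
};

static_assert(Tset<int, double>::contains<double>::value, "double is in the set");
static_assert(!Tset<int, double>::contains<char>::value, "char is not in the set");

int main() { return 0; }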
-CGAL/NewKernel_d/Wrapper/Sphere_d.h -CGAL/NewKernel_d/Cartesian_per_dimension.h -CGAL/NewKernel_d/Kernel_object_converter.h -CGAL/NewKernel_d/KernelD_converter.h -CGAL/NewKernel_d/Vector/sse2.h -CGAL/NewKernel_d/Vector/avx4.h -CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h -CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h -CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h -CGAL/NewKernel_d/Vector/array.h -CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h -CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h -CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h -CGAL/NewKernel_d/Vector/vector.h -CGAL/NewKernel_d/Vector/v2int.h -CGAL/NewKernel_d/Vector/mix.h -CGAL/NewKernel_d/Cartesian_static_filters.h -CGAL/NewKernel_d/Cartesian_LA_base.h -CGAL/NewKernel_d/Lazy_cartesian.h -CGAL/NewKernel_d/Coaffine.h -CGAL/NewKernel_d/store_kernel.h -CGAL/NewKernel_d/Dimension_base.h -CGAL/NewKernel_d/Kernel_3_interface.h -CGAL/NewKernel_d/Cartesian_complete.h -CGAL/NewKernel_d/Cartesian_base.h -CGAL/NewKernel_d/Cartesian_filter_K.h -CGAL/NewKernel_d/functor_tags.h -CGAL/NewKernel_d/Filtered_predicate2.h -CGAL/NewKernel_d/functor_properties.h -CGAL/NewKernel_d/Define_kernel_types.h -CGAL/NewKernel_d/LA_eigen/LA.h -CGAL/NewKernel_d/LA_eigen/constructors.h -CGAL/NewKernel_d/Types/Aff_transformation.h -CGAL/NewKernel_d/Types/Sphere.h -CGAL/NewKernel_d/Types/Hyperplane.h -CGAL/NewKernel_d/Types/Line.h -CGAL/NewKernel_d/Types/Ray.h -CGAL/NewKernel_d/Types/Iso_box.h -CGAL/NewKernel_d/Types/Weighted_point.h -CGAL/NewKernel_d/Types/Segment.h -CGAL/NewKernel_d/Kernel_d_interface.h -CGAL/NewKernel_d/utils.h -CGAL/NewKernel_d/Kernel_2_interface.h -CGAL/NewKernel_d/Cartesian_filter_NT.h -CGAL/NewKernel_d/function_objects_cartesian.h -CGAL/Convex_hull.h -CGAL/Triangulation_ds_full_cell.h -CGAL/Regular_triangulation.h -CGAL/Epick_d.h -CGAL/transforming_iterator.h -CGAL/iterator_from_indices.h -CGAL/Delaunay_triangulation.h -CGAL/IO/Triangulation_off_ostream.h -CGAL/typeset.h -CGAL/Triangulation_full_cell.h -CGAL/Triangulation.h -CGAL/internal/Static_or_dynamic_array.h -CGAL/internal/Combination_enumerator.h -CGAL/internal/Triangulation/utilities.h -CGAL/internal/Triangulation/Triangulation_ds_iterators.h -CGAL/internal/Triangulation/Dummy_TDS.h -CGAL/argument_swaps.h -CGAL/Epeck_d.h -CGAL/determinant_of_vectors.h -CGAL/TDS_full_cell_default_storage_policy.h -CGAL/TDS_full_cell_mirror_storage_policy.h -CGAL/Triangulation_face.h -CGAL/Triangulation_vertex.h diff --git a/src/common/utilities/CMakeLists.txt b/src/common/utilities/CMakeLists.txt index 7f1d1cd7..3dcfe84d 100644 --- a/src/common/utilities/CMakeLists.txt +++ b/src/common/utilities/CMakeLists.txt @@ -1,6 +1,6 @@ project(off_file_from_shape_generator) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( off_file_from_shape_generator off_file_from_shape_generator.cpp ) add_test(NAME off_file_from_shape_generator_on_sphere_1000_3_15.2 COMMAND $ "on" "sphere" "onSphere.off" "1000" "3" "15.2") @@ -13,4 +13,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) install(TARGETS off_file_from_shape_generator DESTINATION bin) -endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index d4ace20e..af71bb3b 100644 --- a/src/cython/CMakeLists.txt +++ 
b/src/cython/CMakeLists.txt @@ -91,7 +91,7 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ") endif (EIGEN3_FOUND) - if (NOT CGAL_VERSION VERSION_LESS 4.8.1) + if (NOT CGAL_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}bottleneck_distance;") set(GUDHI_CYTHON_NERVE_GIC "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/nerve_gic.pyx'") @@ -100,7 +100,7 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}bottleneck_distance;") set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}nerve_gic;") endif () - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}subsampling;") set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'") @@ -109,13 +109,13 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}subsampling;") set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}tangential_complex;") endif () - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}alpha_complex;") else() set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}alpha_complex;") endif () - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_witness_complex;") @@ -224,7 +224,7 @@ if(PYTHONINTERP_FOUND) install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py install)") # Test examples - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) # Bottleneck and Alpha add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} @@ -259,8 +259,8 @@ if(PYTHONINTERP_FOUND) # Subsampling add_gudhi_py_test(test_subsampling) - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) - if (NOT CGAL_VERSION VERSION_LESS 4.8.1) + endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + if (NOT CGAL_VERSION VERSION_LESS 4.11.0) # Bottleneck add_test(NAME bottleneck_basic_example_py_test WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} @@ -299,9 +299,9 @@ if(PYTHONINTERP_FOUND) -f human.off -n 700 -v) add_gudhi_py_test(test_cover_complex) - endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) + endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) # Alpha add_test(NAME alpha_complex_from_points_example_py_test WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} @@ -318,13 +318,13 @@ if(PYTHONINTERP_FOUND) add_gudhi_py_test(test_alpha_complex) - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0) + endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 
4.11.0) - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) # Euclidean witness add_gudhi_py_test(test_euclidean_witness_complex) - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0) + endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) # Cubical add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test @@ -389,7 +389,7 @@ if(PYTHONINTERP_FOUND) if(MATPLOTLIB_FOUND) if(NUMPY_FOUND) if(SCIPY_FOUND) - if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) + if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/") # User warning - Sphinx is a static pages generator, and configured to work fine with user_version # Images and biblio warnings because not found on developper version @@ -411,10 +411,10 @@ if(PYTHONINTERP_FOUND) # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES") - else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) + else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) message("++ Python documentation module will not be compiled because it requires a CGAL with Eigen3 version greater or equal than 4.8.1") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) + endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) else(SCIPY_FOUND) message("++ Python documentation module will not be compiled because scipy was not found") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") diff --git a/src/cython/doc/alpha_complex_sum.inc b/src/cython/doc/alpha_complex_sum.inc index 806988bb..9049e654 100644 --- a/src/cython/doc/alpha_complex_sum.inc +++ b/src/cython/doc/alpha_complex_sum.inc @@ -1,20 +1,20 @@ .. table:: :widths: 30 50 20 - +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------+ - | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | - | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | | - | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | - | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: GPL v3 | - | | is then said to be Gabriel), and as the minimum of the filtration | | - | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: Eigen3 and CGAL :math:`\geq` 4.7.0 | - | | otherwise. All simplices that have a filtration value strictly | | - | | greater than a given alpha squared value are not inserted into the | | - | | complex. | | - | | | | - | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | - | | advised for better performance). 
| | - +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------+ - | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | - +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ + | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | + | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | | + | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | + | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 `_) | + | | is then said to be Gabriel), and as the minimum of the filtration | | + | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 | + | | otherwise. All simplices that have a filtration value strictly | | + | | greater than a given alpha squared value are not inserted into the | | + | | complex. | | + | | | | + | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | + | | advised for better performance). | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ + | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/bottleneck_distance_sum.inc b/src/cython/doc/bottleneck_distance_sum.inc index 6840e838..6eb0ac19 100644 --- a/src/cython/doc/bottleneck_distance_sum.inc +++ b/src/cython/doc/bottleneck_distance_sum.inc @@ -1,14 +1,14 @@ .. table:: :widths: 30 50 20 - +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ - | .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi | - | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | | - | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 | - | | diagonal points) such that any couple of matched points are at | | - | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: GPL v3 | - | the longest edge | norm in :math:`\mathbb{R}^2`. 
| | - | | | :Requires: CGAL :math:`\geq` 4.8.0 | - +-----------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------+ - | * :doc:`bottleneck_distance_user` | | - +-----------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------+ + +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+ + | .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi | + | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | | + | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 | + | | diagonal points) such that any couple of matched points are at | | + | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: MIT (`GPL v3 `_) | + | the longest edge | norm in :math:`\mathbb{R}^2`. | | + | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | + +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+ + | * :doc:`bottleneck_distance_user` | | + +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py index ce08f679..e4c718c3 100755 --- a/src/cython/doc/conf.py +++ b/src/cython/doc/conf.py @@ -62,7 +62,7 @@ import gudhi # General information about the project. project = gudhi.__name__ -copyright = gudhi.__copyright__ + ' - ' + gudhi.__license__ +copyright = gudhi.__copyright__ + ' - MIT' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/src/cython/doc/cubical_complex_sum.inc b/src/cython/doc/cubical_complex_sum.inc index 6dcf8e48..f200e695 100644 --- a/src/cython/doc/cubical_complex_sum.inc +++ b/src/cython/doc/cubical_complex_sum.inc @@ -6,7 +6,7 @@ | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | | | :alt: Cubical complex representation | analysis. | :Introduced in: GUDHI 2.0.0 | | :figclass: align-center | | | - | | | :Copyright: GPL v3 | + | | | :Copyright: MIT | | | | | +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | diff --git a/src/cython/doc/nerve_gic_complex_sum.inc b/src/cython/doc/nerve_gic_complex_sum.inc index 0e606fe1..d633c4ff 100644 --- a/src/cython/doc/nerve_gic_complex_sum.inc +++ b/src/cython/doc/nerve_gic_complex_sum.inc @@ -1,16 +1,16 @@ .. 
table:: :widths: 30 50 20 - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | - | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | - | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | - | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | - | | the image of a scalar-valued function defined on the data. | :Copyright: GPL v3 | - | | | | - | | | :Requires: CGAL :math:`\geq` 4.8.1 | - | | | | - | | | | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | * :doc:`nerve_gic_complex_user` | * :doc:`nerve_gic_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | + | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | + | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | + | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | + | | the image of a scalar-valued function defined on the data. | :Copyright: MIT (`GPL v3 `_) | + | | | | + | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | + | | | | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | * :doc:`nerve_gic_complex_user` | * :doc:`nerve_gic_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/persistence_graphical_tools_sum.inc b/src/cython/doc/persistence_graphical_tools_sum.inc index b412de56..0cdf8072 100644 --- a/src/cython/doc/persistence_graphical_tools_sum.inc +++ b/src/cython/doc/persistence_graphical_tools_sum.inc @@ -6,7 +6,7 @@ | img/graphical_tools_representation.png | the user to build easily persistence barcode, diagram or density. 
| | | | | :Introduced in: GUDHI 2.0.0 | | | | | - | | | :Copyright: GPL v3 | + | | | :Copyright: MIT | | | | | | | | :Requires: matplotlib, numpy and scipy | +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ diff --git a/src/cython/doc/persistent_cohomology_sum.inc b/src/cython/doc/persistent_cohomology_sum.inc index 20ca073c..4d7b077e 100644 --- a/src/cython/doc/persistent_cohomology_sum.inc +++ b/src/cython/doc/persistent_cohomology_sum.inc @@ -6,7 +6,7 @@ | ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | | | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Introduced in: GUDHI 2.0.0 | | | homology studies the evolution -- birth, life and death -- of these | | - | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: GPL v3 | + | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: MIT | | Torus | theory is essentially composed of three elements: topological spaces, | | | | their homology groups and an evolution scheme. | | | | | | diff --git a/src/cython/doc/rips_complex_sum.inc b/src/cython/doc/rips_complex_sum.inc index e8e505e2..857c6893 100644 --- a/src/cython/doc/rips_complex_sum.inc +++ b/src/cython/doc/rips_complex_sum.inc @@ -6,7 +6,7 @@ | ../../doc/Rips_complex/rips_complex_representation.png | graph. | | | :figclass: align-center | | :Introduced in: GUDHI 2.0.0 | | | The filtration value of each edge is computed from a user-given | | - | | distance function and is inserted until a user-given threshold | :Copyright: GPL v3 | + | | distance function and is inserted until a user-given threshold | :Copyright: MIT | | | value. | | | | | | | | This complex can be built from a point cloud and a distance function, | | diff --git a/src/cython/doc/simplex_tree_sum.inc b/src/cython/doc/simplex_tree_sum.inc index 086c69d5..5ba58d2b 100644 --- a/src/cython/doc/simplex_tree_sum.inc +++ b/src/cython/doc/simplex_tree_sum.inc @@ -6,7 +6,7 @@ | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | | | :alt: Simplex tree representation | | :Introduced in: GUDHI 2.0.0 | | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: GPL v3 | + | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: MIT | | | | | +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` | diff --git a/src/cython/doc/tangential_complex_sum.inc b/src/cython/doc/tangential_complex_sum.inc index 0f03ffb3..c8bc1177 100644 --- a/src/cython/doc/tangential_complex_sum.inc +++ b/src/cython/doc/tangential_complex_sum.inc @@ -1,14 +1,14 @@ .. table:: :widths: 30 50 20 - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | .. 
figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | - | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | - | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | - | | an unknown manifold. The running time depends only linearly on the | | - | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: GPL v3 | - | | dimension :math:`k`. | | - | | | :Requires: CGAL :math:`\geq` 4.8.0 | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------+ - | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | + | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | + | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | + | | an unknown manifold. The running time depends only linearly on the | | + | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 `_) | + | | dimension :math:`k`. | | + | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/witness_complex_sum.inc b/src/cython/doc/witness_complex_sum.inc index 49577745..8c89b2dd 100644 --- a/src/cython/doc/witness_complex_sum.inc +++ b/src/cython/doc/witness_complex_sum.inc @@ -1,17 +1,18 @@ .. table:: :widths: 30 50 20 - +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ - | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | - | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | | - | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica`. 
| :Copyright: GPL v3 | - | | | | - | | | :Requires: Eigen3 and CGAL :math:`\geq` 4.6.0 for Euclidean versions only | - +-------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------------------------------------------------------+ - | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | - | | * :doc:`strong_witness_complex_ref` | - | | * :doc:`euclidean_witness_complex_ref` | - | | * :doc:`euclidean_strong_witness_complex_ref` | - +-------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------+ + +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ + | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | + | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | | + | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | + | | | | + | | | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 for Euclidean versions only | + +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ + | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | + | | * :doc:`strong_witness_complex_ref` | + | | * :doc:`euclidean_witness_complex_ref` | + | | * :doc:`euclidean_strong_witness_complex_ref` | + +-------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in index 723a31ad..1c380308 100644 --- a/src/cython/gudhi.pyx.in +++ b/src/cython/gudhi.pyx.in @@ -9,8 +9,8 @@ """ __author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__copyright__ = "Copyright (C) 2016-2019 Inria" +__license__ = "https://gudhi.inria.fr/licensing/" __version__ = "@GUDHI_VERSION@" # This variable is used by doctest to find files __root_source_dir__ = "@CMAKE_SOURCE_DIR@" -- cgit v1.2.3 From 95e3e39f346301ed5899d7763570e206e4836652 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 14 Jun 2019 16:38:20 +0200 Subject: Fix bad python3 namespace in doxygen --- src/Doxyfile.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index bb53d1ae..59f864a0 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -784,7 +784,7 @@ EXCLUDE = data/ \ example/ \ GudhUI/ \ cmake/ \ - src/cython/ \ + cython/ \ README.md # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or -- cgit v1.2.3 From de061dd46654104d4d74c69484d0207ecef88d25 Mon 
Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 14 Jun 2019 16:38:56 +0200 Subject: Fix CGAL version in example installation section --- src/common/doc/installation.h | 14 +++----------- src/cython/doc/installation.rst | 15 ++------------- 2 files changed, 5 insertions(+), 24 deletions(-) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 5d581b08..2629d12c 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -64,18 +64,16 @@ make doxygen * library which provides easy access to efficient and reliable geometric algorithms. * * \note There is no need to install CGAL, you can just cmake . && make CGAL (or even - * cmake -DCGAL_HEADER_ONLY=ON . for CGAL version ≥ 4.8.0), thereafter you will be able to compile + * cmake -DCGAL_HEADER_ONLY=ON .), thereafter you will be able to compile * GUDHI by calling cmake -DCGAL_DIR=/your/path/to/CGAL-X.Y .. && make * - * Having CGAL version 4.4.0 or higher installed is recommended. The procedure to install this library according to + * The procedure to install this library according to * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html * * The following examples/utilities require the Computational Geometry Algorithms - * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: + * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL version 4.11.0 or higher is not installed: * \li * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp - * - * The following examples/utilities require CGAL version ≥ 4.6.0: * \li * Witness_complex/strong_witness_persistence.cpp * \li @@ -86,8 +84,6 @@ make doxygen * Witness_complex/example_witness_complex_off.cpp * \li * Witness_complex/example_witness_complex_sphere.cpp - * - * The following example requires CGAL version ≥ 4.7.0: * \li * Alpha_complex/Alpha_complex_from_off.cpp * \li @@ -96,8 +92,6 @@ make doxygen * Alpha_complex/alpha_complex_persistence.cpp * \li * Persistent_cohomology/custom_persistence_sort.cpp - * - * The following example requires CGAL version ≥ 4.8.1: * \li * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp * \li @@ -126,8 +120,6 @@ make doxygen * Tangential_complex/example_basic.cpp * \li * Tangential_complex/example_with_perturb.cpp - * - * The following example requires CGAL version ≥ 4.11.0: * \li * Alpha_complex/Weighted_alpha_complex_3d_from_points.cpp * \li diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst index 855dea44..02b889d0 100644 --- a/src/cython/doc/installation.rst +++ b/src/cython/doc/installation.rst @@ -151,29 +151,18 @@ The :doc:`Alpha complex `, C++ library which provides easy access to efficient and reliable geometric algorithms. -Having CGAL, the Computational Geometry Algorithms Library, version 4.7.0 or -higher installed is recommended. The procedure to install this library +The procedure to install this library according to your operating system is detailed `here `_. -The following examples requires CGAL version ≥ 4.7.0: +The following examples requires CGAL version ≥ 4.11.0: .. only:: builder_html * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` - -The following examples requires CGAL version ≥ 4.8.0: - -.. 
only:: builder_html - * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>` * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - -The following examples requires CGAL version ≥ 4.8.1: - -.. only:: builder_html - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` -- cgit v1.2.3 From a7ae66a868d655a9bd76b208327f00cda9089df6 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 14 Jun 2019 16:54:02 +0200 Subject: Remove homsimpl as it is GPL --- src/GudhUI/utils/homsimpl | Bin 118624 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100755 src/GudhUI/utils/homsimpl diff --git a/src/GudhUI/utils/homsimpl b/src/GudhUI/utils/homsimpl deleted file mode 100755 index 12227502..00000000 Binary files a/src/GudhUI/utils/homsimpl and /dev/null differ -- cgit v1.2.3 From 64d101a24ddcecd84771cfa4dd536ee46f226bf8 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 14 Jun 2019 19:33:15 +0200 Subject: Add CGAL dependency for Contraction instead of including CGAL files --- src/Contraction/example/CMakeLists.txt | 22 ++--- .../CGAL_queue/Modifiable_priority_queue.h | 101 --------------------- .../include/gudhi/Contraction/Edge_profile.h | 1 + .../include/gudhi/Skeleton_blocker_contractor.h | 5 +- src/common/doc/main_page.md | 8 +- 5 files changed, 18 insertions(+), 119 deletions(-) delete mode 100644 src/Contraction/include/gudhi/Contraction/CGAL_queue/Modifiable_priority_queue.h diff --git a/src/Contraction/example/CMakeLists.txt b/src/Contraction/example/CMakeLists.txt index 582b7ab8..f0dc885d 100644 --- a/src/Contraction/example/CMakeLists.txt +++ b/src/Contraction/example/CMakeLists.txt @@ -1,17 +1,17 @@ project(Contraction_examples) -add_executable(RipsContraction Rips_contraction.cpp) +if (NOT CGAL_VERSION VERSION_LESS 4.11.0) + add_executable(RipsContraction Rips_contraction.cpp) -add_executable(GarlandHeckbert Garland_heckbert.cpp) -target_link_libraries(GarlandHeckbert ${Boost_TIMER_LIBRARY}) + add_executable(GarlandHeckbert Garland_heckbert.cpp) + target_link_libraries(GarlandHeckbert ${Boost_TIMER_LIBRARY}) -add_test(NAME Contraction_example_tore3D_0.2 COMMAND $<TARGET_FILE:RipsContraction> + add_test(NAME Contraction_example_tore3D_0.2 COMMAND $<TARGET_FILE:RipsContraction> "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0.2") -# TODO(DS) : These tests are too long under Windows -#add_test(NAME Contraction_example_sphere_0.2 COMMAND $<TARGET_FILE:RipsContraction> -# "${CMAKE_SOURCE_DIR}/data/points/sphere3D_2646.off" "0.2") -#add_test(NAME Contraction_example_SO3_0.3 COMMAND $<TARGET_FILE:RipsContraction> -# "${CMAKE_SOURCE_DIR}/data/points/SO3_10000.off" "0.3") + # TODO(DS) : These tests are too long under Windows + #add_test(NAME Contraction_example_sphere_0.2 COMMAND $<TARGET_FILE:RipsContraction> + # "${CMAKE_SOURCE_DIR}/data/points/sphere3D_2646.off" "0.2") + #add_test(NAME Contraction_example_SO3_0.3 COMMAND $<TARGET_FILE:RipsContraction> + # "${CMAKE_SOURCE_DIR}/data/points/SO3_10000.off" "0.3") -install(TARGETS RipsContraction DESTINATION bin) -install(TARGETS GarlandHeckbert DESTINATION bin) +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) diff --git a/src/Contraction/include/gudhi/Contraction/CGAL_queue/Modifiable_priority_queue.h 
b/src/Contraction/include/gudhi/Contraction/CGAL_queue/Modifiable_priority_queue.h deleted file mode 100644 index 5a55c513..00000000 --- a/src/Contraction/include/gudhi/Contraction/CGAL_queue/Modifiable_priority_queue.h +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright (c) 2006-2011 GeometryFactory (France). All rights reserved. -// -// This file is part of CGAL (www.cgal.org); you can redistribute it and/or -// modify it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 3 of the License, -// or (at your option) any later version. -// -// Licensees holding a valid commercial license may use this file in -// accordance with the commercial license agreement provided with the software. -// -// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE -// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. -// -// $URL$ -// $Id$ -// -// Author(s) : Fernando Cacciola -// -#ifndef CONTRACTION_CGAL_QUEUE_MODIFIABLE_PRIORITY_QUEUE_H_ -#define CONTRACTION_CGAL_QUEUE_MODIFIABLE_PRIORITY_QUEUE_H_ - -#define CGAL_SURFACE_MESH_SIMPLIFICATION_USE_RELAXED_HEAP - -#include -#include - -#include // Neeeded by the following Boost header for CHAR_BIT. -#include // for less - -namespace CGAL { - -template , class ID_ = boost::identity_property_map> -class Modifiable_priority_queue { - public: - typedef Modifiable_priority_queue Self; - - typedef IndexedType_ IndexedType; - typedef Compare_ Compare; - typedef ID_ ID; - - typedef boost::relaxed_heap Heap; - typedef typename Heap::value_type value_type; - typedef typename Heap::size_type size_type; - - typedef bool handle; - - public: - Modifiable_priority_queue(size_type largest_ID, Compare const& c, ID const& id) : mHeap(largest_ID, c, id) { } - - handle push(value_type const& v) { - mHeap.push(v); - return handle(true); - } - - handle update(value_type const& v, handle h) { - mHeap.update(v); - return h; - } - - handle erase(value_type const& v, handle) { - mHeap.remove(v); - return null_handle(); - } - - value_type top() const { - return mHeap.top(); - } - - void pop() { - mHeap.pop(); - } - - bool empty() const { - return mHeap.empty(); - } - - bool contains(value_type const& v) { - return mHeap.contains(v); - } - - boost::optional extract_top() { - boost::optional r; - if (!empty()) { - value_type v = top(); - pop(); - r = boost::optional(v); - } - return r; - } - - static handle null_handle() { - return handle(false); - } - - private: - Heap mHeap; -}; - -} // namespace CGAL - -#endif // CONTRACTION_CGAL_QUEUE_MODIFIABLE_PRIORITY_QUEUE_H_ diff --git a/src/Contraction/include/gudhi/Contraction/Edge_profile.h b/src/Contraction/include/gudhi/Contraction/Edge_profile.h index 78a7afd1..0e914de9 100644 --- a/src/Contraction/include/gudhi/Contraction/Edge_profile.h +++ b/src/Contraction/include/gudhi/Contraction/Edge_profile.h @@ -12,6 +12,7 @@ #define CONTRACTION_EDGE_PROFILE_H_ #include +#include namespace Gudhi { diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index e1f3b3c2..7a99548d 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -13,9 +13,6 @@ #ifndef SKELETON_BLOCKER_CONTRACTOR_H_ #define SKELETON_BLOCKER_CONTRACTOR_H_ -// todo remove the queue to be independent from cgald -#include - #include #include #include @@ -29,6 +26,8 @@ #include #include +// todo remove the queue to 
be independent from cgald +#include #include #include diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index 18bf0ce2..fe25b1e7 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -50,8 +50,7 @@ Author: Vincent Rouvreau
Introduced in: GUDHI 1.3.0
Copyright: MIT [(GPL v3)](../../licensing/)
- Requires: \ref eigen3 and
- \ref cgal 4.11.0 + Requires: \ref eigen3 and \ref cgal ≥ 4.11.0 @@ -127,7 +126,7 @@ Author: Siargey Kachanovich
Introduced in: GUDHI 1.3.0
Copyright: MIT ([GPL v3](../../licensing/) for Euclidean version)
- Euclidean version requires: \ref cgal ≥ 4.11.0 and \ref eigen3 + Euclidean version requires: \ref eigen3 and \ref cgal ≥ 4.11.0 @@ -263,6 +262,7 @@ Author: David Salinas
Introduced in: GUDHI 1.1.0
Copyright: MIT [(GPL v3)](../../licensing/)
+ Requires: \ref cgal ≥ 4.11.0 @@ -324,7 +324,7 @@ Author: Clément Jamin
Introduced in: GUDHI 2.0.0
Copyright: MIT [(GPL v3)](../../licensing/)
- Requires: \ref cgal ≥ 4.11.0 and \ref eigen3 + Requires: \ref eigen3 and \ref cgal ≥ 4.11.0 -- cgit v1.2.3 From 30e2090066fae2734828c78fdd070a2559da135f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 18 Jun 2019 16:09:29 +0200 Subject: scripts cleanup --- scripts/check_google_style.sh | 33 - scripts/concatenate_utils_readme.sh | 4 - scripts/cpplint.py | 6324 ----------------------------------- scripts/cpplint_to_cppcheckxml.py | 66 - 4 files changed, 6427 deletions(-) delete mode 100755 scripts/check_google_style.sh delete mode 100755 scripts/concatenate_utils_readme.sh delete mode 100644 scripts/cpplint.py delete mode 100755 scripts/cpplint_to_cppcheckxml.py diff --git a/scripts/check_google_style.sh b/scripts/check_google_style.sh deleted file mode 100755 index 9ed5cad0..00000000 --- a/scripts/check_google_style.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -#usage bash check_google_style.sh - -# VERSION CHECK -ROOT_DIR=.. -FILE_TO_CHECK="$1" -PYTHON_SCRIPT="$2" -LOG_FILE=$FILE_TO_CHECK.cpplint - -if [ -f $LOG_FILE ]; then - rm -f $LOG_FILE -fi - -if [ ! -f $FILE_TO_CHECK ]; then - echo "File not found! : $FILE_TO_CHECK" | tee -a $LOG_FILE - exit 1 -fi - -# CPPLINT FILE -echo "File: $FILE_TO_CHECK" 2>&1 | tee -a $LOG_FILE -python $PYTHON_SCRIPT --linelength=120 $FILE_TO_CHECK 2>&1 | tee -a $LOG_FILE - -LINE_ERRORS=`grep "Total errors found:" $LOG_FILE` -NB_ERRORS=${LINE_ERRORS:20} - -if [ "$NB_ERRORS" -gt 20 ] -then - echo "## Too many errors ($NB_ERRORS) in $FILE_TO_CHECK" 2>&1 | tee -a $LOG_FILE - exit 1 -fi - -echo "Acceptable number of errors ($NB_ERRORS) in $FILE_TO_CHECK" 2>&1 | tee -a $LOG_FILE -exit 0 diff --git a/scripts/concatenate_utils_readme.sh b/scripts/concatenate_utils_readme.sh deleted file mode 100755 index f5390044..00000000 --- a/scripts/concatenate_utils_readme.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -rm README_FOR_UTILITIES.txt -locate utilities/README | grep `svn info | grep '^URL:' | egrep -o '(tags|branches)/[^/]+|trunk' | egrep -o '[^/]+$'` | xargs cat -- >> README_FOR_UTILITIES.txt \ No newline at end of file diff --git a/scripts/cpplint.py b/scripts/cpplint.py deleted file mode 100644 index 0e186e23..00000000 --- a/scripts/cpplint.py +++ /dev/null @@ -1,6324 +0,0 @@ -#!/usr/bin/env python -# -# Copyright (c) 2009 Google Inc. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Does google-lint on c++ files. - -The goal of this script is to identify places in the code that *may* -be in non-compliance with google style. It does not attempt to fix -up these problems -- the point is to educate. It does also not -attempt to find all problems, or to ensure that everything it does -find is legitimately a problem. - -In particular, we can get very confused by /* and // inside strings! -We do a small hack, which is to ignore //'s with "'s after them on the -same line, but it is far from perfect (in either direction). -""" - -import codecs -import copy -import getopt -import math # for log -import os -import re -import sre_compile -import string -import sys -import unicodedata - - -_USAGE = """ -Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] - [--counting=total|toplevel|detailed] [--root=subdir] - [--linelength=digits] - [file] ... - - The style guidelines this tries to follow are those in - http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml - - Every problem is given a confidence score from 1-5, with 5 meaning we are - certain of the problem, and 1 meaning it could be a legitimate construct. - This will miss some errors, and is not a substitute for a code review. - - To suppress false-positive errors of a certain category, add a - 'NOLINT(category)' comment to the line. NOLINT or NOLINT(*) - suppresses errors of all categories on that line. - - The files passed in will be linted; at least one file must be provided. - Default linted extensions are .cc, .cpp, .cu, .cuh and .h. Change the - extensions with the --extensions flag. - - Flags: - - output=vs7 - By default, the output is formatted to ease emacs parsing. Visual Studio - compatible output (vs7) may also be used. Other formats are unsupported. - - verbose=# - Specify a number 0-5 to restrict errors to certain verbosity levels. - - filter=-x,+y,... - Specify a comma-separated list of category-filters to apply: only - error messages whose category names pass the filters will be printed. - (Category names are printed with the message and look like - "[whitespace/indent]".) Filters are evaluated left to right. - "-FOO" and "FOO" means "do not print categories that start with FOO". - "+FOO" means "do print categories that start with FOO". - - Examples: --filter=-whitespace,+whitespace/braces - --filter=whitespace,runtime/printf,+runtime/printf_format - --filter=-,+build/include_what_you_use - - To see a list of all the categories used in cpplint, pass no arg: - --filter= - - counting=total|toplevel|detailed - The total number of errors found is always printed. If - 'toplevel' is provided, then the count of errors in each of - the top-level categories like 'build' and 'whitespace' will - also be printed. If 'detailed' is provided, then a count - is provided for each category like 'build/class'. - - root=subdir - The root directory used for deriving header guard CPP variable. 
- By default, the header guard CPP variable is calculated as the relative - path to the directory that contains .git, .hg, or .svn. When this flag - is specified, the relative path is calculated from the specified - directory. If the specified directory does not exist, this flag is - ignored. - - Examples: - Assuming that src/.git exists, the header guard CPP variables for - src/chrome/browser/ui/browser.h are: - - No flag => CHROME_BROWSER_UI_BROWSER_H_ - --root=chrome => BROWSER_UI_BROWSER_H_ - --root=chrome/browser => UI_BROWSER_H_ - - linelength=digits - This is the allowed line length for the project. The default value is - 80 characters. - - Examples: - --linelength=120 - - extensions=extension,extension,... - The allowed file extensions that cpplint will check - - Examples: - --extensions=hpp,cpp - - cpplint.py supports per-directory configurations specified in CPPLINT.cfg - files. CPPLINT.cfg file can contain a number of key=value pairs. - Currently the following options are supported: - - set noparent - filter=+filter1,-filter2,... - exclude_files=regex - linelength=80 - - "set noparent" option prevents cpplint from traversing directory tree - upwards looking for more .cfg files in parent directories. This option - is usually placed in the top-level project directory. - - The "filter" option is similar in function to --filter flag. It specifies - message filters in addition to the |_DEFAULT_FILTERS| and those specified - through --filter command-line flag. - - "exclude_files" allows to specify a regular expression to be matched against - a file name. If the expression matches, the file is skipped and not run - through liner. - - "linelength" allows to specify the allowed line length for the project. - - CPPLINT.cfg has an effect on files in the same directory and all - sub-directories, unless overridden by a nested configuration file. - - Example file: - filter=-build/include_order,+build/include_alpha - exclude_files=.*\.cc - - The above example disables build/include_order warning and enables - build/include_alpha as well as excludes all .cc from being - processed by linter, in the current directory (where the .cfg - file is located) and all sub-directories. -""" - -# We categorize each error message we print. Here are the categories. -# We want an explicit list so we can list them all in cpplint --filter=. -# If you add a new error message with a new category, add it to the list -# here! cpplint_unittest.py should tell you if you forget to do this. 
-_ERROR_CATEGORIES = [ - 'build/class', - 'build/c++11', - 'build/deprecated', - 'build/endif_comment', - 'build/explicit_make_pair', - 'build/forward_decl', - 'build/header_guard', - 'build/include', - 'build/include_alpha', - 'build/include_order', - 'build/include_what_you_use', - 'build/namespaces', - 'build/printf_format', - 'build/storage_class', - 'legal/copyright', - 'readability/alt_tokens', - 'readability/braces', - 'readability/casting', - 'readability/check', - 'readability/constructors', - 'readability/fn_size', - 'readability/function', - 'readability/inheritance', - 'readability/multiline_comment', - 'readability/multiline_string', - 'readability/namespace', - 'readability/nolint', - 'readability/nul', - 'readability/strings', - 'readability/todo', - 'readability/utf8', - 'runtime/arrays', - 'runtime/casting', - 'runtime/explicit', - 'runtime/int', - 'runtime/init', - 'runtime/invalid_increment', - 'runtime/member_string_references', - 'runtime/memset', - 'runtime/indentation_namespace', - 'runtime/operator', - 'runtime/printf', - 'runtime/printf_format', - 'runtime/references', - 'runtime/string', - 'runtime/threadsafe_fn', - 'runtime/vlog', - 'whitespace/blank_line', - 'whitespace/braces', - 'whitespace/comma', - 'whitespace/comments', - 'whitespace/empty_conditional_body', - 'whitespace/empty_loop_body', - 'whitespace/end_of_line', - 'whitespace/ending_newline', - 'whitespace/forcolon', - 'whitespace/indent', - 'whitespace/line_length', - 'whitespace/newline', - 'whitespace/operators', - 'whitespace/parens', - 'whitespace/semicolon', - 'whitespace/tab', - 'whitespace/todo', - ] - -# These error categories are no longer enforced by cpplint, but for backwards- -# compatibility they may still appear in NOLINT comments. -_LEGACY_ERROR_CATEGORIES = [ - 'readability/streams', - ] - -# The default state of the category filter. This is overridden by the --filter= -# flag. By default all errors are on, so only add here categories that should be -# off by default (i.e., categories that must be enabled by the --filter= flags). -# All entries here should start with a '-' or '+', as in the --filter= flag. -_DEFAULT_FILTERS = ['-build/include_alpha'] - -# We used to check for high-bit characters, but after much discussion we -# decided those were OK, as long as they were in UTF-8 and didn't represent -# hard-coded international strings, which belong in a separate i18n file. 
- -# C++ headers -_CPP_HEADERS = frozenset([ - # Legacy - 'algobase.h', - 'algo.h', - 'alloc.h', - 'builtinbuf.h', - 'bvector.h', - 'complex.h', - 'defalloc.h', - 'deque.h', - 'editbuf.h', - 'fstream.h', - 'function.h', - 'hash_map', - 'hash_map.h', - 'hash_set', - 'hash_set.h', - 'hashtable.h', - 'heap.h', - 'indstream.h', - 'iomanip.h', - 'iostream.h', - 'istream.h', - 'iterator.h', - 'list.h', - 'map.h', - 'multimap.h', - 'multiset.h', - 'ostream.h', - 'pair.h', - 'parsestream.h', - 'pfstream.h', - 'procbuf.h', - 'pthread_alloc', - 'pthread_alloc.h', - 'rope', - 'rope.h', - 'ropeimpl.h', - 'set.h', - 'slist', - 'slist.h', - 'stack.h', - 'stdiostream.h', - 'stl_alloc.h', - 'stl_relops.h', - 'streambuf.h', - 'stream.h', - 'strfile.h', - 'strstream.h', - 'tempbuf.h', - 'tree.h', - 'type_traits.h', - 'vector.h', - # 17.6.1.2 C++ library headers - 'algorithm', - 'array', - 'atomic', - 'bitset', - 'chrono', - 'codecvt', - 'complex', - 'condition_variable', - 'deque', - 'exception', - 'forward_list', - 'fstream', - 'functional', - 'future', - 'initializer_list', - 'iomanip', - 'ios', - 'iosfwd', - 'iostream', - 'istream', - 'iterator', - 'limits', - 'list', - 'locale', - 'map', - 'memory', - 'mutex', - 'new', - 'numeric', - 'ostream', - 'queue', - 'random', - 'ratio', - 'regex', - 'set', - 'sstream', - 'stack', - 'stdexcept', - 'streambuf', - 'string', - 'strstream', - 'system_error', - 'thread', - 'tuple', - 'typeindex', - 'typeinfo', - 'type_traits', - 'unordered_map', - 'unordered_set', - 'utility', - 'valarray', - 'vector', - # 17.6.1.2 C++ headers for C library facilities - 'cassert', - 'ccomplex', - 'cctype', - 'cerrno', - 'cfenv', - 'cfloat', - 'cinttypes', - 'ciso646', - 'climits', - 'clocale', - 'cmath', - 'csetjmp', - 'csignal', - 'cstdalign', - 'cstdarg', - 'cstdbool', - 'cstddef', - 'cstdint', - 'cstdio', - 'cstdlib', - 'cstring', - 'ctgmath', - 'ctime', - 'cuchar', - 'cwchar', - 'cwctype', - ]) - - -# These headers are excluded from [build/include] and [build/include_order] -# checks: -# - Anything not following google file name conventions (containing an -# uppercase character, such as Python.h or nsStringAPI.h, for example). -# - Lua headers. -_THIRD_PARTY_HEADERS_PATTERN = re.compile( - r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$') - - -# Assertion macros. These are defined in base/logging.h and -# testing/base/gunit.h. Note that the _M versions need to come first -# for substring matching to work. 
-_CHECK_MACROS = [ - 'DCHECK', 'CHECK', - 'EXPECT_TRUE_M', 'EXPECT_TRUE', - 'ASSERT_TRUE_M', 'ASSERT_TRUE', - 'EXPECT_FALSE_M', 'EXPECT_FALSE', - 'ASSERT_FALSE_M', 'ASSERT_FALSE', - ] - -# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE -_CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS]) - -for op, replacement in [('==', 'EQ'), ('!=', 'NE'), - ('>=', 'GE'), ('>', 'GT'), - ('<=', 'LE'), ('<', 'LT')]: - _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement - _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement - _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement - _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement - _CHECK_REPLACEMENT['EXPECT_TRUE_M'][op] = 'EXPECT_%s_M' % replacement - _CHECK_REPLACEMENT['ASSERT_TRUE_M'][op] = 'ASSERT_%s_M' % replacement - -for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'), - ('>=', 'LT'), ('>', 'LE'), - ('<=', 'GT'), ('<', 'GE')]: - _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement - _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement - _CHECK_REPLACEMENT['EXPECT_FALSE_M'][op] = 'EXPECT_%s_M' % inv_replacement - _CHECK_REPLACEMENT['ASSERT_FALSE_M'][op] = 'ASSERT_%s_M' % inv_replacement - -# Alternative tokens and their replacements. For full list, see section 2.5 -# Alternative tokens [lex.digraph] in the C++ standard. -# -# Digraphs (such as '%:') are not included here since it's a mess to -# match those on a word boundary. -_ALT_TOKEN_REPLACEMENT = { - 'and': '&&', - 'bitor': '|', - 'or': '||', - 'xor': '^', - 'compl': '~', - 'bitand': '&', - 'and_eq': '&=', - 'or_eq': '|=', - 'xor_eq': '^=', - 'not': '!', - 'not_eq': '!=' - } - -# Compile regular expression that matches all the above keywords. The "[ =()]" -# bit is meant to avoid matching these keywords outside of boolean expressions. -# -# False positives include C-style multi-line comments and multi-line strings -# but those have always been troublesome for cpplint. -_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile( - r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)') - - -# These constants define types of headers for use with -# _IncludeState.CheckNextIncludeOrder(). -_C_SYS_HEADER = 1 -_CPP_SYS_HEADER = 2 -_LIKELY_MY_HEADER = 3 -_POSSIBLE_MY_HEADER = 4 -_OTHER_HEADER = 5 - -# These constants define the current inline assembly state -_NO_ASM = 0 # Outside of inline assembly block -_INSIDE_ASM = 1 # Inside inline assembly block -_END_ASM = 2 # Last line of inline assembly block -_BLOCK_ASM = 3 # The whole block is an inline assembly block - -# Match start of assembly blocks -_MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)' - r'(?:\s+(volatile|__volatile__))?' - r'\s*[{(]') - - -_regexp_compile_cache = {} - -# {str, set(int)}: a map from error categories to sets of linenumbers -# on which those errors are expected and should be suppressed. -_error_suppressions = {} - -# The root directory used for deriving header guard CPP variable. -# This is set by --root flag. -_root = None - -# The allowed line length of files. -# This is set by --linelength flag. -_line_length = 80 - -# The allowed extensions for file names -# This is set by --extensions flag. -_valid_extensions = set(['cc', 'h', 'cpp', 'cu', 'cuh']) - -def ParseNolintSuppressions(filename, raw_line, linenum, error): - """Updates the global list of error-suppressions. - - Parses any NOLINT comments on the current line, updating the global - error_suppressions store. 
Reports an error if the NOLINT comment - was malformed. - - Args: - filename: str, the name of the input file. - raw_line: str, the line of input text, with comments. - linenum: int, the number of the current line. - error: function, an error handler. - """ - matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line) - if matched: - if matched.group(1): - suppressed_line = linenum + 1 - else: - suppressed_line = linenum - category = matched.group(2) - if category in (None, '(*)'): # => "suppress all" - _error_suppressions.setdefault(None, set()).add(suppressed_line) - else: - if category.startswith('(') and category.endswith(')'): - category = category[1:-1] - if category in _ERROR_CATEGORIES: - _error_suppressions.setdefault(category, set()).add(suppressed_line) - elif category not in _LEGACY_ERROR_CATEGORIES: - error(filename, linenum, 'readability/nolint', 5, - 'Unknown NOLINT error category: %s' % category) - - -def ResetNolintSuppressions(): - """Resets the set of NOLINT suppressions to empty.""" - _error_suppressions.clear() - - -def IsErrorSuppressedByNolint(category, linenum): - """Returns true if the specified error category is suppressed on this line. - - Consults the global error_suppressions map populated by - ParseNolintSuppressions/ResetNolintSuppressions. - - Args: - category: str, the category of the error. - linenum: int, the current line number. - Returns: - bool, True iff the error should be suppressed due to a NOLINT comment. - """ - return (linenum in _error_suppressions.get(category, set()) or - linenum in _error_suppressions.get(None, set())) - - -def Match(pattern, s): - """Matches the string with the pattern, caching the compiled regexp.""" - # The regexp compilation caching is inlined in both Match and Search for - # performance reasons; factoring it out into a separate function turns out - # to be noticeably expensive. - if pattern not in _regexp_compile_cache: - _regexp_compile_cache[pattern] = sre_compile.compile(pattern) - return _regexp_compile_cache[pattern].match(s) - - -def ReplaceAll(pattern, rep, s): - """Replaces instances of pattern in a string with a replacement. - - The compiled regex is kept in a cache shared by Match and Search. - - Args: - pattern: regex pattern - rep: replacement text - s: search string - - Returns: - string with replacements made (or original string if no replacements) - """ - if pattern not in _regexp_compile_cache: - _regexp_compile_cache[pattern] = sre_compile.compile(pattern) - return _regexp_compile_cache[pattern].sub(rep, s) - - -def Search(pattern, s): - """Searches the string for the pattern, caching the compiled regexp.""" - if pattern not in _regexp_compile_cache: - _regexp_compile_cache[pattern] = sre_compile.compile(pattern) - return _regexp_compile_cache[pattern].search(s) - - -class _IncludeState(object): - """Tracks line numbers for includes, and the order in which includes appear. - - include_list contains list of lists of (header, line number) pairs. - It's a lists of lists rather than just one flat list to make it - easier to update across preprocessor boundaries. - - Call CheckNextIncludeOrder() once for each header in the file, passing - in the type constants defined above. Calls in an illegal order will - raise an _IncludeError with an appropriate error message. - - """ - # self._section will move monotonically through this set. If it ever - # needs to move backwards, CheckNextIncludeOrder will raise an error. 
- _INITIAL_SECTION = 0 - _MY_H_SECTION = 1 - _C_SECTION = 2 - _CPP_SECTION = 3 - _OTHER_H_SECTION = 4 - - _TYPE_NAMES = { - _C_SYS_HEADER: 'C system header', - _CPP_SYS_HEADER: 'C++ system header', - _LIKELY_MY_HEADER: 'header this file implements', - _POSSIBLE_MY_HEADER: 'header this file may implement', - _OTHER_HEADER: 'other header', - } - _SECTION_NAMES = { - _INITIAL_SECTION: "... nothing. (This can't be an error.)", - _MY_H_SECTION: 'a header this file implements', - _C_SECTION: 'C system header', - _CPP_SECTION: 'C++ system header', - _OTHER_H_SECTION: 'other header', - } - - def __init__(self): - self.include_list = [[]] - self.ResetSection('') - - def FindHeader(self, header): - """Check if a header has already been included. - - Args: - header: header to check. - Returns: - Line number of previous occurrence, or -1 if the header has not - been seen before. - """ - for section_list in self.include_list: - for f in section_list: - if f[0] == header: - return f[1] - return -1 - - def ResetSection(self, directive): - """Reset section checking for preprocessor directive. - - Args: - directive: preprocessor directive (e.g. "if", "else"). - """ - # The name of the current section. - self._section = self._INITIAL_SECTION - # The path of last found header. - self._last_header = '' - - # Update list of includes. Note that we never pop from the - # include list. - if directive in ('if', 'ifdef', 'ifndef'): - self.include_list.append([]) - elif directive in ('else', 'elif'): - self.include_list[-1] = [] - - def SetLastHeader(self, header_path): - self._last_header = header_path - - def CanonicalizeAlphabeticalOrder(self, header_path): - """Returns a path canonicalized for alphabetical comparison. - - - replaces "-" with "_" so they both cmp the same. - - removes '-inl' since we don't require them to be after the main header. - - lowercase everything, just in case. - - Args: - header_path: Path to be canonicalized. - - Returns: - Canonicalized path. - """ - return header_path.replace('-inl.h', '.h').replace('-', '_').lower() - - def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path): - """Check if a header is in alphabetical order with the previous header. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - header_path: Canonicalized header to be checked. - - Returns: - Returns true if the header is in alphabetical order. - """ - # If previous section is different from current section, _last_header will - # be reset to empty string, so it's always less than current header. - # - # If previous line was a blank line, assume that the headers are - # intentionally sorted the way they are. - if (self._last_header > header_path and - Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])): - return False - return True - - def CheckNextIncludeOrder(self, header_type): - """Returns a non-empty error message if the next header is out of order. - - This function also updates the internal state to be ready to check - the next include. - - Args: - header_type: One of the _XXX_HEADER constants defined above. - - Returns: - The empty string if the header is in the right order, or an - error message describing what's wrong. 
- - """ - error_message = ('Found %s after %s' % - (self._TYPE_NAMES[header_type], - self._SECTION_NAMES[self._section])) - - last_section = self._section - - if header_type == _C_SYS_HEADER: - if self._section <= self._C_SECTION: - self._section = self._C_SECTION - else: - self._last_header = '' - return error_message - elif header_type == _CPP_SYS_HEADER: - if self._section <= self._CPP_SECTION: - self._section = self._CPP_SECTION - else: - self._last_header = '' - return error_message - elif header_type == _LIKELY_MY_HEADER: - if self._section <= self._MY_H_SECTION: - self._section = self._MY_H_SECTION - else: - self._section = self._OTHER_H_SECTION - elif header_type == _POSSIBLE_MY_HEADER: - if self._section <= self._MY_H_SECTION: - self._section = self._MY_H_SECTION - else: - # This will always be the fallback because we're not sure - # enough that the header is associated with this file. - self._section = self._OTHER_H_SECTION - else: - assert header_type == _OTHER_HEADER - self._section = self._OTHER_H_SECTION - - if last_section != self._section: - self._last_header = '' - - return '' - - -class _CppLintState(object): - """Maintains module-wide state..""" - - def __init__(self): - self.verbose_level = 1 # global setting. - self.error_count = 0 # global count of reported errors - # filters to apply when emitting error messages - self.filters = _DEFAULT_FILTERS[:] - # backup of filter list. Used to restore the state after each file. - self._filters_backup = self.filters[:] - self.counting = 'total' # In what way are we counting errors? - self.errors_by_category = {} # string to int dict storing error counts - - # output format: - # "emacs" - format that emacs can parse (default) - # "vs7" - format that Microsoft Visual Studio 7 can parse - self.output_format = 'emacs' - - def SetOutputFormat(self, output_format): - """Sets the output format for errors.""" - self.output_format = output_format - - def SetVerboseLevel(self, level): - """Sets the module's verbosity, and returns the previous setting.""" - last_verbose_level = self.verbose_level - self.verbose_level = level - return last_verbose_level - - def SetCountingStyle(self, counting_style): - """Sets the module's counting options.""" - self.counting = counting_style - - def SetFilters(self, filters): - """Sets the error-message filters. - - These filters are applied when deciding whether to emit a given - error message. - - Args: - filters: A string of comma-separated filters (eg "+whitespace/indent"). - Each filter should start with + or -; else we die. - - Raises: - ValueError: The comma-separated filters did not all start with '+' or '-'. - E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter" - """ - # Default filters always have less priority than the flag ones. - self.filters = _DEFAULT_FILTERS[:] - self.AddFilters(filters) - - def AddFilters(self, filters): - """ Adds more filters to the existing list of error-message filters. 
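    For illustration, filters are applied in the order given, so the
    (hypothetical) string "-whitespace,+whitespace/braces" first silences
    every category starting with "whitespace" and then re-enables the
    single category "whitespace/braces"; the same string may be passed on
    the command line as --filter=-whitespace,+whitespace/braces.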
""" - for filt in filters.split(','): - clean_filt = filt.strip() - if clean_filt: - self.filters.append(clean_filt) - for filt in self.filters: - if not (filt.startswith('+') or filt.startswith('-')): - raise ValueError('Every filter in --filters must start with + or -' - ' (%s does not)' % filt) - - def BackupFilters(self): - """ Saves the current filter list to backup storage.""" - self._filters_backup = self.filters[:] - - def RestoreFilters(self): - """ Restores filters previously backed up.""" - self.filters = self._filters_backup[:] - - def ResetErrorCounts(self): - """Sets the module's error statistic back to zero.""" - self.error_count = 0 - self.errors_by_category = {} - - def IncrementErrorCount(self, category): - """Bumps the module's error statistic.""" - self.error_count += 1 - if self.counting in ('toplevel', 'detailed'): - if self.counting != 'detailed': - category = category.split('/')[0] - if category not in self.errors_by_category: - self.errors_by_category[category] = 0 - self.errors_by_category[category] += 1 - - def PrintErrorCounts(self): - """Print a summary of errors by category, and the total.""" - for category, count in self.errors_by_category.iteritems(): - sys.stderr.write('Category \'%s\' errors found: %d\n' % - (category, count)) - sys.stderr.write('Total errors found: %d\n' % self.error_count) - -_cpplint_state = _CppLintState() - - -def _OutputFormat(): - """Gets the module's output format.""" - return _cpplint_state.output_format - - -def _SetOutputFormat(output_format): - """Sets the module's output format.""" - _cpplint_state.SetOutputFormat(output_format) - - -def _VerboseLevel(): - """Returns the module's verbosity setting.""" - return _cpplint_state.verbose_level - - -def _SetVerboseLevel(level): - """Sets the module's verbosity, and returns the previous setting.""" - return _cpplint_state.SetVerboseLevel(level) - - -def _SetCountingStyle(level): - """Sets the module's counting options.""" - _cpplint_state.SetCountingStyle(level) - - -def _Filters(): - """Returns the module's list of output filters, as a list.""" - return _cpplint_state.filters - - -def _SetFilters(filters): - """Sets the module's error-message filters. - - These filters are applied when deciding whether to emit a given - error message. - - Args: - filters: A string of comma-separated filters (eg "whitespace/indent"). - Each filter should start with + or -; else we die. - """ - _cpplint_state.SetFilters(filters) - -def _AddFilters(filters): - """Adds more filter overrides. - - Unlike _SetFilters, this function does not reset the current list of filters - available. - - Args: - filters: A string of comma-separated filters (eg "whitespace/indent"). - Each filter should start with + or -; else we die. - """ - _cpplint_state.AddFilters(filters) - -def _BackupFilters(): - """ Saves the current filter list to backup storage.""" - _cpplint_state.BackupFilters() - -def _RestoreFilters(): - """ Restores filters previously backed up.""" - _cpplint_state.RestoreFilters() - -class _FunctionState(object): - """Tracks current function name and the number of lines in its body.""" - - _NORMAL_TRIGGER = 250 # for --v=0, 500 for --v=1, etc. - _TEST_TRIGGER = 400 # about 50% more than _NORMAL_TRIGGER. - - def __init__(self): - self.in_a_function = False - self.lines_in_function = 0 - self.current_function = '' - - def Begin(self, function_name): - """Start analyzing function body. - - Args: - function_name: The name of the function being tracked. 
- """ - self.in_a_function = True - self.lines_in_function = 0 - self.current_function = function_name - - def Count(self): - """Count line in current function body.""" - if self.in_a_function: - self.lines_in_function += 1 - - def Check(self, error, filename, linenum): - """Report if too many lines in function body. - - Args: - error: The function to call with any errors found. - filename: The name of the current file. - linenum: The number of the line to check. - """ - if Match(r'T(EST|est)', self.current_function): - base_trigger = self._TEST_TRIGGER - else: - base_trigger = self._NORMAL_TRIGGER - trigger = base_trigger * 2**_VerboseLevel() - - if self.lines_in_function > trigger: - error_level = int(math.log(self.lines_in_function / base_trigger, 2)) - # 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ... - if error_level > 5: - error_level = 5 - error(filename, linenum, 'readability/fn_size', error_level, - 'Small and focused functions are preferred:' - ' %s has %d non-comment lines' - ' (error triggered by exceeding %d lines).' % ( - self.current_function, self.lines_in_function, trigger)) - - def End(self): - """Stop analyzing function body.""" - self.in_a_function = False - - -class _IncludeError(Exception): - """Indicates a problem with the include order in a file.""" - pass - - -class FileInfo(object): - """Provides utility functions for filenames. - - FileInfo provides easy access to the components of a file's path - relative to the project root. - """ - - def __init__(self, filename): - self._filename = filename - - def FullName(self): - """Make Windows paths like Unix.""" - return os.path.abspath(self._filename).replace('\\', '/') - - def RepositoryName(self): - """FullName after removing the local path to the repository. - - If we have a real absolute path name here we can try to do something smart: - detecting the root of the checkout and truncating /path/to/checkout from - the name so that we get header guards that don't include things like - "C:\Documents and Settings\..." or "/home/username/..." in them and thus - people on different computers who have checked the source out to different - locations won't see bogus errors. - """ - fullname = self.FullName() - - if os.path.exists(fullname): - project_dir = os.path.dirname(fullname) - - if os.path.exists(os.path.join(project_dir, ".svn")): - # If there's a .svn file in the current directory, we recursively look - # up the directory tree for the top of the SVN checkout - root_dir = project_dir - one_up_dir = os.path.dirname(root_dir) - while os.path.exists(os.path.join(one_up_dir, ".svn")): - root_dir = os.path.dirname(root_dir) - one_up_dir = os.path.dirname(one_up_dir) - - prefix = os.path.commonprefix([root_dir, project_dir]) - return fullname[len(prefix) + 1:] - - # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by - # searching up from the current path. - root_dir = os.path.dirname(fullname) - while (root_dir != os.path.dirname(root_dir) and - not os.path.exists(os.path.join(root_dir, ".git")) and - not os.path.exists(os.path.join(root_dir, ".hg")) and - not os.path.exists(os.path.join(root_dir, ".svn"))): - root_dir = os.path.dirname(root_dir) - - if (os.path.exists(os.path.join(root_dir, ".git")) or - os.path.exists(os.path.join(root_dir, ".hg")) or - os.path.exists(os.path.join(root_dir, ".svn"))): - prefix = os.path.commonprefix([root_dir, project_dir]) - return fullname[len(prefix) + 1:] - - # Don't know what to do; header guard warnings may be wrong... 
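    # For illustration (hypothetical paths): with a .git, .hg or .svn
    # directory at /home/user/project, the file
    # /home/user/project/src/util/math.h is reported as "src/util/math.h".
    # Only when no repository root is found does the absolute path below
    # get returned.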
- return fullname - - def Split(self): - """Splits the file into the directory, basename, and extension. - - For 'chrome/browser/browser.cc', Split() would - return ('chrome/browser', 'browser', '.cc') - - Returns: - A tuple of (directory, basename, extension). - """ - - googlename = self.RepositoryName() - project, rest = os.path.split(googlename) - return (project,) + os.path.splitext(rest) - - def BaseName(self): - """File base name - text after the final slash, before the final period.""" - return self.Split()[1] - - def Extension(self): - """File extension - text following the final period.""" - return self.Split()[2] - - def NoExtension(self): - """File has no source file extension.""" - return '/'.join(self.Split()[0:2]) - - def IsSource(self): - """File has a source file extension.""" - return self.Extension()[1:] in ('c', 'cc', 'cpp', 'cxx') - - -def _ShouldPrintError(category, confidence, linenum): - """If confidence >= verbose, category passes filter and is not suppressed.""" - - # There are three ways we might decide not to print an error message: - # a "NOLINT(category)" comment appears in the source, - # the verbosity level isn't high enough, or the filters filter it out. - if IsErrorSuppressedByNolint(category, linenum): - return False - - if confidence < _cpplint_state.verbose_level: - return False - - is_filtered = False - for one_filter in _Filters(): - if one_filter.startswith('-'): - if category.startswith(one_filter[1:]): - is_filtered = True - elif one_filter.startswith('+'): - if category.startswith(one_filter[1:]): - is_filtered = False - else: - assert False # should have been checked for in SetFilter. - if is_filtered: - return False - - return True - - -def Error(filename, linenum, category, confidence, message): - """Logs the fact we've found a lint error. - - We log where the error was found, and also our confidence in the error, - that is, how certain we are this is a legitimate style regression, and - not a misidentification or a use that's sometimes justified. - - False positives can be suppressed by the use of - "cpplint(category)" comments on the offending line. These are - parsed into _error_suppressions. - - Args: - filename: The name of the file containing the error. - linenum: The number of the line containing the error. - category: A string used to describe the "category" this bug - falls under: "whitespace", say, or "runtime". Categories - may have a hierarchy separated by slashes: "whitespace/indent". - confidence: A number from 1-5 representing a confidence score for - the error, with 5 meaning that we are certain of the problem, - and 1 meaning that it could be a legitimate construct. - message: The error message. - """ - if _ShouldPrintError(category, confidence, linenum): - _cpplint_state.IncrementErrorCount(category) - if _cpplint_state.output_format == 'vs7': - sys.stderr.write('%s(%s): %s [%s] [%d]\n' % ( - filename, linenum, message, category, confidence)) - elif _cpplint_state.output_format == 'eclipse': - sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % ( - filename, linenum, message, category, confidence)) - else: - sys.stderr.write('%s:%s: %s [%s] [%d]\n' % ( - filename, linenum, message, category, confidence)) - - -# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard. -_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile( - r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)') -# Match a single C style comment on the same line. -_RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/' -# Matches multi-line C style comments. 
-# This RE is a little bit more complicated than one might expect, because we -# have to take care of space removals tools so we can handle comments inside -# statements better. -# The current rule is: We only clear spaces from both sides when we're at the -# end of the line. Otherwise, we try to remove spaces from the right side, -# if this doesn't work we try on left side but only if there's a non-character -# on the right. -_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile( - r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' + - _RE_PATTERN_C_COMMENTS + r'\s+|' + - r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' + - _RE_PATTERN_C_COMMENTS + r')') - - -def IsCppString(line): - """Does line terminate so, that the next symbol is in string constant. - - This function does not consider single-line nor multi-line comments. - - Args: - line: is a partial line of code starting from the 0..n. - - Returns: - True, if next character appended to 'line' is inside a - string constant. - """ - - line = line.replace(r'\\', 'XX') # after this, \\" does not match to \" - return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1 - - -def CleanseRawStrings(raw_lines): - """Removes C++11 raw strings from lines. - - Before: - static const char kData[] = R"( - multi-line string - )"; - - After: - static const char kData[] = "" - (replaced by blank line) - ""; - - Args: - raw_lines: list of raw lines. - - Returns: - list of lines with C++11 raw strings replaced by empty strings. - """ - - delimiter = None - lines_without_raw_strings = [] - for line in raw_lines: - if delimiter: - # Inside a raw string, look for the end - end = line.find(delimiter) - if end >= 0: - # Found the end of the string, match leading space for this - # line and resume copying the original lines, and also insert - # a "" on the last line. - leading_space = Match(r'^(\s*)\S', line) - line = leading_space.group(1) + '""' + line[end + len(delimiter):] - delimiter = None - else: - # Haven't found the end yet, append a blank line. - line = '""' - - # Look for beginning of a raw string, and replace them with - # empty strings. This is done in a loop to handle multiple raw - # strings on the same line. - while delimiter is None: - # Look for beginning of a raw string. - # See 2.14.15 [lex.string] for syntax. - matched = Match(r'^(.*)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line) - if matched: - delimiter = ')' + matched.group(2) + '"' - - end = matched.group(3).find(delimiter) - if end >= 0: - # Raw string ended on same line - line = (matched.group(1) + '""' + - matched.group(3)[end + len(delimiter):]) - delimiter = None - else: - # Start of a multi-line raw string - line = matched.group(1) + '""' - else: - break - - lines_without_raw_strings.append(line) - - # TODO(unknown): if delimiter is not None here, we might want to - # emit a warning for unterminated string. 
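  # For illustration (hypothetical literal): a custom-delimiter string
  #     static const char kMsg[] = R"pb(first line
  #     second line)pb";
  # is collapsed to
  #     static const char kMsg[] = ""
  #     "";
  # so the checks that run later never look inside the raw string.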
- return lines_without_raw_strings - - -def FindNextMultiLineCommentStart(lines, lineix): - """Find the beginning marker for a multiline comment.""" - while lineix < len(lines): - if lines[lineix].strip().startswith('/*'): - # Only return this marker if the comment goes beyond this line - if lines[lineix].strip().find('*/', 2) < 0: - return lineix - lineix += 1 - return len(lines) - - -def FindNextMultiLineCommentEnd(lines, lineix): - """We are inside a comment, find the end marker.""" - while lineix < len(lines): - if lines[lineix].strip().endswith('*/'): - return lineix - lineix += 1 - return len(lines) - - -def RemoveMultiLineCommentsFromRange(lines, begin, end): - """Clears a range of lines for multi-line comments.""" - # Having // dummy comments makes the lines non-empty, so we will not get - # unnecessary blank line warnings later in the code. - for i in range(begin, end): - lines[i] = '/**/' - - -def RemoveMultiLineComments(filename, lines, error): - """Removes multiline (c-style) comments from lines.""" - lineix = 0 - while lineix < len(lines): - lineix_begin = FindNextMultiLineCommentStart(lines, lineix) - if lineix_begin >= len(lines): - return - lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin) - if lineix_end >= len(lines): - error(filename, lineix_begin + 1, 'readability/multiline_comment', 5, - 'Could not find end of multi-line comment') - return - RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1) - lineix = lineix_end + 1 - - -def CleanseComments(line): - """Removes //-comments and single-line C-style /* */ comments. - - Args: - line: A line of C++ source. - - Returns: - The line with single-line comments removed. - """ - commentpos = line.find('//') - if commentpos != -1 and not IsCppString(line[:commentpos]): - line = line[:commentpos].rstrip() - # get rid of /* ... */ - return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) - - -class CleansedLines(object): - """Holds 4 copies of all lines with different preprocessing applied to them. - - 1) elided member contains lines without strings and comments. - 2) lines member contains lines without comments. - 3) raw_lines member contains all the lines without processing. - 4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw - strings removed. - All these members are of , and of the same length. - """ - - def __init__(self, lines): - self.elided = [] - self.lines = [] - self.raw_lines = lines - self.num_lines = len(lines) - self.lines_without_raw_strings = CleanseRawStrings(lines) - for linenum in range(len(self.lines_without_raw_strings)): - self.lines.append(CleanseComments( - self.lines_without_raw_strings[linenum])) - elided = self._CollapseStrings(self.lines_without_raw_strings[linenum]) - self.elided.append(CleanseComments(elided)) - - def NumLines(self): - """Returns the number of lines represented.""" - return self.num_lines - - @staticmethod - def _CollapseStrings(elided): - """Collapses strings and chars on a line to simple "" or '' blocks. - - We nix strings first so we're not fooled by text like '"http://"' - - Args: - elided: The line being processed. - - Returns: - The line with collapsed strings. - """ - if _RE_PATTERN_INCLUDE.match(elided): - return elided - - # Remove escaped characters first to make quote/single quote collapsing - # basic. Things that look like escaped characters shouldn't occur - # outside of strings and chars. - elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided) - - # Replace quoted strings and digit separators. 
Both single quotes - # and double quotes are processed in the same loop, otherwise - # nested quotes wouldn't work. - collapsed = '' - while True: - # Find the first quote character - match = Match(r'^([^\'"]*)([\'"])(.*)$', elided) - if not match: - collapsed += elided - break - head, quote, tail = match.groups() - - if quote == '"': - # Collapse double quoted strings - second_quote = tail.find('"') - if second_quote >= 0: - collapsed += head + '""' - elided = tail[second_quote + 1:] - else: - # Unmatched double quote, don't bother processing the rest - # of the line since this is probably a multiline string. - collapsed += elided - break - else: - # Found single quote, check nearby text to eliminate digit separators. - # - # There is no special handling for floating point here, because - # the integer/fractional/exponent parts would all be parsed - # correctly as long as there are digits on both sides of the - # separator. So we are fine as long as we don't see something - # like "0.'3" (gcc 4.9.0 will not allow this literal). - if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head): - match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail) - collapsed += head + match_literal.group(1).replace("'", '') - elided = match_literal.group(2) - else: - second_quote = tail.find('\'') - if second_quote >= 0: - collapsed += head + "''" - elided = tail[second_quote + 1:] - else: - # Unmatched single quote - collapsed += elided - break - - return collapsed - - -def FindEndOfExpressionInLine(line, startpos, stack): - """Find the position just after the end of current parenthesized expression. - - Args: - line: a CleansedLines line. - startpos: start searching at this position. - stack: nesting stack at startpos. - - Returns: - On finding matching end: (index just after matching end, None) - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at end of this line) - """ - for i in xrange(startpos, len(line)): - char = line[i] - if char in '([{': - # Found start of parenthesized expression, push to expression stack - stack.append(char) - elif char == '<': - # Found potential start of template argument list - if i > 0 and line[i - 1] == '<': - # Left shift operator - if stack and stack[-1] == '<': - stack.pop() - if not stack: - return (-1, None) - elif i > 0 and Search(r'\boperator\s*$', line[0:i]): - # operator<, don't add to stack - continue - else: - # Tentative start of template argument list - stack.append('<') - elif char in ')]}': - # Found end of parenthesized expression. - # - # If we are currently expecting a matching '>', the pending '<' - # must have been an operator. Remove them from expression stack. - while stack and stack[-1] == '<': - stack.pop() - if not stack: - return (-1, None) - if ((stack[-1] == '(' and char == ')') or - (stack[-1] == '[' and char == ']') or - (stack[-1] == '{' and char == '}')): - stack.pop() - if not stack: - return (i + 1, None) - else: - # Mismatched parentheses - return (-1, None) - elif char == '>': - # Found potential end of template argument list. - - # Ignore "->" and operator functions - if (i > 0 and - (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))): - continue - - # Pop the stack if there is a matching '<'. Otherwise, ignore - # this '>' since it must be an operator. - if stack: - if stack[-1] == '<': - stack.pop() - if not stack: - return (i + 1, None) - elif char == ';': - # Found something that look like end of statements. 
If we are currently - # expecting a '>', the matching '<' must have been an operator, since - # template argument list should not contain statements. - while stack and stack[-1] == '<': - stack.pop() - if not stack: - return (-1, None) - - # Did not find end of expression or unbalanced parentheses on this line - return (-1, stack) - - -def CloseExpression(clean_lines, linenum, pos): - """If input points to ( or { or [ or <, finds the position that closes it. - - If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the - linenum/pos that correspond to the closing of the expression. - - TODO(unknown): cpplint spends a fair bit of time matching parentheses. - Ideally we would want to index all opening and closing parentheses once - and have CloseExpression be just a simple lookup, but due to preprocessor - tricks, this is not so easy. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: A position on the line. - - Returns: - A tuple (line, linenum, pos) pointer *past* the closing brace, or - (line, len(lines), -1) if we never find a close. Note we ignore - strings and comments when matching; and the line we return is the - 'cleansed' line at linenum. - """ - - line = clean_lines.elided[linenum] - if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]): - return (line, clean_lines.NumLines(), -1) - - # Check first line - (end_pos, stack) = FindEndOfExpressionInLine(line, pos, []) - if end_pos > -1: - return (line, linenum, end_pos) - - # Continue scanning forward - while stack and linenum < clean_lines.NumLines() - 1: - linenum += 1 - line = clean_lines.elided[linenum] - (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack) - if end_pos > -1: - return (line, linenum, end_pos) - - # Did not find end of expression before end of file, give up - return (line, clean_lines.NumLines(), -1) - - -def FindStartOfExpressionInLine(line, endpos, stack): - """Find position at the matching start of current expression. - - This is almost the reverse of FindEndOfExpressionInLine, but note - that the input position and returned position differs by 1. - - Args: - line: a CleansedLines line. - endpos: start searching at this position. - stack: nesting stack at endpos. - - Returns: - On finding matching start: (index at matching start, None) - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at beginning of this line) - """ - i = endpos - while i >= 0: - char = line[i] - if char in ')]}': - # Found end of expression, push to expression stack - stack.append(char) - elif char == '>': - # Found potential end of template argument list. - # - # Ignore it if it's a "->" or ">=" or "operator>" - if (i > 0 and - (line[i - 1] == '-' or - Match(r'\s>=\s', line[i - 1:]) or - Search(r'\boperator\s*$', line[0:i]))): - i -= 1 - else: - stack.append('>') - elif char == '<': - # Found potential start of template argument list - if i > 0 and line[i - 1] == '<': - # Left shift operator - i -= 1 - else: - # If there is a matching '>', we can pop the expression stack. - # Otherwise, ignore this '<' since it must be an operator. - if stack and stack[-1] == '>': - stack.pop() - if not stack: - return (i, None) - elif char in '([{': - # Found start of expression. - # - # If there are any unmatched '>' on the stack, they must be - # operators. Remove those. 
- while stack and stack[-1] == '>': - stack.pop() - if not stack: - return (-1, None) - if ((char == '(' and stack[-1] == ')') or - (char == '[' and stack[-1] == ']') or - (char == '{' and stack[-1] == '}')): - stack.pop() - if not stack: - return (i, None) - else: - # Mismatched parentheses - return (-1, None) - elif char == ';': - # Found something that look like end of statements. If we are currently - # expecting a '<', the matching '>' must have been an operator, since - # template argument list should not contain statements. - while stack and stack[-1] == '>': - stack.pop() - if not stack: - return (-1, None) - - i -= 1 - - return (-1, stack) - - -def ReverseCloseExpression(clean_lines, linenum, pos): - """If input points to ) or } or ] or >, finds the position that opens it. - - If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the - linenum/pos that correspond to the opening of the expression. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: A position on the line. - - Returns: - A tuple (line, linenum, pos) pointer *at* the opening brace, or - (line, 0, -1) if we never find the matching opening brace. Note - we ignore strings and comments when matching; and the line we - return is the 'cleansed' line at linenum. - """ - line = clean_lines.elided[linenum] - if line[pos] not in ')}]>': - return (line, 0, -1) - - # Check last line - (start_pos, stack) = FindStartOfExpressionInLine(line, pos, []) - if start_pos > -1: - return (line, linenum, start_pos) - - # Continue scanning backward - while stack and linenum > 0: - linenum -= 1 - line = clean_lines.elided[linenum] - (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack) - if start_pos > -1: - return (line, linenum, start_pos) - - # Did not find start of expression before beginning of file, give up - return (line, 0, -1) - - -def CheckForCopyright(filename, lines, error): - """Logs an error if no Copyright message appears at the top of the file.""" - - # We'll say it should occur by line 10. Don't forget there's a - # dummy line at the front. - for line in xrange(1, min(len(lines), 11)): - if re.search(r'Copyright', lines[line], re.I): break - else: # means no copyright line was found - error(filename, 0, 'legal/copyright', 5, - 'No copyright message found. ' - 'You should have a line: "Copyright [year] "') - - -def GetIndentLevel(line): - """Return the number of leading spaces in line. - - Args: - line: A string to check. - - Returns: - An integer count of leading spaces, possibly zero. - """ - indent = Match(r'^( *)\S', line) - if indent: - return len(indent.group(1)) - else: - return 0 - - -def GetHeaderGuardCPPVariable(filename): - """Returns the CPP variable that should be used as a header guard. - - Args: - filename: The name of a C++ header file. - - Returns: - The CPP variable that should be used as a header guard in the - named file. - - """ - - # Restores original filename in case that cpplint is invoked from Emacs's - # flymake. - filename = re.sub(r'_flymake\.h$', '.h', filename) - filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename) - # Replace 'c++' with 'cpp'. 
- filename = filename.replace('C++', 'cpp').replace('c++', 'cpp') - - fileinfo = FileInfo(filename) - file_path_from_root = fileinfo.RepositoryName() - if _root: - file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root) - return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_' - - -def CheckForHeaderGuard(filename, clean_lines, error): - """Checks that the file contains a header guard. - - Logs an error if no #ifndef header guard is present. For other - headers, checks that the full pathname is used. - - Args: - filename: The name of the C++ header file. - clean_lines: A CleansedLines instance containing the file. - error: The function to call with any errors found. - """ - - # Don't check for header guards if there are error suppression - # comments somewhere in this file. - # - # Because this is silencing a warning for a nonexistent line, we - # only support the very specific NOLINT(build/header_guard) syntax, - # and not the general NOLINT or NOLINT(*) syntax. - raw_lines = clean_lines.lines_without_raw_strings - for i in raw_lines: - if Search(r'//\s*NOLINT\(build/header_guard\)', i): - return - - cppvar = GetHeaderGuardCPPVariable(filename) - - ifndef = '' - ifndef_linenum = 0 - define = '' - endif = '' - endif_linenum = 0 - for linenum, line in enumerate(raw_lines): - linesplit = line.split() - if len(linesplit) >= 2: - # find the first occurrence of #ifndef and #define, save arg - if not ifndef and linesplit[0] == '#ifndef': - # set ifndef to the header guard presented on the #ifndef line. - ifndef = linesplit[1] - ifndef_linenum = linenum - if not define and linesplit[0] == '#define': - define = linesplit[1] - # find the last occurrence of #endif, save entire line - if line.startswith('#endif'): - endif = line - endif_linenum = linenum - - if not ifndef or not define or ifndef != define: - error(filename, 0, 'build/header_guard', 5, - 'No #ifndef header guard found, suggested CPP variable is: %s' % - cppvar) - return - - # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__ - # for backward compatibility. - if ifndef != cppvar: - error_level = 0 - if ifndef != cppvar + '_': - error_level = 5 - - ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum, - error) - error(filename, ifndef_linenum, 'build/header_guard', error_level, - '#ifndef header guard has wrong style, please use: %s' % cppvar) - - # Check for "//" comments on endif line. - ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum, - error) - match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif) - if match: - if match.group(1) == '_': - # Issue low severity warning for deprecated double trailing underscore - error(filename, endif_linenum, 'build/header_guard', 0, - '#endif line should be "#endif // %s"' % cppvar) - return - - # Didn't find the corresponding "//" comment. If this file does not - # contain any "//" comments at all, it could be that the compiler - # only wants "/**/" comments, look for those instead. 
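  # For illustration (hypothetical path): a header checked out as
  # foo/bar/baz.h gets the guard variable FOO_BAR_BAZ_H_, and its last
  # line is expected to read either
  #     #endif  // FOO_BAR_BAZ_H_
  # or, when the file contains no "//" comments at all,
  #     #endif  /* FOO_BAR_BAZ_H_ */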
- no_single_line_comments = True - for i in xrange(1, len(raw_lines) - 1): - line = raw_lines[i] - if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line): - no_single_line_comments = False - break - - if no_single_line_comments: - match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif) - if match: - if match.group(1) == '_': - # Low severity warning for double trailing underscore - error(filename, endif_linenum, 'build/header_guard', 0, - '#endif line should be "#endif /* %s */"' % cppvar) - return - - # Didn't find anything - error(filename, endif_linenum, 'build/header_guard', 5, - '#endif line should be "#endif // %s"' % cppvar) - - -def CheckHeaderFileIncluded(filename, include_state, error): - """Logs an error if a .cc file does not include its header.""" - - # Do not check test files - if filename.endswith('_test.cc') or filename.endswith('_unittest.cc'): - return - - fileinfo = FileInfo(filename) - headerfile = filename[0:len(filename) - 2] + 'h' - if not os.path.exists(headerfile): - return - headername = FileInfo(headerfile).RepositoryName() - first_include = 0 - for section_list in include_state.include_list: - for f in section_list: - if headername in f[0] or f[0] in headername: - return - if not first_include: - first_include = f[1] - - error(filename, first_include, 'build/include', 5, - '%s should include its header file %s' % (fileinfo.RepositoryName(), - headername)) - - -def CheckForBadCharacters(filename, lines, error): - """Logs an error for each line containing bad characters. - - Two kinds of bad characters: - - 1. Unicode replacement characters: These indicate that either the file - contained invalid UTF-8 (likely) or Unicode replacement characters (which - it shouldn't). Note that it's possible for this to throw off line - numbering if the invalid UTF-8 occurred adjacent to a newline. - - 2. NUL bytes. These are problematic for some tools. - - Args: - filename: The name of the current file. - lines: An array of strings, each representing a line of the file. - error: The function to call with any errors found. - """ - for linenum, line in enumerate(lines): - if u'\ufffd' in line: - error(filename, linenum, 'readability/utf8', 5, - 'Line contains invalid UTF-8 (or Unicode replacement character).') - if '\0' in line: - error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.') - - -def CheckForNewlineAtEOF(filename, lines, error): - """Logs an error if there is no newline char at the end of the file. - - Args: - filename: The name of the current file. - lines: An array of strings, each representing a line of the file. - error: The function to call with any errors found. - """ - - # The array lines() was created by adding two newlines to the - # original file (go figure), then splitting on \n. - # To verify that the file ends in \n, we just have to make sure the - # last-but-two element of lines() exists and is empty. - if len(lines) < 3 or lines[-2]: - error(filename, len(lines) - 2, 'whitespace/ending_newline', 5, - 'Could not find a newline character at the end of the file.') - - -def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error): - """Logs an error if we see /* ... */ or "..." that extend past one line. - - /* ... */ comments are legit inside macros, for one line. - Otherwise, we prefer // comments, so it's ok to warn about the - other. Likewise, it's ok for strings to extend across multiple - lines, as long as a line continuation character (backslash) - terminates each line. 
Although not currently prohibited by the C++ - style guide, it's ugly and unnecessary. We don't do well with either - in this lint program, so we warn about both. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Remove all \\ (escaped backslashes) from the line. They are OK, and the - # second (escaped) slash may trigger later \" detection erroneously. - line = line.replace('\\\\', '') - - if line.count('/*') > line.count('*/'): - error(filename, linenum, 'readability/multiline_comment', 5, - 'Complex multi-line /*...*/-style comment found. ' - 'Lint may give bogus warnings. ' - 'Consider replacing these with //-style comments, ' - 'with #if 0...#endif, ' - 'or with more clearly structured multi-line comments.') - - if (line.count('"') - line.count('\\"')) % 2: - error(filename, linenum, 'readability/multiline_string', 5, - 'Multi-line string ("...") found. This lint script doesn\'t ' - 'do well with such strings, and may give bogus warnings. ' - 'Use C++11 raw strings or concatenation instead.') - - -# (non-threadsafe name, thread-safe alternative, validation pattern) -# -# The validation pattern is used to eliminate false positives such as: -# _rand(); // false positive due to substring match. -# ->rand(); // some member function rand(). -# ACMRandom rand(seed); // some variable named rand. -# ISAACRandom rand(); // another variable named rand. -# -# Basically we require the return value of these functions to be used -# in some expression context on the same line by matching on some -# operator before the function name. This eliminates constructors and -# member function calls. -_UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)' -_THREADING_LIST = ( - ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'), - ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'), - ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'), - ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'), - ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'), - ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'), - ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'), - ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'), - ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'), - ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'), - ('strtok(', 'strtok_r(', - _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'), - ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'), - ) - - -def CheckPosixThreading(filename, clean_lines, linenum, error): - """Checks for calls to thread-unsafe functions. - - Much code has been originally written without consideration of - multi-threading. Also, engineers are relying on their old experience; - they have learned posix before threading extensions were added. These - tests guide the engineers to use thread-safe functions (when using - posix directly). - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. 
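  For illustration, a (hypothetical) line such as

      std::puts(ctime(&now));

  matches the ctime( entry of _THREADING_LIST above and draws a suggestion
  to call ctime_r(...) instead; rand_r, strtok_r, localtime_r and the other
  *_r variants are suggested in the same way for their entries.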
- """ - line = clean_lines.elided[linenum] - for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST: - # Additional pattern matching check to confirm that this is the - # function we are looking for - if Search(pattern, line): - error(filename, linenum, 'runtime/threadsafe_fn', 2, - 'Consider using ' + multithread_safe_func + - '...) instead of ' + single_thread_func + - '...) for improved thread safety.') - - -def CheckVlogArguments(filename, clean_lines, linenum, error): - """Checks that VLOG() is only used for defining a logging level. - - For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and - VLOG(FATAL) are not. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line): - error(filename, linenum, 'runtime/vlog', 5, - 'VLOG() should be used with numeric verbosity level. ' - 'Use LOG() if you want symbolic severity levels.') - -# Matches invalid increment: *count++, which moves pointer instead of -# incrementing a value. -_RE_PATTERN_INVALID_INCREMENT = re.compile( - r'^\s*\*\w+(\+\+|--);') - - -def CheckInvalidIncrement(filename, clean_lines, linenum, error): - """Checks for invalid increment *count++. - - For example following function: - void increment_counter(int* count) { - *count++; - } - is invalid, because it effectively does count++, moving pointer, and should - be replaced with ++*count, (*count)++ or *count += 1. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - if _RE_PATTERN_INVALID_INCREMENT.match(line): - error(filename, linenum, 'runtime/invalid_increment', 5, - 'Changing pointer instead of value (or unused value of operator*).') - - -def IsMacroDefinition(clean_lines, linenum): - if Search(r'^#define', clean_lines[linenum]): - return True - - if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]): - return True - - return False - - -def IsForwardClassDeclaration(clean_lines, linenum): - return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum]) - - -class _BlockInfo(object): - """Stores information about a generic block of code.""" - - def __init__(self, seen_open_brace): - self.seen_open_brace = seen_open_brace - self.open_parentheses = 0 - self.inline_asm = _NO_ASM - self.check_namespace_indentation = False - - def CheckBegin(self, filename, clean_lines, linenum, error): - """Run checks that applies to text up to the opening brace. - - This is mostly for checking the text after the class identifier - and the "{", usually where the base class is specified. For other - blocks, there isn't much to check, so we always pass. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - pass - - def CheckEnd(self, filename, clean_lines, linenum, error): - """Run checks that applies to text after the closing brace. - - This is mostly used for checking end of namespace comments. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. 
- linenum: The number of the line to check. - error: The function to call with any errors found. - """ - pass - - def IsBlockInfo(self): - """Returns true if this block is a _BlockInfo. - - This is convenient for verifying that an object is an instance of - a _BlockInfo, but not an instance of any of the derived classes. - - Returns: - True for this class, False for derived classes. - """ - return self.__class__ == _BlockInfo - - -class _ExternCInfo(_BlockInfo): - """Stores information about an 'extern "C"' block.""" - - def __init__(self): - _BlockInfo.__init__(self, True) - - -class _ClassInfo(_BlockInfo): - """Stores information about a class.""" - - def __init__(self, name, class_or_struct, clean_lines, linenum): - _BlockInfo.__init__(self, False) - self.name = name - self.starting_linenum = linenum - self.is_derived = False - self.check_namespace_indentation = True - if class_or_struct == 'struct': - self.access = 'public' - self.is_struct = True - else: - self.access = 'private' - self.is_struct = False - - # Remember initial indentation level for this class. Using raw_lines here - # instead of elided to account for leading comments. - self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum]) - - # Try to find the end of the class. This will be confused by things like: - # class A { - # } *x = { ... - # - # But it's still good enough for CheckSectionSpacing. - self.last_line = 0 - depth = 0 - for i in range(linenum, clean_lines.NumLines()): - line = clean_lines.elided[i] - depth += line.count('{') - line.count('}') - if not depth: - self.last_line = i - break - - def CheckBegin(self, filename, clean_lines, linenum, error): - # Look for a bare ':' - if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]): - self.is_derived = True - - def CheckEnd(self, filename, clean_lines, linenum, error): - # If there is a DISALLOW macro, it should appear near the end of - # the class. - seen_last_thing_in_class = False - for i in xrange(linenum - 1, self.starting_linenum, -1): - match = Search( - r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' + - self.name + r'\)', - clean_lines.elided[i]) - if match: - if seen_last_thing_in_class: - error(filename, i, 'readability/constructors', 3, - match.group(1) + ' should be the last thing in the class') - break - - if not Match(r'^\s*$', clean_lines.elided[i]): - seen_last_thing_in_class = True - - # Check that closing brace is aligned with beginning of the class. - # Only do this if the closing brace is indented by only whitespaces. - # This means we will not check single-line class definitions. - indent = Match(r'^( *)\}', clean_lines.elided[linenum]) - if indent and len(indent.group(1)) != self.class_indent: - if self.is_struct: - parent = 'struct ' + self.name - else: - parent = 'class ' + self.name - error(filename, linenum, 'whitespace/indent', 3, - 'Closing brace should be aligned with beginning of %s' % parent) - - -class _NamespaceInfo(_BlockInfo): - """Stores information about a namespace.""" - - def __init__(self, name, linenum): - _BlockInfo.__init__(self, False) - self.name = name or '' - self.starting_linenum = linenum - self.check_namespace_indentation = True - - def CheckEnd(self, filename, clean_lines, linenum, error): - """Check end of namespace comments.""" - line = clean_lines.raw_lines[linenum] - - # Check how many lines is enclosed in this namespace. Don't issue - # warning for missing namespace comments if there aren't enough - # lines. 
However, do apply checks if there is already an end of - # namespace comment and it's incorrect. - # - # TODO(unknown): We always want to check end of namespace comments - # if a namespace is large, but sometimes we also want to apply the - # check if a short namespace contained nontrivial things (something - # other than forward declarations). There is currently no logic on - # deciding what these nontrivial things are, so this check is - # triggered by namespace size only, which works most of the time. - if (linenum - self.starting_linenum < 10 - and not Match(r'};*\s*(//|/\*).*\bnamespace\b', line)): - return - - # Look for matching comment at end of namespace. - # - # Note that we accept C style "/* */" comments for terminating - # namespaces, so that code that terminate namespaces inside - # preprocessor macros can be cpplint clean. - # - # We also accept stuff like "// end of namespace ." with the - # period at the end. - # - # Besides these, we don't accept anything else, otherwise we might - # get false negatives when existing comment is a substring of the - # expected namespace. - if self.name: - # Named namespace - if not Match((r'};*\s*(//|/\*).*\bnamespace\s+' + re.escape(self.name) + - r'[\*/\.\\\s]*$'), - line): - error(filename, linenum, 'readability/namespace', 5, - 'Namespace should be terminated with "// namespace %s"' % - self.name) - else: - # Anonymous namespace - if not Match(r'};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line): - # If "// namespace anonymous" or "// anonymous namespace (more text)", - # mention "// anonymous namespace" as an acceptable form - if Match(r'}.*\b(namespace anonymous|anonymous namespace)\b', line): - error(filename, linenum, 'readability/namespace', 5, - 'Anonymous namespace should be terminated with "// namespace"' - ' or "// anonymous namespace"') - else: - error(filename, linenum, 'readability/namespace', 5, - 'Anonymous namespace should be terminated with "// namespace"') - - -class _PreprocessorInfo(object): - """Stores checkpoints of nesting stacks when #if/#else is seen.""" - - def __init__(self, stack_before_if): - # The entire nesting stack before #if - self.stack_before_if = stack_before_if - - # The entire nesting stack up to #else - self.stack_before_else = [] - - # Whether we have already seen #else or #elif - self.seen_else = False - - -class NestingState(object): - """Holds states related to parsing braces.""" - - def __init__(self): - # Stack for tracking all braces. An object is pushed whenever we - # see a "{", and popped when we see a "}". Only 3 types of - # objects are possible: - # - _ClassInfo: a class or struct. - # - _NamespaceInfo: a namespace. - # - _BlockInfo: some other type of block. - self.stack = [] - - # Top of the previous stack before each Update(). - # - # Because the nesting_stack is updated at the end of each line, we - # had to do some convoluted checks to find out what is the current - # scope at the beginning of the line. This check is simplified by - # saving the previous top of nesting stack. - # - # We could save the full stack, but we only need the top. Copying - # the full nesting stack would slow down cpplint by ~10%. - self.previous_stack_top = [] - - # Stack of _PreprocessorInfo objects. - self.pp_stack = [] - - def SeenOpenBrace(self): - """Check if we have seen the opening brace for the innermost block. - - Returns: - True if we have seen the opening brace, False if the innermost - block is still expecting an opening brace. 
- """ - return (not self.stack) or self.stack[-1].seen_open_brace - - def InNamespaceBody(self): - """Check if we are currently one level inside a namespace body. - - Returns: - True if top of the stack is a namespace block, False otherwise. - """ - return self.stack and isinstance(self.stack[-1], _NamespaceInfo) - - def InExternC(self): - """Check if we are currently one level inside an 'extern "C"' block. - - Returns: - True if top of the stack is an extern block, False otherwise. - """ - return self.stack and isinstance(self.stack[-1], _ExternCInfo) - - def InClassDeclaration(self): - """Check if we are currently one level inside a class or struct declaration. - - Returns: - True if top of the stack is a class/struct, False otherwise. - """ - return self.stack and isinstance(self.stack[-1], _ClassInfo) - - def InAsmBlock(self): - """Check if we are currently one level inside an inline ASM block. - - Returns: - True if the top of the stack is a block containing inline ASM. - """ - return self.stack and self.stack[-1].inline_asm != _NO_ASM - - def InTemplateArgumentList(self, clean_lines, linenum, pos): - """Check if current position is inside template argument list. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: position just after the suspected template argument. - Returns: - True if (linenum, pos) is inside template arguments. - """ - while linenum < clean_lines.NumLines(): - # Find the earliest character that might indicate a template argument - line = clean_lines.elided[linenum] - match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:]) - if not match: - linenum += 1 - pos = 0 - continue - token = match.group(1) - pos += len(match.group(0)) - - # These things do not look like template argument list: - # class Suspect { - # class Suspect x; } - if token in ('{', '}', ';'): return False - - # These things look like template argument list: - # template - # template - # template - # template - if token in ('>', '=', '[', ']', '.'): return True - - # Check if token is an unmatched '<'. - # If not, move on to the next character. - if token != '<': - pos += 1 - if pos >= len(line): - linenum += 1 - pos = 0 - continue - - # We can't be sure if we just find a single '<', and need to - # find the matching '>'. - (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1) - if end_pos < 0: - # Not sure if template argument list or syntax error in file - return False - linenum = end_line - pos = end_pos - return False - - def UpdatePreprocessor(self, line): - """Update preprocessor stack. - - We need to handle preprocessors due to classes like this: - #ifdef SWIG - struct ResultDetailsPageElementExtensionPoint { - #else - struct ResultDetailsPageElementExtensionPoint : public Extension { - #endif - - We make the following assumptions (good enough for most files): - - Preprocessor condition evaluates to true from #if up to first - #else/#elif/#endif. - - - Preprocessor condition evaluates to false from #else/#elif up - to #endif. We still perform lint checks on these lines, but - these do not affect nesting stack. - - Args: - line: current line to check. - """ - if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line): - # Beginning of #if block, save the nesting stack here. The saved - # stack will allow us to restore the parsing state in the #else case. 
- self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack))) - elif Match(r'^\s*#\s*(else|elif)\b', line): - # Beginning of #else block - if self.pp_stack: - if not self.pp_stack[-1].seen_else: - # This is the first #else or #elif block. Remember the - # whole nesting stack up to this point. This is what we - # keep after the #endif. - self.pp_stack[-1].seen_else = True - self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack) - - # Restore the stack to how it was before the #if - self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if) - else: - # TODO(unknown): unexpected #else, issue warning? - pass - elif Match(r'^\s*#\s*endif\b', line): - # End of #if or #else blocks. - if self.pp_stack: - # If we saw an #else, we will need to restore the nesting - # stack to its former state before the #else, otherwise we - # will just continue from where we left off. - if self.pp_stack[-1].seen_else: - # Here we can just use a shallow copy since we are the last - # reference to it. - self.stack = self.pp_stack[-1].stack_before_else - # Drop the corresponding #if - self.pp_stack.pop() - else: - # TODO(unknown): unexpected #endif, issue warning? - pass - - # TODO(unknown): Update() is too long, but we will refactor later. - def Update(self, filename, clean_lines, linenum, error): - """Update nesting state with current line. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Remember top of the previous nesting stack. - # - # The stack is always pushed/popped and not modified in place, so - # we can just do a shallow copy instead of copy.deepcopy. Using - # deepcopy would slow down cpplint by ~28%. - if self.stack: - self.previous_stack_top = self.stack[-1] - else: - self.previous_stack_top = None - - # Update pp_stack - self.UpdatePreprocessor(line) - - # Count parentheses. This is to avoid adding struct arguments to - # the nesting stack. - if self.stack: - inner_block = self.stack[-1] - depth_change = line.count('(') - line.count(')') - inner_block.open_parentheses += depth_change - - # Also check if we are starting or ending an inline assembly block. - if inner_block.inline_asm in (_NO_ASM, _END_ASM): - if (depth_change != 0 and - inner_block.open_parentheses == 1 and - _MATCH_ASM.match(line)): - # Enter assembly block - inner_block.inline_asm = _INSIDE_ASM - else: - # Not entering assembly block. If previous line was _END_ASM, - # we will now shift to _NO_ASM state. - inner_block.inline_asm = _NO_ASM - elif (inner_block.inline_asm == _INSIDE_ASM and - inner_block.open_parentheses == 0): - # Exit assembly block - inner_block.inline_asm = _END_ASM - - # Consume namespace declaration at the beginning of the line. Do - # this in a loop so that we catch same line declarations like this: - # namespace proto2 { namespace bridge { class MessageSet; } } - while True: - # Match start of namespace. The "\b\s*" below catches namespace - # declarations even if it weren't followed by a whitespace, this - # is so that we don't confuse our namespace checker. The - # missing spaces will be flagged by CheckSpacing. 
- namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line) - if not namespace_decl_match: - break - - new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum) - self.stack.append(new_namespace) - - line = namespace_decl_match.group(2) - if line.find('{') != -1: - new_namespace.seen_open_brace = True - line = line[line.find('{') + 1:] - - # Look for a class declaration in whatever is left of the line - # after parsing namespaces. The regexp accounts for decorated classes - # such as in: - # class LOCKABLE API Object { - # }; - class_decl_match = Match( - r'^(\s*(?:template\s*<[\w\s<>,:]*>\s*)?' - r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))' - r'(.*)$', line) - if (class_decl_match and - (not self.stack or self.stack[-1].open_parentheses == 0)): - # We do not want to accept classes that are actually template arguments: - # template , - # template class Ignore3> - # void Function() {}; - # - # To avoid template argument cases, we scan forward and look for - # an unmatched '>'. If we see one, assume we are inside a - # template argument list. - end_declaration = len(class_decl_match.group(1)) - if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration): - self.stack.append(_ClassInfo( - class_decl_match.group(3), class_decl_match.group(2), - clean_lines, linenum)) - line = class_decl_match.group(4) - - # If we have not yet seen the opening brace for the innermost block, - # run checks here. - if not self.SeenOpenBrace(): - self.stack[-1].CheckBegin(filename, clean_lines, linenum, error) - - # Update access control if we are inside a class/struct - if self.stack and isinstance(self.stack[-1], _ClassInfo): - classinfo = self.stack[-1] - access_match = Match( - r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?' - r':(?:[^:]|$)', - line) - if access_match: - classinfo.access = access_match.group(2) - - # Check that access keywords are indented +1 space. Skip this - # check if the keywords are not preceded by whitespaces. - indent = access_match.group(1) - if (len(indent) != classinfo.class_indent + 1 and - Match(r'^\s*$', indent)): - if classinfo.is_struct: - parent = 'struct ' + classinfo.name - else: - parent = 'class ' + classinfo.name - slots = '' - if access_match.group(3): - slots = access_match.group(3) - error(filename, linenum, 'whitespace/indent', 3, - '%s%s: should be indented +1 space inside %s' % ( - access_match.group(2), slots, parent)) - - # Consume braces or semicolons from what's left of the line - while True: - # Match first brace, semicolon, or closed parenthesis. - matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line) - if not matched: - break - - token = matched.group(1) - if token == '{': - # If namespace or class hasn't seen a opening brace yet, mark - # namespace/class head as complete. Push a new block onto the - # stack otherwise. - if not self.SeenOpenBrace(): - self.stack[-1].seen_open_brace = True - elif Match(r'^extern\s*"[^"]*"\s*\{', line): - self.stack.append(_ExternCInfo()) - else: - self.stack.append(_BlockInfo(True)) - if _MATCH_ASM.match(line): - self.stack[-1].inline_asm = _BLOCK_ASM - - elif token == ';' or token == ')': - # If we haven't seen an opening brace yet, but we already saw - # a semicolon, this is probably a forward declaration. Pop - # the stack for these. - # - # Similarly, if we haven't seen an opening brace yet, but we - # already saw a closing parenthesis, then these are probably - # function arguments with extra "class" or "struct" keywords. - # Also pop these stack for these. 
- if not self.SeenOpenBrace(): - self.stack.pop() - else: # token == '}' - # Perform end of block checks and pop the stack. - if self.stack: - self.stack[-1].CheckEnd(filename, clean_lines, linenum, error) - self.stack.pop() - line = matched.group(2) - - def InnermostClass(self): - """Get class info on the top of the stack. - - Returns: - A _ClassInfo object if we are inside a class, or None otherwise. - """ - for i in range(len(self.stack), 0, -1): - classinfo = self.stack[i - 1] - if isinstance(classinfo, _ClassInfo): - return classinfo - return None - - def CheckCompletedBlocks(self, filename, error): - """Checks that all classes and namespaces have been completely parsed. - - Call this when all lines in a file have been processed. - Args: - filename: The name of the current file. - error: The function to call with any errors found. - """ - # Note: This test can result in false positives if #ifdef constructs - # get in the way of brace matching. See the testBuildClass test in - # cpplint_unittest.py for an example of this. - for obj in self.stack: - if isinstance(obj, _ClassInfo): - error(filename, obj.starting_linenum, 'build/class', 5, - 'Failed to find complete declaration of class %s' % - obj.name) - elif isinstance(obj, _NamespaceInfo): - error(filename, obj.starting_linenum, 'build/namespaces', 5, - 'Failed to find complete declaration of namespace %s' % - obj.name) - - -def CheckForNonStandardConstructs(filename, clean_lines, linenum, - nesting_state, error): - r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2. - - Complain about several constructs which gcc-2 accepts, but which are - not standard C++. Warning about these in lint is one way to ease the - transition to new compilers. - - put storage class first (e.g. "static const" instead of "const static"). - - "%lld" instead of %qd" in printf-type functions. - - "%1$d" is non-standard in printf-type functions. - - "\%" is an undefined character escape sequence. - - text after #endif is not allowed. - - invalid inner-style forward declaration. - - >? and ?= and )\?=?\s*(\w+|[+-]?\d+)(\.\d*)?', - line): - error(filename, linenum, 'build/deprecated', 3, - '>? and ))?' - # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;' - error(filename, linenum, 'runtime/member_string_references', 2, - 'const string& members are dangerous. It is much better to use ' - 'alternatives, such as pointers or simple constants.') - - # Everything else in this function operates on class declarations. - # Return early if the top of the nesting stack is not a class, or if - # the class head is not completed yet. - classinfo = nesting_state.InnermostClass() - if not classinfo or not classinfo.seen_open_brace: - return - - # The class may have been declared with namespace or classname qualifiers. - # The constructor and destructor will not have those qualifiers. - base_classname = classinfo.name.split('::')[-1] - - # Look for single-argument constructors that aren't marked explicit. - # Technically a valid construct, but against style. Also look for - # non-single-argument constructors which are also technically valid, but - # strongly suggest something is wrong. 
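The regular expression built on the next lines drives that search; a standalone illustration of its two capture groups, using Python's re directly and a hypothetical class name Foo (in the real check the name comes from the innermost _ClassInfo):

import re

base_classname = 'Foo'  # hypothetical class name for the illustration
pattern = (r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
           r'\(((?:[^()]|\([^()]*\))*)\)' % re.escape(base_classname))

m = re.match(pattern, '  Foo(int x);')
print(m.group(1), repr(m.group(2)))   # None 'int x'   -> one argument, not marked explicit
m = re.match(pattern, '  explicit Foo(int x);')
print(m.group(1), repr(m.group(2)))   # explicit  'int x'  -> already explicit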
- explicit_constructor_match = Match( - r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*' - r'\(((?:[^()]|\([^()]*\))*)\)' - % re.escape(base_classname), - line) - - if explicit_constructor_match: - is_marked_explicit = explicit_constructor_match.group(1) - - if not explicit_constructor_match.group(2): - constructor_args = [] - else: - constructor_args = explicit_constructor_match.group(2).split(',') - - # collapse arguments so that commas in template parameter lists and function - # argument parameter lists don't split arguments in two - i = 0 - while i < len(constructor_args): - constructor_arg = constructor_args[i] - while (constructor_arg.count('<') > constructor_arg.count('>') or - constructor_arg.count('(') > constructor_arg.count(')')): - constructor_arg += ',' + constructor_args[i + 1] - del constructor_args[i + 1] - constructor_args[i] = constructor_arg - i += 1 - - defaulted_args = [arg for arg in constructor_args if '=' in arg] - noarg_constructor = (not constructor_args or # empty arg list - # 'void' arg specifier - (len(constructor_args) == 1 and - constructor_args[0].strip() == 'void')) - onearg_constructor = ((len(constructor_args) == 1 and # exactly one arg - not noarg_constructor) or - # all but at most one arg defaulted - (len(constructor_args) >= 1 and - not noarg_constructor and - len(defaulted_args) >= len(constructor_args) - 1)) - initializer_list_constructor = bool( - onearg_constructor and - Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0])) - copy_constructor = bool( - onearg_constructor and - Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&' - % re.escape(base_classname), constructor_args[0].strip())) - - if (not is_marked_explicit and - onearg_constructor and - not initializer_list_constructor and - not copy_constructor): - if defaulted_args: - error(filename, linenum, 'runtime/explicit', 5, - 'Constructors callable with one argument ' - 'should be marked explicit.') - else: - error(filename, linenum, 'runtime/explicit', 5, - 'Single-parameter constructors should be marked explicit.') - elif is_marked_explicit and not onearg_constructor: - if noarg_constructor: - error(filename, linenum, 'runtime/explicit', 5, - 'Zero-parameter constructors should not be marked explicit.') - else: - error(filename, linenum, 'runtime/explicit', 0, - 'Constructors that require multiple arguments ' - 'should not be marked explicit.') - - -def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error): - """Checks for the correctness of various spacing around function calls. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Since function calls often occur inside if/for/while/switch - # expressions - which have their own, more liberal conventions - we - # first see if we should be looking inside such an expression for a - # function call, to which we can apply more strict standards. - fncall = line # if there's no control flow construct, look at whole line - for pattern in (r'\bif\s*\((.*)\)\s*{', - r'\bfor\s*\((.*)\)\s*{', - r'\bwhile\s*\((.*)\)\s*[{;]', - r'\bswitch\s*\((.*)\)\s*{'): - match = Search(pattern, line) - if match: - fncall = match.group(1) # look inside the parens for function calls - break - - # Except in if/for/while/switch, there should never be space - # immediately inside parens (eg "f( 3, 4 )"). 
We make an exception - # for nested parens ( (a+b) + c ). Likewise, there should never be - # a space before a ( when it's a function argument. I assume it's a - # function argument when the char before the whitespace is legal in - # a function name (alnum + _) and we're not starting a macro. Also ignore - # pointers and references to arrays and functions coz they're too tricky: - # we use a very simple way to recognize these: - # " (something)(maybe-something)" or - # " (something)(maybe-something," or - # " (something)[something]" - # Note that we assume the contents of [] to be short enough that - # they'll never need to wrap. - if ( # Ignore control structures. - not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b', - fncall) and - # Ignore pointers/references to functions. - not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and - # Ignore pointers/references to arrays. - not Search(r' \([^)]+\)\[[^\]]+\]', fncall)): - if Search(r'\w\s*\(\s(?!\s*\\$)', fncall): # a ( used for a fn call - error(filename, linenum, 'whitespace/parens', 4, - 'Extra space after ( in function call') - elif Search(r'\(\s+(?!(\s*\\)|\()', fncall): - error(filename, linenum, 'whitespace/parens', 2, - 'Extra space after (') - if (Search(r'\w\s+\(', fncall) and - not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and - not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and - not Search(r'\bcase\s+\(', fncall)): - # TODO(unknown): Space after an operator function seem to be a common - # error, silence those for now by restricting them to highest verbosity. - if Search(r'\boperator_*\b', line): - error(filename, linenum, 'whitespace/parens', 0, - 'Extra space before ( in function call') - else: - error(filename, linenum, 'whitespace/parens', 4, - 'Extra space before ( in function call') - # If the ) is followed only by a newline or a { + newline, assume it's - # part of a control statement (if/while/etc), and don't complain - if Search(r'[^)]\s+\)\s*[^{\s]', fncall): - # If the closing parenthesis is preceded by only whitespaces, - # try to give a more descriptive error message. - if Search(r'^\s+\)', fncall): - error(filename, linenum, 'whitespace/parens', 2, - 'Closing ) should be moved to the previous line') - else: - error(filename, linenum, 'whitespace/parens', 2, - 'Extra space before )') - - -def IsBlankLine(line): - """Returns true if the given line is blank. - - We consider a line to be blank if the line is empty or consists of - only white spaces. - - Args: - line: A line of a string. - - Returns: - True, if the given line is blank. - """ - return not line or line.isspace() - - -def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line, - error): - is_namespace_indent_item = ( - len(nesting_state.stack) > 1 and - nesting_state.stack[-1].check_namespace_indentation and - isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and - nesting_state.previous_stack_top == nesting_state.stack[-2]) - - if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item, - clean_lines.elided, line): - CheckItemIndentationInNamespace(filename, clean_lines.elided, - line, error) - - -def CheckForFunctionLengths(filename, clean_lines, linenum, - function_state, error): - """Reports for long function bodies. - - For an overview why this is done, see: - http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions - - Uses a simplistic algorithm assuming other style guidelines - (especially spacing) are followed. 
- Only checks unindented functions, so class members are unchecked. - Trivial bodies are unchecked, so constructors with huge initializer lists - may be missed. - Blank/comment lines are not counted so as to avoid encouraging the removal - of vertical space and comments just to get through a lint check. - NOLINT *on the last line of a function* disables this check. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - function_state: Current function name and lines in body so far. - error: The function to call with any errors found. - """ - lines = clean_lines.lines - line = lines[linenum] - joined_line = '' - - starting_func = False - regexp = r'(\w(\w|::|\*|\&|\s)*)\(' # decls * & space::name( ... - match_result = Match(regexp, line) - if match_result: - # If the name is all caps and underscores, figure it's a macro and - # ignore it, unless it's TEST or TEST_F. - function_name = match_result.group(1).split()[-1] - if function_name == 'TEST' or function_name == 'TEST_F' or ( - not Match(r'[A-Z_]+$', function_name)): - starting_func = True - - if starting_func: - body_found = False - for start_linenum in xrange(linenum, clean_lines.NumLines()): - start_line = lines[start_linenum] - joined_line += ' ' + start_line.lstrip() - if Search(r'(;|})', start_line): # Declarations and trivial functions - body_found = True - break # ... ignore - elif Search(r'{', start_line): - body_found = True - function = Search(r'((\w|:)*)\(', line).group(1) - if Match(r'TEST', function): # Handle TEST... macros - parameter_regexp = Search(r'(\(.*\))', joined_line) - if parameter_regexp: # Ignore bad syntax - function += parameter_regexp.group(1) - else: - function += '()' - function_state.Begin(function) - break - if not body_found: - # No body for the function (or evidence of a non-function) was found. - error(filename, linenum, 'readability/fn_size', 5, - 'Lint failed to find start of function body.') - elif Match(r'^\}\s*$', line): # function end - function_state.Check(error, filename, linenum) - function_state.End() - elif not Match(r'^\s*$', line): - function_state.Count() # Count non-blank/non-comment lines. - - -_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?') - - -def CheckComment(line, filename, linenum, next_line_start, error): - """Checks for common mistakes in comments. - - Args: - line: The line in question. - filename: The name of the current file. - linenum: The number of the line to check. - next_line_start: The first non-whitespace column of the next line. - error: The function to call with any errors found. - """ - commentpos = line.find('//') - if commentpos != -1: - # Check if the // may be in quotes. If so, ignore it - # Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison - if (line.count('"', 0, commentpos) - - line.count('\\"', 0, commentpos)) % 2 == 0: # not in quotes - # Allow one space for new scopes, two spaces otherwise: - if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and - ((commentpos >= 1 and - line[commentpos-1] not in string.whitespace) or - (commentpos >= 2 and - line[commentpos-2] not in string.whitespace))): - error(filename, linenum, 'whitespace/comments', 2, - 'At least two spaces is best between code and comments') - - # Checks for common mistakes in TODO comments. 
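A standalone illustration of what _RE_PATTERN_TODO (defined above) captures, since the checks below consult its three groups for the leading whitespace, the username, and the separator after the username:

import re

_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')

print(_RE_PATTERN_TODO.match('// TODO(alice): fix this').groups())
# (' ', '(alice)', ' ')   -> well formed, no warning
print(_RE_PATTERN_TODO.match('//   TODO: fix this').groups())
# ('   ', None, ' ')      -> too many spaces before TODO, and missing username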
- comment = line[commentpos:] - match = _RE_PATTERN_TODO.match(comment) - if match: - # One whitespace is correct; zero whitespace is handled elsewhere. - leading_whitespace = match.group(1) - if len(leading_whitespace) > 1: - error(filename, linenum, 'whitespace/todo', 2, - 'Too many spaces before TODO') - - username = match.group(2) - if not username: - error(filename, linenum, 'readability/todo', 2, - 'Missing username in TODO; it should look like ' - '"// TODO(my_username): Stuff."') - - middle_whitespace = match.group(3) - # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison - if middle_whitespace != ' ' and middle_whitespace != '': - error(filename, linenum, 'whitespace/todo', 2, - 'TODO(my_username) should be followed by a space') - - # If the comment contains an alphanumeric character, there - # should be a space somewhere between it and the // unless - # it's a /// or //! Doxygen comment. - if (Match(r'//[^ ]*\w', comment) and - not Match(r'(///|//\!)(\s+|$)', comment)): - error(filename, linenum, 'whitespace/comments', 4, - 'Should have a space between // and comment') - - -def CheckAccess(filename, clean_lines, linenum, nesting_state, error): - """Checks for improper use of DISALLOW* macros. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] # get rid of comments and strings - - matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|' - r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line) - if not matched: - return - if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo): - if nesting_state.stack[-1].access != 'private': - error(filename, linenum, 'readability/constructors', 3, - '%s must be in the private: section' % matched.group(1)) - - else: - # Found DISALLOW* macro outside a class declaration, or perhaps it - # was used inside a function when it should have been part of the - # class declaration. We could issue a warning here, but it - # probably resulted in a compiler error already. - pass - - -def CheckSpacing(filename, clean_lines, linenum, nesting_state, error): - """Checks for the correctness of various spacing issues in the code. - - Things we check for: spaces around operators, spaces after - if/for/while/switch, no spaces around parens in function calls, two - spaces between code and comment, don't start a block with a blank - line, don't end a function with a blank line, don't add a blank line - after public/protected/private, don't have too many blank lines in a row. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - - # Don't use "elided" lines here, otherwise we can't check commented lines. - # Don't want to use "raw" either, because we don't want to check inside C++11 - # raw strings, - raw = clean_lines.lines_without_raw_strings - line = raw[linenum] - - # Before nixing comments, check if the line is blank for no good - # reason. 
This includes the first line after a block is opened, and - # blank lines at the end of a function (ie, right before a line like '}' - # - # Skip all the blank line checks if we are immediately inside a - # namespace body. In other words, don't issue blank line warnings - # for this block: - # namespace { - # - # } - # - # A warning about missing end of namespace comments will be issued instead. - # - # Also skip blank line checks for 'extern "C"' blocks, which are formatted - # like namespaces. - if (IsBlankLine(line) and - not nesting_state.InNamespaceBody() and - not nesting_state.InExternC()): - elided = clean_lines.elided - prev_line = elided[linenum - 1] - prevbrace = prev_line.rfind('{') - # TODO(unknown): Don't complain if line before blank line, and line after, - # both start with alnums and are indented the same amount. - # This ignores whitespace at the start of a namespace block - # because those are not usually indented. - if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1: - # OK, we have a blank line at the start of a code block. Before we - # complain, we check if it is an exception to the rule: The previous - # non-empty line has the parameters of a function header that are indented - # 4 spaces (because they did not fit in a 80 column line when placed on - # the same line as the function name). We also check for the case where - # the previous line is indented 6 spaces, which may happen when the - # initializers of a constructor do not fit into a 80 column line. - exception = False - if Match(r' {6}\w', prev_line): # Initializer list? - # We are looking for the opening column of initializer list, which - # should be indented 4 spaces to cause 6 space indentation afterwards. - search_position = linenum-2 - while (search_position >= 0 - and Match(r' {6}\w', elided[search_position])): - search_position -= 1 - exception = (search_position >= 0 - and elided[search_position][:5] == ' :') - else: - # Search for the function arguments or an initializer list. We use a - # simple heuristic here: If the line is indented 4 spaces; and we have a - # closing paren, without the opening paren, followed by an opening brace - # or colon (for initializer lists) we assume that it is the last line of - # a function header. If we have a colon indented 4 spaces, it is an - # initializer list. 
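The heuristic applied on the next lines boils down to two regular expressions; a quick standalone check of the shapes they accept as the last line of a wrapped function header or the start of an initializer list:

import re

header_end = r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)'
print(bool(re.match(header_end, '    int second_arg) {')))         # True
print(bool(re.match(header_end, '    int second_arg) const {')))   # True
print(bool(re.match(r' {4}:', '    : member_(0) {')))              # True (initializer list)
print(bool(re.match(header_end, '  int indented_two_spaces) {')))  # False (not 4 spaces)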
- exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)', - prev_line) - or Match(r' {4}:', prev_line)) - - if not exception: - error(filename, linenum, 'whitespace/blank_line', 2, - 'Redundant blank line at the start of a code block ' - 'should be deleted.') - # Ignore blank lines at the end of a block in a long if-else - # chain, like this: - # if (condition1) { - # // Something followed by a blank line - # - # } else if (condition2) { - # // Something else - # } - if linenum + 1 < clean_lines.NumLines(): - next_line = raw[linenum + 1] - if (next_line - and Match(r'\s*}', next_line) - and next_line.find('} else ') == -1): - error(filename, linenum, 'whitespace/blank_line', 3, - 'Redundant blank line at the end of a code block ' - 'should be deleted.') - - matched = Match(r'\s*(public|protected|private):', prev_line) - if matched: - error(filename, linenum, 'whitespace/blank_line', 3, - 'Do not leave a blank line after "%s:"' % matched.group(1)) - - # Next, check comments - next_line_start = 0 - if linenum + 1 < clean_lines.NumLines(): - next_line = raw[linenum + 1] - next_line_start = len(next_line) - len(next_line.lstrip()) - CheckComment(line, filename, linenum, next_line_start, error) - - # get rid of comments and strings - line = clean_lines.elided[linenum] - - # You shouldn't have spaces before your brackets, except maybe after - # 'delete []' or 'return []() {};' - if Search(r'\w\s+\[', line) and not Search(r'(?:delete|return)\s+\[', line): - error(filename, linenum, 'whitespace/braces', 5, - 'Extra space before [') - - # In range-based for, we wanted spaces before and after the colon, but - # not around "::" tokens that might appear. - if (Search(r'for *\(.*[^:]:[^: ]', line) or - Search(r'for *\(.*[^: ]:[^:]', line)): - error(filename, linenum, 'whitespace/forcolon', 2, - 'Missing space around colon in range-based for loop') - - -def CheckOperatorSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing around operators. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Don't try to do spacing checks for operator methods. Do this by - # replacing the troublesome characters with something else, - # preserving column position for all other characters. - # - # The replacement is done repeatedly to avoid false positives from - # operators that call operators. - while True: - match = Match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line) - if match: - line = match.group(1) + ('_' * len(match.group(2))) + match.group(3) - else: - break - - # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )". - # Otherwise not. Note we only check for non-spaces on *both* sides; - # sometimes people put non-spaces on one side when aligning ='s among - # many lines (not that this is behavior that I approve of...) - if ((Search(r'[\w.]=', line) or - Search(r'=[\w.]', line)) - and not Search(r'\b(if|while|for) ', line) - # Operators taken from [lex.operators] in C++11 standard. - and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line) - and not Search(r'operator=', line)): - error(filename, linenum, 'whitespace/operators', 4, - 'Missing spaces around =') - - # It's ok not to have spaces around binary operators like + - * /, but if - # there's too little whitespace, we get concerned. It's hard to tell, - # though, so we punt on this one for now. TODO. 
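Restating the '=' test just above as a small standalone function makes its exemptions easier to see; the function name is illustrative only, and re.search stands in for cpplint's Search wrapper:

import re

def missing_spaces_around_eq(line):
  """Mirror of the condition above: flag a=b unless it sits in an if/while/for
  condition, is a compound assignment or comparison, or is operator=."""
  return bool((re.search(r'[\w.]=', line) or re.search(r'=[\w.]', line))
              and not re.search(r'\b(if|while|for) ', line)
              and not re.search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
              and not re.search(r'operator=', line))

print(missing_spaces_around_eq('int a=1;'))              # True: flagged
print(missing_spaces_around_eq('int a = 1;'))            # False
print(missing_spaces_around_eq('if (a=Foo()) return;'))  # False: allowed inside if
print(missing_spaces_around_eq('a+=1;'))                 # False: compound assignment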
- - # You should always have whitespace around binary operators. - # - # Check <= and >= first to avoid false positives with < and >, then - # check non-include lines for spacing around < and >. - # - # If the operator is followed by a comma, assume it's be used in a - # macro context and don't do any checks. This avoids false - # positives. - # - # Note that && is not included here. Those are checked separately - # in CheckRValueReference - match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\)]', line) - if match: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around %s' % match.group(1)) - elif not Match(r'#.*include', line): - # Look for < that is not surrounded by spaces. This is only - # triggered if both sides are missing spaces, even though - # technically should should flag if at least one side is missing a - # space. This is done to avoid some false positives with shifts. - match = Match(r'^(.*[^\s<])<[^\s=<,]', line) - if match: - (_, _, end_pos) = CloseExpression( - clean_lines, linenum, len(match.group(1))) - if end_pos <= -1: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around <') - - # Look for > that is not surrounded by spaces. Similar to the - # above, we only trigger if both sides are missing spaces to avoid - # false positives with shifts. - match = Match(r'^(.*[^-\s>])>[^\s=>,]', line) - if match: - (_, _, start_pos) = ReverseCloseExpression( - clean_lines, linenum, len(match.group(1))) - if start_pos <= -1: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around >') - - # We allow no-spaces around << when used like this: 10<<20, but - # not otherwise (particularly, not when used as streams) - # - # We also allow operators following an opening parenthesis, since - # those tend to be macros that deal with operators. - match = Search(r'(operator|[^\s(<])(?:L|UL|ULL|l|ul|ull)?<<([^\s,=<])', line) - if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and - not (match.group(1) == 'operator' and match.group(2) == ';')): - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around <<') - - # We allow no-spaces around >> for almost anything. This is because - # C++11 allows ">>" to close nested templates, which accounts for - # most cases when ">>" is not followed by a space. - # - # We still warn on ">>" followed by alpha character, because that is - # likely due to ">>" being used for right shifts, e.g.: - # value >> alpha - # - # When ">>" is used to close templates, the alphanumeric letter that - # follows would be part of an identifier, and there should still be - # a space separating the template type and the identifier. - # type> alpha - match = Search(r'>>[a-zA-Z_]', line) - if match: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around >>') - - # There shouldn't be space around unary operators - match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line) - if match: - error(filename, linenum, 'whitespace/operators', 4, - 'Extra space for operator %s' % match.group(1)) - - -def CheckParenthesisSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing around parentheses. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. 
- """ - line = clean_lines.elided[linenum] - - # No spaces after an if, while, switch, or for - match = Search(r' (if\(|for\(|while\(|switch\()', line) - if match: - error(filename, linenum, 'whitespace/parens', 5, - 'Missing space before ( in %s' % match.group(1)) - - # For if/for/while/switch, the left and right parens should be - # consistent about how many spaces are inside the parens, and - # there should either be zero or one spaces inside the parens. - # We don't want: "if ( foo)" or "if ( foo )". - # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed. - match = Search(r'\b(if|for|while|switch)\s*' - r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$', - line) - if match: - if len(match.group(2)) != len(match.group(4)): - if not (match.group(3) == ';' and - len(match.group(2)) == 1 + len(match.group(4)) or - not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)): - error(filename, linenum, 'whitespace/parens', 5, - 'Mismatching spaces inside () in %s' % match.group(1)) - if len(match.group(2)) not in [0, 1]: - error(filename, linenum, 'whitespace/parens', 5, - 'Should have zero or one spaces inside ( and ) in %s' % - match.group(1)) - - -def CheckCommaSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing near commas and semicolons. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - raw = clean_lines.lines_without_raw_strings - line = clean_lines.elided[linenum] - - # You should always have a space after a comma (either as fn arg or operator) - # - # This does not apply when the non-space character following the - # comma is another comma, since the only time when that happens is - # for empty macro arguments. - # - # We run this check in two passes: first pass on elided lines to - # verify that lines contain missing whitespaces, second pass on raw - # lines to confirm that those missing whitespaces are not due to - # elided comments. - if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\(', 'F(', line)) and - Search(r',[^,\s]', raw[linenum])): - error(filename, linenum, 'whitespace/comma', 3, - 'Missing space after ,') - - # You should always have a space after a semicolon - # except for few corner cases - # TODO(unknown): clarify if 'if (1) { return 1;}' is requires one more - # space after ; - if Search(r';[^\s};\\)/]', line): - error(filename, linenum, 'whitespace/semicolon', 3, - 'Missing space after ;') - - -def CheckBracesSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing near commas. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Except after an opening paren, or after another opening brace (in case of - # an initializer list, for instance), you should have spaces before your - # braces. And since you should never have braces at the beginning of a line, - # this is an easy test. - match = Match(r'^(.*[^ ({>]){', line) - if match: - # Try a bit harder to check for brace initialization. This - # happens in one of the following forms: - # Constructor() : initializer_list_{} { ... 
} - # Constructor{}.MemberFunction() - # Type variable{}; - # FunctionCall(type{}, ...); - # LastArgument(..., type{}); - # LOG(INFO) << type{} << " ..."; - # map_of_type[{...}] = ...; - # ternary = expr ? new type{} : nullptr; - # OuterTemplate{}> - # - # We check for the character following the closing brace, and - # silence the warning if it's one of those listed above, i.e. - # "{.;,)<>]:". - # - # To account for nested initializer list, we allow any number of - # closing braces up to "{;,)<". We can't simply silence the - # warning on first sight of closing brace, because that would - # cause false negatives for things that are not initializer lists. - # Silence this: But not this: - # Outer{ if (...) { - # Inner{...} if (...){ // Missing space before { - # }; } - # - # There is a false negative with this approach if people inserted - # spurious semicolons, e.g. "if (cond){};", but we will catch the - # spurious semicolon with a separate check. - (endline, endlinenum, endpos) = CloseExpression( - clean_lines, linenum, len(match.group(1))) - trailing_text = '' - if endpos > -1: - trailing_text = endline[endpos:] - for offset in xrange(endlinenum + 1, - min(endlinenum + 3, clean_lines.NumLines() - 1)): - trailing_text += clean_lines.elided[offset] - if not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text): - error(filename, linenum, 'whitespace/braces', 5, - 'Missing space before {') - - # Make sure '} else {' has spaces. - if Search(r'}else', line): - error(filename, linenum, 'whitespace/braces', 5, - 'Missing space before else') - - # You shouldn't have a space before a semicolon at the end of the line. - # There's a special case for "for" since the style guide allows space before - # the semicolon there. - if Search(r':\s*;\s*$', line): - error(filename, linenum, 'whitespace/semicolon', 5, - 'Semicolon defining empty statement. Use {} instead.') - elif Search(r'^\s*;\s*$', line): - error(filename, linenum, 'whitespace/semicolon', 5, - 'Line contains only semicolon. If this should be an empty statement, ' - 'use {} instead.') - elif (Search(r'\s+;\s*$', line) and - not Search(r'\bfor\b', line)): - error(filename, linenum, 'whitespace/semicolon', 5, - 'Extra space before last semicolon. If this should be an empty ' - 'statement, use {} instead.') - - -def IsDecltype(clean_lines, linenum, column): - """Check if the token ending on (linenum, column) is decltype(). - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: the number of the line to check. - column: end column of the token to check. - Returns: - True if this token is decltype() expression, False otherwise. - """ - (text, _, start_col) = ReverseCloseExpression(clean_lines, linenum, column) - if start_col < 0: - return False - if Search(r'\bdecltype\s*$', text[0:start_col]): - return True - return False - - -def IsTemplateParameterList(clean_lines, linenum, column): - """Check if the token ending on (linenum, column) is the end of template<>. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: the number of the line to check. - column: end column of the token to check. - Returns: - True if this token is end of a template parameter list, False otherwise. 
- """ - (_, startline, startpos) = ReverseCloseExpression( - clean_lines, linenum, column) - if (startpos > -1 and - Search(r'\btemplate\s*$', clean_lines.elided[startline][0:startpos])): - return True - return False - - -def IsRValueType(typenames, clean_lines, nesting_state, linenum, column): - """Check if the token ending on (linenum, column) is a type. - - Assumes that text to the right of the column is "&&" or a function - name. - - Args: - typenames: set of type names from template-argument-list. - clean_lines: A CleansedLines instance containing the file. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - linenum: the number of the line to check. - column: end column of the token to check. - Returns: - True if this token is a type, False if we are not sure. - """ - prefix = clean_lines.elided[linenum][0:column] - - # Get one word to the left. If we failed to do so, this is most - # likely not a type, since it's unlikely that the type name and "&&" - # would be split across multiple lines. - match = Match(r'^(.*)(\b\w+|[>*)&])\s*$', prefix) - if not match: - return False - - # Check text following the token. If it's "&&>" or "&&," or "&&...", it's - # most likely a rvalue reference used inside a template. - suffix = clean_lines.elided[linenum][column:] - if Match(r'&&\s*(?:[>,]|\.\.\.)', suffix): - return True - - # Check for known types and end of templates: - # int&& variable - # vector&& variable - # - # Because this function is called recursively, we also need to - # recognize pointer and reference types: - # int* Function() - # int& Function() - if (match.group(2) in typenames or - match.group(2) in ['char', 'char16_t', 'char32_t', 'wchar_t', 'bool', - 'short', 'int', 'long', 'signed', 'unsigned', - 'float', 'double', 'void', 'auto', '>', '*', '&']): - return True - - # If we see a close parenthesis, look for decltype on the other side. - # decltype would unambiguously identify a type, anything else is - # probably a parenthesized expression and not a type. - if match.group(2) == ')': - return IsDecltype( - clean_lines, linenum, len(match.group(1)) + len(match.group(2)) - 1) - - # Check for casts and cv-qualifiers. - # match.group(1) remainder - # -------------- --------- - # const_cast< type&& - # const type&& - # type const&& - if Search(r'\b(?:const_cast\s*<|static_cast\s*<|dynamic_cast\s*<|' - r'reinterpret_cast\s*<|\w+\s)\s*$', - match.group(1)): - return True - - # Look for a preceding symbol that might help differentiate the context. - # These are the cases that would be ambiguous: - # match.group(1) remainder - # -------------- --------- - # Call ( expression && - # Declaration ( type&& - # sizeof ( type&& - # if ( expression && - # while ( expression && - # for ( type&& - # for( ; expression && - # statement ; type&& - # block { type&& - # constructor { expression && - start = linenum - line = match.group(1) - match_symbol = None - while start >= 0: - # We want to skip over identifiers and commas to get to a symbol. - # Commas are skipped so that we can find the opening parenthesis - # for function parameter lists. 
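The matcher on the next line is the workhorse of this backward scan; a standalone look at which symbol it isolates when identifiers and commas sit between that symbol and the end of the prefix:

import re

symbol_pattern = r'^(.*)([^\w\s,])[\w\s,]*$'

print(re.match(symbol_pattern, 'Call(x, y, z').group(2))   # '(' : names and commas skipped
print(re.match(symbol_pattern, 'block { foo').group(2))    # '{'
print(re.match(symbol_pattern, 'statement; T').group(2))   # ';'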
- match_symbol = Match(r'^(.*)([^\w\s,])[\w\s,]*$', line) - if match_symbol: - break - start -= 1 - line = clean_lines.elided[start] - - if not match_symbol: - # Probably the first statement in the file is an rvalue reference - return True - - if match_symbol.group(2) == '}': - # Found closing brace, probably an indicate of this: - # block{} type&& - return True - - if match_symbol.group(2) == ';': - # Found semicolon, probably one of these: - # for(; expression && - # statement; type&& - - # Look for the previous 'for(' in the previous lines. - before_text = match_symbol.group(1) - for i in xrange(start - 1, max(start - 6, 0), -1): - before_text = clean_lines.elided[i] + before_text - if Search(r'for\s*\([^{};]*$', before_text): - # This is the condition inside a for-loop - return False - - # Did not find a for-init-statement before this semicolon, so this - # is probably a new statement and not a condition. - return True - - if match_symbol.group(2) == '{': - # Found opening brace, probably one of these: - # block{ type&& = ... ; } - # constructor{ expression && expression } - - # Look for a closing brace or a semicolon. If we see a semicolon - # first, this is probably a rvalue reference. - line = clean_lines.elided[start][0:len(match_symbol.group(1)) + 1] - end = start - depth = 1 - while True: - for ch in line: - if ch == ';': - return True - elif ch == '{': - depth += 1 - elif ch == '}': - depth -= 1 - if depth == 0: - return False - end += 1 - if end >= clean_lines.NumLines(): - break - line = clean_lines.elided[end] - # Incomplete program? - return False - - if match_symbol.group(2) == '(': - # Opening parenthesis. Need to check what's to the left of the - # parenthesis. Look back one extra line for additional context. - before_text = match_symbol.group(1) - if linenum > 1: - before_text = clean_lines.elided[linenum - 1] + before_text - before_text = match_symbol.group(1) - - # Patterns that are likely to be types: - # [](type&& - # for (type&& - # sizeof(type&& - # operator=(type&& - # - if Search(r'(?:\]|\bfor|\bsizeof|\boperator\s*\S+\s*)\s*$', before_text): - return True - - # Patterns that are likely to be expressions: - # if (expression && - # while (expression && - # : initializer(expression && - # , initializer(expression && - # ( FunctionCall(expression && - # + FunctionCall(expression && - # + (expression && - # - # The last '+' represents operators such as '+' and '-'. - if Search(r'(?:\bif|\bwhile|[-+=%^(]*>)?\s*$', - match_symbol.group(1)) - if match_func: - # Check for constructors, which don't have return types. - if Search(r'\b(?:explicit|inline)$', match_func.group(1)): - return True - implicit_constructor = Match(r'\s*(\w+)\((?:const\s+)?(\w+)', prefix) - if (implicit_constructor and - implicit_constructor.group(1) == implicit_constructor.group(2)): - return True - return IsRValueType(typenames, clean_lines, nesting_state, linenum, - len(match_func.group(1))) - - # Nothing before the function name. If this is inside a block scope, - # this is probably a function call. - return not (nesting_state.previous_stack_top and - nesting_state.previous_stack_top.IsBlockInfo()) - - if match_symbol.group(2) == '>': - # Possibly a closing bracket, check that what's on the other side - # looks like the start of a template. - return IsTemplateParameterList( - clean_lines, start, len(match_symbol.group(1))) - - # Some other symbol, usually something like "a=b&&c". This is most - # likely not a type. 
- return False - - -def IsDeletedOrDefault(clean_lines, linenum): - """Check if current constructor or operator is deleted or default. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if this is a deleted or default constructor. - """ - open_paren = clean_lines.elided[linenum].find('(') - if open_paren < 0: - return False - (close_line, _, close_paren) = CloseExpression( - clean_lines, linenum, open_paren) - if close_paren < 0: - return False - return Match(r'\s*=\s*(?:delete|default)\b', close_line[close_paren:]) - - -def IsRValueAllowed(clean_lines, linenum, typenames): - """Check if RValue reference is allowed on a particular line. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - typenames: set of type names from template-argument-list. - Returns: - True if line is within the region where RValue references are allowed. - """ - # Allow region marked by PUSH/POP macros - for i in xrange(linenum, 0, -1): - line = clean_lines.elided[i] - if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line): - if not line.endswith('PUSH'): - return False - for j in xrange(linenum, clean_lines.NumLines(), 1): - line = clean_lines.elided[j] - if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line): - return line.endswith('POP') - - # Allow operator= - line = clean_lines.elided[linenum] - if Search(r'\boperator\s*=\s*\(', line): - return IsDeletedOrDefault(clean_lines, linenum) - - # Allow constructors - match = Match(r'\s*(?:[\w<>]+::)*([\w<>]+)\s*::\s*([\w<>]+)\s*\(', line) - if match and match.group(1) == match.group(2): - return IsDeletedOrDefault(clean_lines, linenum) - if Search(r'\b(?:explicit|inline)\s+[\w<>]+\s*\(', line): - return IsDeletedOrDefault(clean_lines, linenum) - - if Match(r'\s*[\w<>]+\s*\(', line): - previous_line = 'ReturnType' - if linenum > 0: - previous_line = clean_lines.elided[linenum - 1] - if Match(r'^\s*$', previous_line) or Search(r'[{}:;]\s*$', previous_line): - return IsDeletedOrDefault(clean_lines, linenum) - - # Reject types not mentioned in template-argument-list - while line: - match = Match(r'^.*?(\w+)\s*&&(.*)$', line) - if not match: - break - if match.group(1) not in typenames: - return False - line = match.group(2) - - # All RValue types that were in template-argument-list should have - # been removed by now. Those were allowed, assuming that they will - # be forwarded. - # - # If there are no remaining RValue types left (i.e. types that were - # not found in template-argument-list), flag those as not allowed. - return line.find('&&') < 0 - - -def GetTemplateArgs(clean_lines, linenum): - """Find list of template arguments associated with this function declaration. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: Line number containing the start of the function declaration, - usually one line after the end of the template-argument-list. - Returns: - Set of type names, or empty set if this does not appear to have - any template parameters. 
- """ - # Find start of function - func_line = linenum - while func_line > 0: - line = clean_lines.elided[func_line] - if Match(r'^\s*$', line): - return set() - if line.find('(') >= 0: - break - func_line -= 1 - if func_line == 0: - return set() - - # Collapse template-argument-list into a single string - argument_list = '' - match = Match(r'^(\s*template\s*)<', clean_lines.elided[func_line]) - if match: - # template-argument-list on the same line as function name - start_col = len(match.group(1)) - _, end_line, end_col = CloseExpression(clean_lines, func_line, start_col) - if end_col > -1 and end_line == func_line: - start_col += 1 # Skip the opening bracket - argument_list = clean_lines.elided[func_line][start_col:end_col] - - elif func_line > 1: - # template-argument-list one line before function name - match = Match(r'^(.*)>\s*$', clean_lines.elided[func_line - 1]) - if match: - end_col = len(match.group(1)) - _, start_line, start_col = ReverseCloseExpression( - clean_lines, func_line - 1, end_col) - if start_col > -1: - start_col += 1 # Skip the opening bracket - while start_line < func_line - 1: - argument_list += clean_lines.elided[start_line][start_col:] - start_col = 0 - start_line += 1 - argument_list += clean_lines.elided[func_line - 1][start_col:end_col] - - if not argument_list: - return set() - - # Extract type names - typenames = set() - while True: - match = Match(r'^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$', - argument_list) - if not match: - break - typenames.add(match.group(1)) - argument_list = match.group(2) - return typenames - - -def CheckRValueReference(filename, clean_lines, linenum, nesting_state, error): - """Check for rvalue references. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - # Find lines missing spaces around &&. - # TODO(unknown): currently we don't check for rvalue references - # with spaces surrounding the && to avoid false positives with - # boolean expressions. - line = clean_lines.elided[linenum] - match = Match(r'^(.*\S)&&', line) - if not match: - match = Match(r'(.*)&&\S', line) - if (not match) or '(&&)' in line or Search(r'\boperator\s*$', match.group(1)): - return - - # Either poorly formed && or an rvalue reference, check the context - # to get a more accurate error message. Mostly we want to determine - # if what's to the left of "&&" is a type or not. - typenames = GetTemplateArgs(clean_lines, linenum) - and_pos = len(match.group(1)) - if IsRValueType(typenames, clean_lines, nesting_state, linenum, and_pos): - if not IsRValueAllowed(clean_lines, linenum, typenames): - error(filename, linenum, 'build/c++11', 3, - 'RValue references are an unapproved C++ feature.') - else: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around &&') - - -def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error): - """Checks for additional blank line issues related to sections. - - Currently the only thing checked here is blank line before protected/private. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - class_info: A _ClassInfo objects. - linenum: The number of the line to check. - error: The function to call with any errors found. 
- """ - # Skip checks if the class is small, where small means 25 lines or less. - # 25 lines seems like a good cutoff since that's the usual height of - # terminals, and any class that can't fit in one screen can't really - # be considered "small". - # - # Also skip checks if we are on the first line. This accounts for - # classes that look like - # class Foo { public: ... }; - # - # If we didn't find the end of the class, last_line would be zero, - # and the check will be skipped by the first condition. - if (class_info.last_line - class_info.starting_linenum <= 24 or - linenum <= class_info.starting_linenum): - return - - matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum]) - if matched: - # Issue warning if the line before public/protected/private was - # not a blank line, but don't do this if the previous line contains - # "class" or "struct". This can happen two ways: - # - We are at the beginning of the class. - # - We are forward-declaring an inner class that is semantically - # private, but needed to be public for implementation reasons. - # Also ignores cases where the previous line ends with a backslash as can be - # common when defining classes in C macros. - prev_line = clean_lines.lines[linenum - 1] - if (not IsBlankLine(prev_line) and - not Search(r'\b(class|struct)\b', prev_line) and - not Search(r'\\$', prev_line)): - # Try a bit harder to find the beginning of the class. This is to - # account for multi-line base-specifier lists, e.g.: - # class Derived - # : public Base { - end_class_head = class_info.starting_linenum - for i in range(class_info.starting_linenum, linenum): - if Search(r'\{\s*$', clean_lines.lines[i]): - end_class_head = i - break - if end_class_head < linenum - 1: - error(filename, linenum, 'whitespace/blank_line', 3, - '"%s:" should be preceded by a blank line' % matched.group(1)) - - -def GetPreviousNonBlankLine(clean_lines, linenum): - """Return the most recent non-blank line and its line number. - - Args: - clean_lines: A CleansedLines instance containing the file contents. - linenum: The number of the line to check. - - Returns: - A tuple with two elements. The first element is the contents of the last - non-blank line before the current line, or the empty string if this is the - first non-blank line. The second is the line number of that line, or -1 - if this is the first non-blank line. - """ - - prevlinenum = linenum - 1 - while prevlinenum >= 0: - prevline = clean_lines.elided[prevlinenum] - if not IsBlankLine(prevline): # if not a blank line... - return (prevline, prevlinenum) - prevlinenum -= 1 - return ('', -1) - - -def CheckBraces(filename, clean_lines, linenum, error): - """Looks for misplaced braces (e.g. at the end of line). - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - line = clean_lines.elided[linenum] # get rid of comments and strings - - if Match(r'\s*{\s*$', line): - # We allow an open brace to start a line in the case where someone is using - # braces in a block to explicitly create a new scope, which is commonly used - # to control the lifetime of stack-allocated variables. Braces are also - # used for brace initializers inside function calls. 
We don't detect this - # perfectly: we just don't complain if the last non-whitespace character on - # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the - # previous line starts a preprocessor block. - prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] - if (not Search(r'[,;:}{(]\s*$', prevline) and - not Match(r'\s*#', prevline)): - error(filename, linenum, 'whitespace/braces', 4, - '{ should almost always be at the end of the previous line') - - # An else clause should be on the same line as the preceding closing brace. - if Match(r'\s*else\b\s*(?:if\b|\{|$)', line): - prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] - if Match(r'\s*}\s*$', prevline): - error(filename, linenum, 'whitespace/newline', 4, - 'An else should appear on the same line as the preceding }') - - # If braces come on one side of an else, they should be on both. - # However, we have to worry about "else if" that spans multiple lines! - if Search(r'else if\s*\(', line): # could be multi-line if - brace_on_left = bool(Search(r'}\s*else if\s*\(', line)) - # find the ( after the if - pos = line.find('else if') - pos = line.find('(', pos) - if pos > 0: - (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos) - brace_on_right = endline[endpos:].find('{') != -1 - if brace_on_left != brace_on_right: # must be brace after if - error(filename, linenum, 'readability/braces', 5, - 'If an else has a brace on one side, it should have it on both') - elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line): - error(filename, linenum, 'readability/braces', 5, - 'If an else has a brace on one side, it should have it on both') - - # Likewise, an else should never have the else clause on the same line - if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line): - error(filename, linenum, 'whitespace/newline', 4, - 'Else clause should never be on same line as else (use 2 lines)') - - # In the same way, a do/while should never be on one line - if Match(r'\s*do [^\s{]', line): - error(filename, linenum, 'whitespace/newline', 4, - 'do/while clauses should not be on a single line') - - # Check single-line if/else bodies. The style guide says 'curly braces are not - # required for single-line statements'. We additionally allow multi-line, - # single statements, but we reject anything with more than one semicolon in - # it. This means that the first semicolon after the if should be at the end of - # its line, and the line after that should have an indent level equal to or - # lower than the if. We also check for ambiguous if/else nesting without - # braces. - if_else_match = Search(r'\b(if\s*\(|else\b)', line) - if if_else_match and not Match(r'\s*#', line): - if_indent = GetIndentLevel(line) - endline, endlinenum, endpos = line, linenum, if_else_match.end() - if_match = Search(r'\bif\s*\(', line) - if if_match: - # This could be a multiline if condition, so find the end first. - pos = if_match.end() - 1 - (endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos) - # Check for an opening brace, either directly after the if or on the next - # line. If found, this isn't a single-statement conditional. 
- if (not Match(r'\s*{', endline[endpos:]) - and not (Match(r'\s*$', endline[endpos:]) - and endlinenum < (len(clean_lines.elided) - 1) - and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))): - while (endlinenum < len(clean_lines.elided) - and ';' not in clean_lines.elided[endlinenum][endpos:]): - endlinenum += 1 - endpos = 0 - if endlinenum < len(clean_lines.elided): - endline = clean_lines.elided[endlinenum] - # We allow a mix of whitespace and closing braces (e.g. for one-liner - # methods) and a single \ after the semicolon (for macros) - endpos = endline.find(';') - if not Match(r';[\s}]*(\\?)$', endline[endpos:]): - # Semicolon isn't the last character, there's something trailing. - # Output a warning if the semicolon is not contained inside - # a lambda expression. - if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$', - endline): - error(filename, linenum, 'readability/braces', 4, - 'If/else bodies with multiple statements require braces') - elif endlinenum < len(clean_lines.elided) - 1: - # Make sure the next line is dedented - next_line = clean_lines.elided[endlinenum + 1] - next_indent = GetIndentLevel(next_line) - # With ambiguous nested if statements, this will error out on the - # if that *doesn't* match the else, regardless of whether it's the - # inner one or outer one. - if (if_match and Match(r'\s*else\b', next_line) - and next_indent != if_indent): - error(filename, linenum, 'readability/braces', 4, - 'Else clause should be indented at the same level as if. ' - 'Ambiguous nested if/else chains require braces.') - elif next_indent > if_indent: - error(filename, linenum, 'readability/braces', 4, - 'If/else bodies with multiple statements require braces') - - -def CheckTrailingSemicolon(filename, clean_lines, linenum, error): - """Looks for redundant trailing semicolon. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - line = clean_lines.elided[linenum] - - # Block bodies should not be followed by a semicolon. Due to C++11 - # brace initialization, there are more places where semicolons are - # required than not, so we use a whitelist approach to check these - # rather than a blacklist. These are the places where "};" should - # be replaced by just "}": - # 1. Some flavor of block following closing parenthesis: - # for (;;) {}; - # while (...) {}; - # switch (...) {}; - # Function(...) {}; - # if (...) {}; - # if (...) else if (...) {}; - # - # 2. else block: - # if (...) else {}; - # - # 3. const member function: - # Function(...) const {}; - # - # 4. Block following some statement: - # x = 42; - # {}; - # - # 5. Block at the beginning of a function: - # Function(...) { - # {}; - # } - # - # Note that naively checking for the preceding "{" will also match - # braces inside multi-dimensional arrays, but this is fine since - # that expression will not contain semicolons. - # - # 6. Block following another block: - # while (true) {} - # {}; - # - # 7. End of namespaces: - # namespace {}; - # - # These semicolons seems far more common than other kinds of - # redundant semicolons, possibly due to people converting classes - # to namespaces. For now we do not warn for this case. - # - # Try matching case 1 first. - match = Match(r'^(.*\)\s*)\{', line) - if match: - # Matched closing parenthesis (case 1). 
Check the token before the - # matching opening parenthesis, and don't warn if it looks like a - # macro. This avoids these false positives: - # - macro that defines a base class - # - multi-line macro that defines a base class - # - macro that defines the whole class-head - # - # But we still issue warnings for macros that we know are safe to - # warn, specifically: - # - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P - # - TYPED_TEST - # - INTERFACE_DEF - # - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED: - # - # We implement a whitelist of safe macros instead of a blacklist of - # unsafe macros, even though the latter appears less frequently in - # google code and would have been easier to implement. This is because - # the downside for getting the whitelist wrong means some extra - # semicolons, while the downside for getting the blacklist wrong - # would result in compile errors. - # - # In addition to macros, we also don't want to warn on - # - Compound literals - # - Lambdas - # - alignas specifier with anonymous structs: - closing_brace_pos = match.group(1).rfind(')') - opening_parenthesis = ReverseCloseExpression( - clean_lines, linenum, closing_brace_pos) - if opening_parenthesis[2] > -1: - line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]] - macro = Search(r'\b([A-Z_]+)\s*$', line_prefix) - func = Match(r'^(.*\])\s*$', line_prefix) - if ((macro and - macro.group(1) not in ( - 'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST', - 'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED', - 'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or - (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or - Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or - Search(r'\s+=\s*$', line_prefix)): - match = None - if (match and - opening_parenthesis[1] > 1 and - Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])): - # Multi-line lambda-expression - match = None - - else: - # Try matching cases 2-3. - match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line) - if not match: - # Try matching cases 4-6. These are always matched on separate lines. - # - # Note that we can't simply concatenate the previous line to the - # current line and do a single match, otherwise we may output - # duplicate warnings for the blank line case: - # if (cond) { - # // blank line - # } - prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] - if prevline and Search(r'[;{}]\s*$', prevline): - match = Match(r'^(\s*)\{', line) - - # Check matching closing brace - if match: - (endline, endlinenum, endpos) = CloseExpression( - clean_lines, linenum, len(match.group(1))) - if endpos > -1 and Match(r'^\s*;', endline[endpos:]): - # Current {} pair is eligible for semicolon check, and we have found - # the redundant semicolon, output warning here. - # - # Note: because we are scanning forward for opening braces, and - # outputting warnings for the matching closing brace, if there are - # nested blocks with trailing semicolons, we will get the error - # messages in reversed order. - error(filename, endlinenum, 'readability/braces', 4, - "You don't need a ; after a }") - - -def CheckEmptyBlockBody(filename, clean_lines, linenum, error): - """Look for empty loop/conditional body with only a single semicolon. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - # Search for loop keywords at the beginning of the line. 
Because only - # whitespaces are allowed before the keywords, this will also ignore most - # do-while-loops, since those lines should start with closing brace. - # - # We also check "if" blocks here, since an empty conditional block - # is likely an error. - line = clean_lines.elided[linenum] - matched = Match(r'\s*(for|while|if)\s*\(', line) - if matched: - # Find the end of the conditional expression - (end_line, end_linenum, end_pos) = CloseExpression( - clean_lines, linenum, line.find('(')) - - # Output warning if what follows the condition expression is a semicolon. - # No warning for all other cases, including whitespace or newline, since we - # have a separate check for semicolons preceded by whitespace. - if end_pos >= 0 and Match(r';', end_line[end_pos:]): - if matched.group(1) == 'if': - error(filename, end_linenum, 'whitespace/empty_conditional_body', 5, - 'Empty conditional bodies should use {}') - else: - error(filename, end_linenum, 'whitespace/empty_loop_body', 5, - 'Empty loop bodies should use {} or continue') - - -def FindCheckMacro(line): - """Find a replaceable CHECK-like macro. - - Args: - line: line to search on. - Returns: - (macro name, start position), or (None, -1) if no replaceable - macro is found. - """ - for macro in _CHECK_MACROS: - i = line.find(macro) - if i >= 0: - # Find opening parenthesis. Do a regular expression match here - # to make sure that we are matching the expected CHECK macro, as - # opposed to some other macro that happens to contain the CHECK - # substring. - matched = Match(r'^(.*\b' + macro + r'\s*)\(', line) - if not matched: - continue - return (macro, len(matched.group(1))) - return (None, -1) - - -def CheckCheck(filename, clean_lines, linenum, error): - """Checks the use of CHECK and EXPECT macros. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - # Decide the set of replacement macros that should be suggested - lines = clean_lines.elided - (check_macro, start_pos) = FindCheckMacro(lines[linenum]) - if not check_macro: - return - - # Find end of the boolean expression by matching parentheses - (last_line, end_line, end_pos) = CloseExpression( - clean_lines, linenum, start_pos) - if end_pos < 0: - return - - # If the check macro is followed by something other than a - # semicolon, assume users will log their own custom error messages - # and don't suggest any replacements. - if not Match(r'\s*;', last_line[end_pos:]): - return - - if linenum == end_line: - expression = lines[linenum][start_pos + 1:end_pos - 1] - else: - expression = lines[linenum][start_pos + 1:] - for i in xrange(linenum + 1, end_line): - expression += lines[i] - expression += last_line[0:end_pos - 1] - - # Parse expression so that we can take parentheses into account. - # This avoids false positives for inputs like "CHECK((a < 4) == b)", - # which is not replaceable by CHECK_LE. - lhs = '' - rhs = '' - operator = None - while expression: - matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||' - r'==|!=|>=|>|<=|<|\()(.*)$', expression) - if matched: - token = matched.group(1) - if token == '(': - # Parenthesized operand - expression = matched.group(2) - (end, _) = FindEndOfExpressionInLine(expression, 0, ['(']) - if end < 0: - return # Unmatched parenthesis - lhs += '(' + expression[0:end] - expression = expression[end:] - elif token in ('&&', '||'): - # Logical and/or operators. 
This means the expression - # contains more than one term, for example: - # CHECK(42 < a && a < b); - # - # These are not replaceable with CHECK_LE, so bail out early. - return - elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'): - # Non-relational operator - lhs += token - expression = matched.group(2) - else: - # Relational operator - operator = token - rhs = matched.group(2) - break - else: - # Unparenthesized operand. Instead of appending to lhs one character - # at a time, we do another regular expression match to consume several - # characters at once if possible. Trivial benchmark shows that this - # is more efficient when the operands are longer than a single - # character, which is generally the case. - matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression) - if not matched: - matched = Match(r'^(\s*\S)(.*)$', expression) - if not matched: - break - lhs += matched.group(1) - expression = matched.group(2) - - # Only apply checks if we got all parts of the boolean expression - if not (lhs and operator and rhs): - return - - # Check that rhs do not contain logical operators. We already know - # that lhs is fine since the loop above parses out && and ||. - if rhs.find('&&') > -1 or rhs.find('||') > -1: - return - - # At least one of the operands must be a constant literal. This is - # to avoid suggesting replacements for unprintable things like - # CHECK(variable != iterator) - # - # The following pattern matches decimal, hex integers, strings, and - # characters (in that order). - lhs = lhs.strip() - rhs = rhs.strip() - match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$' - if Match(match_constant, lhs) or Match(match_constant, rhs): - # Note: since we know both lhs and rhs, we can provide a more - # descriptive error message like: - # Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42) - # Instead of: - # Consider using CHECK_EQ instead of CHECK(a == b) - # - # We are still keeping the less descriptive message because if lhs - # or rhs gets long, the error message might become unreadable. - error(filename, linenum, 'readability/check', 2, - 'Consider using %s instead of %s(a %s b)' % ( - _CHECK_REPLACEMENT[check_macro][operator], - check_macro, operator)) - - -def CheckAltTokens(filename, clean_lines, linenum, error): - """Check alternative keywords being used in boolean expressions. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Avoid preprocessor lines - if Match(r'^\s*#', line): - return - - # Last ditch effort to avoid multi-line comments. This will not help - # if the comment started before the current line or ended after the - # current line, but it catches most of the false positives. At least, - # it provides a way to workaround this warning for people who use - # multi-line comments in preprocessor macros. - # - # TODO(unknown): remove this once cpplint has better support for - # multi-line comments. - if line.find('/*') >= 0 or line.find('*/') >= 0: - return - - for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line): - error(filename, linenum, 'readability/alt_tokens', 2, - 'Use operator %s instead of %s' % ( - _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1))) - - -def GetLineWidth(line): - """Determines the width of the line in column positions. - - Args: - line: A string, which may be a Unicode string. 
- - Returns: - The width of the line in column positions, accounting for Unicode - combining characters and wide characters. - """ - if isinstance(line, unicode): - width = 0 - for uc in unicodedata.normalize('NFC', line): - if unicodedata.east_asian_width(uc) in ('W', 'F'): - width += 2 - elif not unicodedata.combining(uc): - width += 1 - return width - else: - return len(line) - - -def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state, - error): - """Checks rules from the 'C++ style rules' section of cppguide.html. - - Most of these rules are hard to test (naming, comment style), but we - do what we can. In particular we check for 2-space indents, line lengths, - tab usage, spaces inside code, etc. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - file_extension: The extension (without the dot) of the filename. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - - # Don't use "elided" lines here, otherwise we can't check commented lines. - # Don't want to use "raw" either, because we don't want to check inside C++11 - # raw strings, - raw_lines = clean_lines.lines_without_raw_strings - line = raw_lines[linenum] - - if line.find('\t') != -1: - error(filename, linenum, 'whitespace/tab', 1, - 'Tab found; better to use spaces') - - # One or three blank spaces at the beginning of the line is weird; it's - # hard to reconcile that with 2-space indents. - # NOTE: here are the conditions rob pike used for his tests. Mine aren't - # as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces - # if(RLENGTH > 20) complain = 0; - # if(match($0, " +(error|private|public|protected):")) complain = 0; - # if(match(prev, "&& *$")) complain = 0; - # if(match(prev, "\\|\\| *$")) complain = 0; - # if(match(prev, "[\",=><] *$")) complain = 0; - # if(match($0, " <<")) complain = 0; - # if(match(prev, " +for \\(")) complain = 0; - # if(prevodd && match(prevprev, " +for \\(")) complain = 0; - scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$' - classinfo = nesting_state.InnermostClass() - initial_spaces = 0 - cleansed_line = clean_lines.elided[linenum] - while initial_spaces < len(line) and line[initial_spaces] == ' ': - initial_spaces += 1 - if line and line[-1].isspace(): - error(filename, linenum, 'whitespace/end_of_line', 4, - 'Line ends in whitespace. Consider deleting these extra spaces.') - # There are certain situations we allow one space, notably for - # section labels, and also lines containing multi-line raw strings. - elif ((initial_spaces == 1 or initial_spaces == 3) and - not Match(scope_or_label_pattern, cleansed_line) and - not (clean_lines.raw_lines[linenum] != line and - Match(r'^\s*""', line))): - error(filename, linenum, 'whitespace/indent', 3, - 'Weird number of spaces at line-start. ' - 'Are you using a 2-space indent?') - - # Check if the line is a header guard. - is_header_guard = False - if file_extension == 'h': - cppvar = GetHeaderGuardCPPVariable(filename) - if (line.startswith('#ifndef %s' % cppvar) or - line.startswith('#define %s' % cppvar) or - line.startswith('#endif // %s' % cppvar)): - is_header_guard = True - # #include lines and header guards can be long, since there's no clean way to - # split them. - # - # URLs can be long too. It's possible to split these, but it makes them - # harder to cut&paste. 
- # - # The "$Id:...$" comment may also get very long without it being the - # developers fault. - if (not line.startswith('#include') and not is_header_guard and - not Match(r'^\s*//.*http(s?)://\S*$', line) and - not Match(r'^// \$Id:.*#[0-9]+ \$$', line)): - line_width = GetLineWidth(line) - extended_length = int((_line_length * 1.25)) - if line_width > extended_length: - error(filename, linenum, 'whitespace/line_length', 4, - 'Lines should very rarely be longer than %i characters' % - extended_length) - elif line_width > _line_length: - error(filename, linenum, 'whitespace/line_length', 2, - 'Lines should be <= %i characters long' % _line_length) - - if (cleansed_line.count(';') > 1 and - # for loops are allowed two ;'s (and may run over two lines). - cleansed_line.find('for') == -1 and - (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or - GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and - # It's ok to have many commands in a switch case that fits in 1 line - not ((cleansed_line.find('case ') != -1 or - cleansed_line.find('default:') != -1) and - cleansed_line.find('break;') != -1)): - error(filename, linenum, 'whitespace/newline', 0, - 'More than one command on the same line') - - # Some more style checks - CheckBraces(filename, clean_lines, linenum, error) - CheckTrailingSemicolon(filename, clean_lines, linenum, error) - CheckEmptyBlockBody(filename, clean_lines, linenum, error) - CheckAccess(filename, clean_lines, linenum, nesting_state, error) - CheckSpacing(filename, clean_lines, linenum, nesting_state, error) - CheckOperatorSpacing(filename, clean_lines, linenum, error) - CheckParenthesisSpacing(filename, clean_lines, linenum, error) - CheckCommaSpacing(filename, clean_lines, linenum, error) - CheckBracesSpacing(filename, clean_lines, linenum, error) - CheckSpacingForFunctionCall(filename, clean_lines, linenum, error) - CheckRValueReference(filename, clean_lines, linenum, nesting_state, error) - CheckCheck(filename, clean_lines, linenum, error) - CheckAltTokens(filename, clean_lines, linenum, error) - classinfo = nesting_state.InnermostClass() - if classinfo: - CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error) - - -_RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$') -# Matches the first component of a filename delimited by -s and _s. That is: -# _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo' -# _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo' -# _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo' -# _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo' -_RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+') - - -def _DropCommonSuffixes(filename): - """Drops common suffixes like _test.cc or -inl.h from filename. - - For example: - >>> _DropCommonSuffixes('foo/foo-inl.h') - 'foo/foo' - >>> _DropCommonSuffixes('foo/bar/foo.cc') - 'foo/bar/foo' - >>> _DropCommonSuffixes('foo/foo_internal.h') - 'foo/foo' - >>> _DropCommonSuffixes('foo/foo_unusualinternal.h') - 'foo/foo_unusualinternal' - - Args: - filename: The input filename. - - Returns: - The filename with the common suffix removed. 
- """ - for suffix in ('test.cc', 'regtest.cc', 'unittest.cc', - 'inl.h', 'impl.h', 'internal.h'): - if (filename.endswith(suffix) and len(filename) > len(suffix) and - filename[-len(suffix) - 1] in ('-', '_')): - return filename[:-len(suffix) - 1] - return os.path.splitext(filename)[0] - - -def _IsTestFilename(filename): - """Determines if the given filename has a suffix that identifies it as a test. - - Args: - filename: The input filename. - - Returns: - True if 'filename' looks like a test, False otherwise. - """ - if (filename.endswith('_test.cc') or - filename.endswith('_unittest.cc') or - filename.endswith('_regtest.cc')): - return True - else: - return False - - -def _ClassifyInclude(fileinfo, include, is_system): - """Figures out what kind of header 'include' is. - - Args: - fileinfo: The current file cpplint is running over. A FileInfo instance. - include: The path to a #included file. - is_system: True if the #include used <> rather than "". - - Returns: - One of the _XXX_HEADER constants. - - For example: - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True) - _C_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True) - _CPP_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False) - _LIKELY_MY_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'), - ... 'bar/foo_other_ext.h', False) - _POSSIBLE_MY_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False) - _OTHER_HEADER - """ - # This is a list of all standard c++ header files, except - # those already checked for above. - is_cpp_h = include in _CPP_HEADERS - - if is_system: - if is_cpp_h: - return _CPP_SYS_HEADER - else: - return _C_SYS_HEADER - - # If the target file and the include we're checking share a - # basename when we drop common extensions, and the include - # lives in . , then it's likely to be owned by the target file. - target_dir, target_base = ( - os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName()))) - include_dir, include_base = os.path.split(_DropCommonSuffixes(include)) - if target_base == include_base and ( - include_dir == target_dir or - include_dir == os.path.normpath(target_dir + '/../public')): - return _LIKELY_MY_HEADER - - # If the target and include share some initial basename - # component, it's possible the target is implementing the - # include, so it's allowed to be first, but we'll never - # complain if it's not there. - target_first_component = _RE_FIRST_COMPONENT.match(target_base) - include_first_component = _RE_FIRST_COMPONENT.match(include_base) - if (target_first_component and include_first_component and - target_first_component.group(0) == - include_first_component.group(0)): - return _POSSIBLE_MY_HEADER - - return _OTHER_HEADER - - - -def CheckIncludeLine(filename, clean_lines, linenum, include_state, error): - """Check rules that are applicable to #include lines. - - Strings on #include lines are NOT removed from elided line, to make - certain tasks easier. However, to prevent false positives, checks - applicable to #include lines in CheckLanguage must be put here. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - include_state: An _IncludeState instance in which the headers are inserted. - error: The function to call with any errors found. 
- """ - fileinfo = FileInfo(filename) - line = clean_lines.lines[linenum] - - # "include" should use the new style "foo/bar.h" instead of just "bar.h" - # Only do this check if the included header follows google naming - # conventions. If not, assume that it's a 3rd party API that - # requires special include conventions. - # - # We also make an exception for Lua headers, which follow google - # naming convention but not the include convention. - match = Match(r'#include\s*"([^/]+\.h)"', line) - if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)): - error(filename, linenum, 'build/include', 4, - 'Include the directory when naming .h files') - - # we shouldn't include a file more than once. actually, there are a - # handful of instances where doing so is okay, but in general it's - # not. - match = _RE_PATTERN_INCLUDE.search(line) - if match: - include = match.group(2) - is_system = (match.group(1) == '<') - duplicate_line = include_state.FindHeader(include) - if duplicate_line >= 0: - error(filename, linenum, 'build/include', 4, - '"%s" already included at %s:%s' % - (include, filename, duplicate_line)) - elif (include.endswith('.cc') and - os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)): - error(filename, linenum, 'build/include', 4, - 'Do not include .cc files from other packages') - elif not _THIRD_PARTY_HEADERS_PATTERN.match(include): - include_state.include_list[-1].append((include, linenum)) - - # We want to ensure that headers appear in the right order: - # 1) for foo.cc, foo.h (preferred location) - # 2) c system files - # 3) cpp system files - # 4) for foo.cc, foo.h (deprecated location) - # 5) other google headers - # - # We classify each include statement as one of those 5 types - # using a number of techniques. The include_state object keeps - # track of the highest type seen, and complains if we see a - # lower type after that. - error_message = include_state.CheckNextIncludeOrder( - _ClassifyInclude(fileinfo, include, is_system)) - if error_message: - error(filename, linenum, 'build/include_order', 4, - '%s. Should be: %s.h, c system, c++ system, other.' % - (error_message, fileinfo.BaseName())) - canonical_include = include_state.CanonicalizeAlphabeticalOrder(include) - if not include_state.IsInAlphabeticalOrder( - clean_lines, linenum, canonical_include): - error(filename, linenum, 'build/include_alpha', 4, - 'Include "%s" not in alphabetical order' % include) - include_state.SetLastHeader(canonical_include) - - - -def _GetTextInside(text, start_pattern): - r"""Retrieves all the text between matching open and close parentheses. - - Given a string of lines and a regular expression string, retrieve all the text - following the expression and between opening punctuation symbols like - (, [, or {, and the matching close-punctuation symbol. This properly nested - occurrences of the punctuations, so for the text like - printf(a(), b(c())); - a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'. - start_pattern must match string having an open punctuation symbol at the end. - - Args: - text: The lines to extract text. Its comments and strings must be elided. - It can be single line and can span multiple lines. - start_pattern: The regexp string indicating where to start extracting - the text. - Returns: - The extracted text. - None if either the opening string or ending punctuation could not be found. 
- """ - # TODO(unknown): Audit cpplint.py to see what places could be profitably - # rewritten to use _GetTextInside (and use inferior regexp matching today). - - # Give opening punctuations to get the matching close-punctuations. - matching_punctuation = {'(': ')', '{': '}', '[': ']'} - closing_punctuation = set(matching_punctuation.itervalues()) - - # Find the position to start extracting text. - match = re.search(start_pattern, text, re.M) - if not match: # start_pattern not found in text. - return None - start_position = match.end(0) - - assert start_position > 0, ( - 'start_pattern must ends with an opening punctuation.') - assert text[start_position - 1] in matching_punctuation, ( - 'start_pattern must ends with an opening punctuation.') - # Stack of closing punctuations we expect to have in text after position. - punctuation_stack = [matching_punctuation[text[start_position - 1]]] - position = start_position - while punctuation_stack and position < len(text): - if text[position] == punctuation_stack[-1]: - punctuation_stack.pop() - elif text[position] in closing_punctuation: - # A closing punctuation without matching opening punctuations. - return None - elif text[position] in matching_punctuation: - punctuation_stack.append(matching_punctuation[text[position]]) - position += 1 - if punctuation_stack: - # Opening punctuations left without matching close-punctuations. - return None - # punctuations match. - return text[start_position:position - 1] - - -# Patterns for matching call-by-reference parameters. -# -# Supports nested templates up to 2 levels deep using this messy pattern: -# < (?: < (?: < [^<>]* -# > -# | [^<>] )* -# > -# | [^<>] )* -# > -_RE_PATTERN_IDENT = r'[_a-zA-Z]\w*' # =~ [[:alpha:]][[:alnum:]]* -_RE_PATTERN_TYPE = ( - r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?' - r'(?:\w|' - r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|' - r'::)+') -# A call-by-reference parameter ends with '& identifier'. -_RE_PATTERN_REF_PARAM = re.compile( - r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*' - r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]') -# A call-by-const-reference parameter either ends with 'const& identifier' -# or looks like 'const type& identifier' when 'type' is atomic. -_RE_PATTERN_CONST_REF_PARAM = ( - r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT + - r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')') - - -def CheckLanguage(filename, clean_lines, linenum, file_extension, - include_state, nesting_state, error): - """Checks rules from the 'C++ language rules' section of cppguide.html. - - Some of these rules are hard to test (function overloading, using - uint32 inappropriately), but we do the best we can. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - file_extension: The extension (without the dot) of the filename. - include_state: An _IncludeState instance in which the headers are inserted. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - # If the line is empty or consists of entirely a comment, no need to - # check it. 
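As a worked example of the balanced-punctuation scan that _GetTextInside describes above, the compact re-implementation below (an editorial sketch, not part of the patch) reproduces the behaviour documented in its docstring for printf(a(), b(c()));.

import re

def text_inside(text, start_pattern):
    # Sketch of the scan described in _GetTextInside: start_pattern must end
    # at an opening bracket; walk forward, pushing the expected closing
    # bracket for every opener and popping it when it appears.
    pairs = {'(': ')', '{': '}', '[': ']'}
    match = re.search(start_pattern, text, re.M)
    if not match:
        return None
    pos = match.end(0)
    stack = [pairs[text[pos - 1]]]
    start = pos
    while stack and pos < len(text):
        ch = text[pos]
        if ch == stack[-1]:
            stack.pop()                      # matching closer found
        elif ch in (')', '}', ']'):
            return None                      # mismatched closer
        elif ch in pairs:
            stack.append(pairs[ch])          # nested opener
        pos += 1
    return None if stack else text[start:pos - 1]

assert text_inside('printf(a(), b(c()));', r'printf\(') == 'a(), b(c())'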
- line = clean_lines.elided[linenum] - if not line: - return - - match = _RE_PATTERN_INCLUDE.search(line) - if match: - CheckIncludeLine(filename, clean_lines, linenum, include_state, error) - return - - # Reset include state across preprocessor directives. This is meant - # to silence warnings for conditional includes. - match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line) - if match: - include_state.ResetSection(match.group(1)) - - # Make Windows paths like Unix. - fullname = os.path.abspath(filename).replace('\\', '/') - - # Perform other checks now that we are sure that this is not an include line - CheckCasts(filename, clean_lines, linenum, error) - CheckGlobalStatic(filename, clean_lines, linenum, error) - CheckPrintf(filename, clean_lines, linenum, error) - - if file_extension == 'h': - # TODO(unknown): check that 1-arg constructors are explicit. - # How to tell it's a constructor? - # (handled in CheckForNonStandardConstructs for now) - # TODO(unknown): check that classes declare or disable copy/assign - # (level 1 error) - pass - - # Check if people are using the verboten C basic types. The only exception - # we regularly allow is "unsigned short port" for port. - if Search(r'\bshort port\b', line): - if not Search(r'\bunsigned short port\b', line): - error(filename, linenum, 'runtime/int', 4, - 'Use "unsigned short" for ports, not "short"') - else: - match = Search(r'\b(short|long(?! +double)|long long)\b', line) - if match: - error(filename, linenum, 'runtime/int', 4, - 'Use int16/int64/etc, rather than the C type %s' % match.group(1)) - - # Check if some verboten operator overloading is going on - # TODO(unknown): catch out-of-line unary operator&: - # class X {}; - # int operator&(const X& x) { return 42; } // unary operator& - # The trick is it's hard to tell apart from binary operator&: - # class Y { int operator&(const Y& x) { return 23; } }; // binary operator& - if Search(r'\boperator\s*&\s*\(\s*\)', line): - error(filename, linenum, 'runtime/operator', 4, - 'Unary operator& is dangerous. Do not use it.') - - # Check for suspicious usage of "if" like - # } if (a == b) { - if Search(r'\}\s*if\s*\(', line): - error(filename, linenum, 'readability/braces', 4, - 'Did you mean "else if"? If not, start a new line for "if".') - - # Check for potential format string bugs like printf(foo). - # We constrain the pattern not to pick things like DocidForPrintf(foo). - # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str()) - # TODO(unknown): Catch the following case. Need to change the calling - # convention of the whole function to process multiple line to handle it. - # printf( - # boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line); - printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(') - if printf_args: - match = Match(r'([\w.\->()]+)$', printf_args) - if match and match.group(1) != '__VA_ARGS__': - function_name = re.search(r'\b((?:string)?printf)\s*\(', - line, re.I).group(1) - error(filename, linenum, 'runtime/printf', 4, - 'Potential format string bug. Do %s("%%s", %s) instead.' - % (function_name, match.group(1))) - - # Check for potential memset bugs like memset(buf, sizeof(buf), 0). - match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line) - if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)): - error(filename, linenum, 'runtime/memset', 4, - 'Did you mean "memset(%s, 0, %s)"?' 
- % (match.group(1), match.group(2))) - - if Search(r'\busing namespace\b', line): - error(filename, linenum, 'build/namespaces', 5, - 'Do not use namespace using-directives. ' - 'Use using-declarations instead.') - - # Detect variable-length arrays. - match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line) - if (match and match.group(2) != 'return' and match.group(2) != 'delete' and - match.group(3).find(']') == -1): - # Split the size using space and arithmetic operators as delimiters. - # If any of the resulting tokens are not compile time constants then - # report the error. - tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3)) - is_const = True - skip_next = False - for tok in tokens: - if skip_next: - skip_next = False - continue - - if Search(r'sizeof\(.+\)', tok): continue - if Search(r'arraysize\(\w+\)', tok): continue - - tok = tok.lstrip('(') - tok = tok.rstrip(')') - if not tok: continue - if Match(r'\d+', tok): continue - if Match(r'0[xX][0-9a-fA-F]+', tok): continue - if Match(r'k[A-Z0-9]\w*', tok): continue - if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue - if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue - # A catch all for tricky sizeof cases, including 'sizeof expression', - # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)' - # requires skipping the next token because we split on ' ' and '*'. - if tok.startswith('sizeof'): - skip_next = True - continue - is_const = False - break - if not is_const: - error(filename, linenum, 'runtime/arrays', 1, - 'Do not use variable-length arrays. Use an appropriately named ' - "('k' followed by CamelCase) compile-time constant for the size.") - - # Check for use of unnamed namespaces in header files. Registration - # macros are typically OK, so we allow use of "namespace {" on lines - # that end with backslashes. - if (file_extension == 'h' - and Search(r'\bnamespace\s*{', line) - and line[-1] != '\\'): - error(filename, linenum, 'build/namespaces', 4, - 'Do not use unnamed namespaces in header files. See ' - 'http://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces' - ' for more information.') - - -def CheckGlobalStatic(filename, clean_lines, linenum, error): - """Check for unsafe global or static objects. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Match two lines at a time to support multiline declarations - if linenum + 1 < clean_lines.NumLines() and not Search(r'[;({]', line): - line += clean_lines.elided[linenum + 1].strip() - - # Check for people declaring static/global STL strings at the top level. - # This is dangerous because the C++ language does not guarantee that - # globals with constructors are initialized before the first access. - match = Match( - r'((?:|static +)(?:|const +))string +([a-zA-Z0-9_:]+)\b(.*)', - line) - - # Remove false positives: - # - String pointers (as opposed to values). - # string *pointer - # const string *pointer - # string const *pointer - # string *const pointer - # - # - Functions and template specializations. - # string Function(... - # string Class::Method(... - # - # - Operators. These are matched separately because operator names - # cross non-word boundaries, and trying to match both operators - # and functions at the same time would decrease accuracy of - # matching identifiers. 
- # string Class::operator*() - if (match and - not Search(r'\bstring\b(\s+const)?\s*\*\s*(const\s+)?\w', line) and - not Search(r'\boperator\W', line) and - not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(3))): - error(filename, linenum, 'runtime/string', 4, - 'For a static/global string constant, use a C style string instead: ' - '"%schar %s[]".' % - (match.group(1), match.group(2))) - - if Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line): - error(filename, linenum, 'runtime/init', 4, - 'You seem to be initializing a member variable with itself.') - - -def CheckPrintf(filename, clean_lines, linenum, error): - """Check for printf related issues. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # When snprintf is used, the second argument shouldn't be a literal. - match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line) - if match and match.group(2) != '0': - # If 2nd arg is zero, snprintf is used to calculate size. - error(filename, linenum, 'runtime/printf', 3, - 'If you can, use sizeof(%s) instead of %s as the 2nd arg ' - 'to snprintf.' % (match.group(1), match.group(2))) - - # Check if some verboten C functions are being used. - if Search(r'\bsprintf\s*\(', line): - error(filename, linenum, 'runtime/printf', 5, - 'Never use sprintf. Use snprintf instead.') - match = Search(r'\b(strcpy|strcat)\s*\(', line) - if match: - error(filename, linenum, 'runtime/printf', 4, - 'Almost always, snprintf is better than %s' % match.group(1)) - - -def IsDerivedFunction(clean_lines, linenum): - """Check if current line contains an inherited function. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line contains a function with "override" - virt-specifier. - """ - # Scan back a few lines for start of current function - for i in xrange(linenum, max(-1, linenum - 10), -1): - match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i]) - if match: - # Look for "override" after the matching closing parenthesis - line, _, closing_paren = CloseExpression( - clean_lines, i, len(match.group(1))) - return (closing_paren >= 0 and - Search(r'\boverride\b', line[closing_paren:])) - return False - - -def IsOutOfLineMethodDefinition(clean_lines, linenum): - """Check if current line contains an out-of-line method definition. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line contains an out-of-line method definition. - """ - # Scan back a few lines for start of current function - for i in xrange(linenum, max(-1, linenum - 10), -1): - if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]): - return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None - return False - - -def IsInitializerList(clean_lines, linenum): - """Check if current line is inside constructor initializer list. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line appears to be inside constructor initializer - list, False otherwise. 
- """ - for i in xrange(linenum, 1, -1): - line = clean_lines.elided[i] - if i == linenum: - remove_function_body = Match(r'^(.*)\{\s*$', line) - if remove_function_body: - line = remove_function_body.group(1) - - if Search(r'\s:\s*\w+[({]', line): - # A lone colon tend to indicate the start of a constructor - # initializer list. It could also be a ternary operator, which - # also tend to appear in constructor initializer lists as - # opposed to parameter lists. - return True - if Search(r'\}\s*,\s*$', line): - # A closing brace followed by a comma is probably the end of a - # brace-initialized member in constructor initializer list. - return True - if Search(r'[{};]\s*$', line): - # Found one of the following: - # - A closing brace or semicolon, probably the end of the previous - # function. - # - An opening brace, probably the start of current class or namespace. - # - # Current line is probably not inside an initializer list since - # we saw one of those things without seeing the starting colon. - return False - - # Got to the beginning of the file without seeing the start of - # constructor initializer list. - return False - - -def CheckForNonConstReference(filename, clean_lines, linenum, - nesting_state, error): - """Check for non-const references. - - Separate from CheckLanguage since it scans backwards from current - line, instead of scanning forward. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - # Do nothing if there is no '&' on current line. - line = clean_lines.elided[linenum] - if '&' not in line: - return - - # If a function is inherited, current function doesn't have much of - # a choice, so any non-const references should not be blamed on - # derived function. - if IsDerivedFunction(clean_lines, linenum): - return - - # Don't warn on out-of-line method definitions, as we would warn on the - # in-line declaration, if it isn't marked with 'override'. - if IsOutOfLineMethodDefinition(clean_lines, linenum): - return - - # Long type names may be broken across multiple lines, usually in one - # of these forms: - # LongType - # ::LongTypeContinued &identifier - # LongType:: - # LongTypeContinued &identifier - # LongType< - # ...>::LongTypeContinued &identifier - # - # If we detected a type split across two lines, join the previous - # line to current line so that we can match const references - # accordingly. - # - # Note that this only scans back one line, since scanning back - # arbitrary number of lines would be expensive. If you have a type - # that spans more than 2 lines, please use a typedef. 
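The reference-parameter patterns defined earlier in this file can be exercised on their own. The snippet below is an editorial sketch with an invented declaration; the regexes are copied verbatim from the definitions above, and it shows how the pair separates const references, which are accepted, from the non-const reference that this check reports.

import re

_RE_PATTERN_IDENT = r'[_a-zA-Z]\w*'
_RE_PATTERN_TYPE = (
    r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
    r'(?:\w|'
    r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
    r'::)+')
_RE_PATTERN_REF_PARAM = re.compile(
    r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
    r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
_RE_PATTERN_CONST_REF_PARAM = (
    r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
    r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')

# Invented example declaration: one const reference, one non-const reference.
decl = 'void Resize(const std::string& name, Widget& widget)'
ref_params = re.findall(_RE_PATTERN_REF_PARAM, decl)
non_const = [p for p in ref_params
             if not re.match(_RE_PATTERN_CONST_REF_PARAM, p)]
assert ref_params == ['const std::string& name', 'Widget& widget']
assert non_const == ['Widget& widget']   # only this one would be reported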
- if linenum > 1: - previous = None - if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line): - # previous_line\n + ::current_line - previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$', - clean_lines.elided[linenum - 1]) - elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line): - # previous_line::\n + current_line - previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$', - clean_lines.elided[linenum - 1]) - if previous: - line = previous.group(1) + line.lstrip() - else: - # Check for templated parameter that is split across multiple lines - endpos = line.rfind('>') - if endpos > -1: - (_, startline, startpos) = ReverseCloseExpression( - clean_lines, linenum, endpos) - if startpos > -1 and startline < linenum: - # Found the matching < on an earlier line, collect all - # pieces up to current line. - line = '' - for i in xrange(startline, linenum + 1): - line += clean_lines.elided[i].strip() - - # Check for non-const references in function parameters. A single '&' may - # found in the following places: - # inside expression: binary & for bitwise AND - # inside expression: unary & for taking the address of something - # inside declarators: reference parameter - # We will exclude the first two cases by checking that we are not inside a - # function body, including one that was just introduced by a trailing '{'. - # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare]. - if (nesting_state.previous_stack_top and - not (isinstance(nesting_state.previous_stack_top, _ClassInfo) or - isinstance(nesting_state.previous_stack_top, _NamespaceInfo))): - # Not at toplevel, not within a class, and not within a namespace - return - - # Avoid initializer lists. We only need to scan back from the - # current line for something that starts with ':'. - # - # We don't need to check the current line, since the '&' would - # appear inside the second set of parentheses on the current line as - # opposed to the first set. - if linenum > 0: - for i in xrange(linenum - 1, max(0, linenum - 10), -1): - previous_line = clean_lines.elided[i] - if not Search(r'[),]\s*$', previous_line): - break - if Match(r'^\s*:\s+\S', previous_line): - return - - # Avoid preprocessors - if Search(r'\\\s*$', line): - return - - # Avoid constructor initializer lists - if IsInitializerList(clean_lines, linenum): - return - - # We allow non-const references in a few standard places, like functions - # called "swap()" or iostream operators like "<<" or ">>". Do not check - # those function parameters. - # - # We also accept & in static_assert, which looks like a function but - # it's actually a declaration expression. - whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|' - r'operator\s*[<>][<>]|' - r'static_assert|COMPILE_ASSERT' - r')\s*\(') - if Search(whitelisted_functions, line): - return - elif not Search(r'\S+\([^)]*$', line): - # Don't see a whitelisted function on this line. Actually we - # didn't see any function name on this line, so this is likely a - # multi-line parameter list. Try a bit harder to catch this case. - for i in xrange(2): - if (linenum > i and - Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])): - return - - decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body - for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls): - if not Match(_RE_PATTERN_CONST_REF_PARAM, parameter): - error(filename, linenum, 'runtime/references', 2, - 'Is this a non-const reference? 
' - 'If so, make const or use a pointer: ' + - ReplaceAll(' *<', '<', parameter)) - - -def CheckCasts(filename, clean_lines, linenum, error): - """Various cast related checks. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Check to see if they're using an conversion function cast. - # I just try to capture the most common basic types, though there are more. - # Parameterless conversion functions, such as bool(), are allowed as they are - # probably a member operator declaration or default constructor. - match = Search( - r'(\bnew\s+|\S<\s*(?:const\s+)?)?\b' - r'(int|float|double|bool|char|int32|uint32|int64|uint64)' - r'(\([^)].*)', line) - expecting_function = ExpectingFunctionArgs(clean_lines, linenum) - if match and not expecting_function: - matched_type = match.group(2) - - # matched_new_or_template is used to silence two false positives: - # - New operators - # - Template arguments with function types - # - # For template arguments, we match on types immediately following - # an opening bracket without any spaces. This is a fast way to - # silence the common case where the function type is the first - # template argument. False negative with less-than comparison is - # avoided because those operators are usually followed by a space. - # - # function // bracket + no space = false positive - # value < double(42) // bracket + space = true positive - matched_new_or_template = match.group(1) - - # Avoid arrays by looking for brackets that come after the closing - # parenthesis. - if Match(r'\([^()]+\)\s*\[', match.group(3)): - return - - # Other things to ignore: - # - Function pointers - # - Casts to pointer types - # - Placement new - # - Alias declarations - matched_funcptr = match.group(3) - if (matched_new_or_template is None and - not (matched_funcptr and - (Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(', - matched_funcptr) or - matched_funcptr.startswith('(*)'))) and - not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and - not Search(r'new\(\S+\)\s*' + matched_type, line)): - error(filename, linenum, 'readability/casting', 4, - 'Using deprecated casting style. ' - 'Use static_cast<%s>(...) instead' % - matched_type) - - if not expecting_function: - CheckCStyleCast(filename, clean_lines, linenum, 'static_cast', - r'\((int|float|double|bool|char|u?int(16|32|64))\)', error) - - # This doesn't catch all cases. Consider (const char * const)"hello". - # - # (char *) "foo" should always be a const_cast (reinterpret_cast won't - # compile). - if CheckCStyleCast(filename, clean_lines, linenum, 'const_cast', - r'\((char\s?\*+\s?)\)\s*"', error): - pass - else: - # Check pointer casts for other than string constants - CheckCStyleCast(filename, clean_lines, linenum, 'reinterpret_cast', - r'\((\w+\s?\*+\s?)\)', error) - - # In addition, we look for people taking the address of a cast. This - # is dangerous -- casts can assign to temporaries, so the pointer doesn't - # point where you think. - # - # Some non-identifier character is required before the '&' for the - # expression to be recognized as a cast. 
These are casts: - # expression = &static_cast(temporary()); - # function(&(int*)(temporary())); - # - # This is not a cast: - # reference_type&(int* function_param); - match = Search( - r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|' - r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line) - if match: - # Try a better error message when the & is bound to something - # dereferenced by the casted pointer, as opposed to the casted - # pointer itself. - parenthesis_error = False - match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line) - if match: - _, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1))) - if x1 >= 0 and clean_lines.elided[y1][x1] == '(': - _, y2, x2 = CloseExpression(clean_lines, y1, x1) - if x2 >= 0: - extended_line = clean_lines.elided[y2][x2:] - if y2 < clean_lines.NumLines() - 1: - extended_line += clean_lines.elided[y2 + 1] - if Match(r'\s*(?:->|\[)', extended_line): - parenthesis_error = True - - if parenthesis_error: - error(filename, linenum, 'readability/casting', 4, - ('Are you taking an address of something dereferenced ' - 'from a cast? Wrapping the dereferenced expression in ' - 'parentheses will make the binding more obvious')) - else: - error(filename, linenum, 'runtime/casting', 4, - ('Are you taking an address of a cast? ' - 'This is dangerous: could be a temp var. ' - 'Take the address before doing the cast, rather than after')) - - -def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error): - """Checks for a C-style cast by looking for the pattern. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - cast_type: The string for the C++ cast to recommend. This is either - reinterpret_cast, static_cast, or const_cast, depending. - pattern: The regular expression used to find C-style casts. - error: The function to call with any errors found. - - Returns: - True if an error was emitted. - False otherwise. - """ - line = clean_lines.elided[linenum] - match = Search(pattern, line) - if not match: - return False - - # Exclude lines with keywords that tend to look like casts - context = line[0:match.start(1) - 1] - if Match(r'.*\b(?:sizeof|alignof|alignas|[_A-Z][_A-Z0-9]*)\s*$', context): - return False - - # Try expanding current context to see if we one level of - # parentheses inside a macro. - if linenum > 0: - for i in xrange(linenum - 1, max(0, linenum - 5), -1): - context = clean_lines.elided[i] + context - if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context): - return False - - # operator++(int) and operator--(int) - if context.endswith(' operator++') or context.endswith(' operator--'): - return False - - # A single unnamed argument for a function tends to look like old - # style cast. If we see those, don't issue warnings for deprecated - # casts, instead issue warnings for unnamed arguments where - # appropriate. - # - # These are things that we want warnings for, since the style guide - # explicitly require all parameters to be named: - # Function(int); - # Function(int) { - # ConstMember(int) const; - # ConstMember(int) const { - # ExceptionMember(int) throw (...); - # ExceptionMember(int) throw (...) 
{ - # PureVirtual(int) = 0; - # [](int) -> bool { - # - # These are functions of some sort, where the compiler would be fine - # if they had named parameters, but people often omit those - # identifiers to reduce clutter: - # (FunctionPointer)(int); - # (FunctionPointer)(int) = value; - # Function((function_pointer_arg)(int)) - # Function((function_pointer_arg)(int), int param) - # ; - # <(FunctionPointerTemplateArgument)(int)>; - remainder = line[match.end(0):] - if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)', - remainder): - # Looks like an unnamed parameter. - - # Don't warn on any kind of template arguments. - if Match(r'^\s*>', remainder): - return False - - # Don't warn on assignments to function pointers, but keep warnings for - # unnamed parameters to pure virtual functions. Note that this pattern - # will also pass on assignments of "0" to function pointers, but the - # preferred values for those would be "nullptr" or "NULL". - matched_zero = Match(r'^\s=\s*(\S+)\s*;', remainder) - if matched_zero and matched_zero.group(1) != '0': - return False - - # Don't warn on function pointer declarations. For this we need - # to check what came before the "(type)" string. - if Match(r'.*\)\s*$', line[0:match.start(0)]): - return False - - # Don't warn if the parameter is named with block comments, e.g.: - # Function(int /*unused_param*/); - raw_line = clean_lines.raw_lines[linenum] - if '/*' in raw_line: - return False - - # Passed all filters, issue warning here. - error(filename, linenum, 'readability/function', 3, - 'All parameters should be named in a function') - return True - - # At this point, all that should be left is actual casts. - error(filename, linenum, 'readability/casting', 4, - 'Using C-style cast. Use %s<%s>(...) instead' % - (cast_type, match.group(1))) - - return True - - -def ExpectingFunctionArgs(clean_lines, linenum): - """Checks whether where function type arguments are expected. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - - Returns: - True if the line at 'linenum' is inside something that expects arguments - of function types. - """ - line = clean_lines.elided[linenum] - return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or - (linenum >= 2 and - (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$', - clean_lines.elided[linenum - 1]) or - Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$', - clean_lines.elided[linenum - 2]) or - Search(r'\bstd::m?function\s*\<\s*$', - clean_lines.elided[linenum - 1])))) - - -_HEADERS_CONTAINING_TEMPLATES = ( - ('', ('deque',)), - ('', ('unary_function', 'binary_function', - 'plus', 'minus', 'multiplies', 'divides', 'modulus', - 'negate', - 'equal_to', 'not_equal_to', 'greater', 'less', - 'greater_equal', 'less_equal', - 'logical_and', 'logical_or', 'logical_not', - 'unary_negate', 'not1', 'binary_negate', 'not2', - 'bind1st', 'bind2nd', - 'pointer_to_unary_function', - 'pointer_to_binary_function', - 'ptr_fun', - 'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t', - 'mem_fun_ref_t', - 'const_mem_fun_t', 'const_mem_fun1_t', - 'const_mem_fun_ref_t', 'const_mem_fun1_ref_t', - 'mem_fun_ref', - )), - ('', ('numeric_limits',)), - ('', ('list',)), - ('', ('map', 'multimap',)), - ('', ('allocator',)), - ('', ('queue', 'priority_queue',)), - ('', ('set', 'multiset',)), - ('', ('stack',)), - ('', ('char_traits', 'basic_string',)), - ('', ('tuple',)), - ('', ('pair',)), - ('', ('vector',)), - - # gcc extensions. 
- # Note: std::hash is their hash, ::hash is our hash - ('', ('hash_map', 'hash_multimap',)), - ('', ('hash_set', 'hash_multiset',)), - ('', ('slist',)), - ) - -_RE_PATTERN_STRING = re.compile(r'\bstring\b') - -_re_pattern_algorithm_header = [] -for _template in ('copy', 'max', 'min', 'min_element', 'sort', 'swap', - 'transform'): - # Match max(..., ...), max(..., ...), but not foo->max, foo.max or - # type::max(). - _re_pattern_algorithm_header.append( - (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'), - _template, - '')) - -_re_pattern_templates = [] -for _header, _templates in _HEADERS_CONTAINING_TEMPLATES: - for _template in _templates: - _re_pattern_templates.append( - (re.compile(r'(\<|\b)' + _template + r'\s*\<'), - _template + '<>', - _header)) - - -def FilesBelongToSameModule(filename_cc, filename_h): - """Check if these two filenames belong to the same module. - - The concept of a 'module' here is a as follows: - foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the - same 'module' if they are in the same directory. - some/path/public/xyzzy and some/path/internal/xyzzy are also considered - to belong to the same module here. - - If the filename_cc contains a longer path than the filename_h, for example, - '/absolute/path/to/base/sysinfo.cc', and this file would include - 'base/sysinfo.h', this function also produces the prefix needed to open the - header. This is used by the caller of this function to more robustly open the - header file. We don't have access to the real include paths in this context, - so we need this guesswork here. - - Known bugs: tools/base/bar.cc and base/bar.h belong to the same module - according to this implementation. Because of this, this function gives - some false positives. This should be sufficiently rare in practice. - - Args: - filename_cc: is the path for the .cc file - filename_h: is the path for the header path - - Returns: - Tuple with a bool and a string: - bool: True if filename_cc and filename_h belong to the same module. - string: the additional prefix needed to open the header file. - """ - - if not filename_cc.endswith('.cc'): - return (False, '') - filename_cc = filename_cc[:-len('.cc')] - if filename_cc.endswith('_unittest'): - filename_cc = filename_cc[:-len('_unittest')] - elif filename_cc.endswith('_test'): - filename_cc = filename_cc[:-len('_test')] - filename_cc = filename_cc.replace('/public/', '/') - filename_cc = filename_cc.replace('/internal/', '/') - - if not filename_h.endswith('.h'): - return (False, '') - filename_h = filename_h[:-len('.h')] - if filename_h.endswith('-inl'): - filename_h = filename_h[:-len('-inl')] - filename_h = filename_h.replace('/public/', '/') - filename_h = filename_h.replace('/internal/', '/') - - files_belong_to_same_module = filename_cc.endswith(filename_h) - common_path = '' - if files_belong_to_same_module: - common_path = filename_cc[:-len(filename_h)] - return files_belong_to_same_module, common_path - - -def UpdateIncludeState(filename, include_dict, io=codecs): - """Fill up the include_dict with new includes found from the file. - - Args: - filename: the name of the header to read. - include_dict: a dictionary in which the headers are inserted. - io: The io factory to use to read the file. Provided for testability. - - Returns: - True if a header was successfully added. False otherwise. 
- """ - headerfile = None - try: - headerfile = io.open(filename, 'r', 'utf8', 'replace') - except IOError: - return False - linenum = 0 - for line in headerfile: - linenum += 1 - clean_line = CleanseComments(line) - match = _RE_PATTERN_INCLUDE.search(clean_line) - if match: - include = match.group(2) - include_dict.setdefault(include, linenum) - return True - - -def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error, - io=codecs): - """Reports for missing stl includes. - - This function will output warnings to make sure you are including the headers - necessary for the stl containers and functions that you use. We only give one - reason to include a header. For example, if you use both equal_to<> and - less<> in a .h file, only one (the latter in the file) of these will be - reported as a reason to include the . - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - include_state: An _IncludeState instance. - error: The function to call with any errors found. - io: The IO factory to use to read the header file. Provided for unittest - injection. - """ - required = {} # A map of header name to linenumber and the template entity. - # Example of required: { '': (1219, 'less<>') } - - for linenum in xrange(clean_lines.NumLines()): - line = clean_lines.elided[linenum] - if not line or line[0] == '#': - continue - - # String is special -- it is a non-templatized type in STL. - matched = _RE_PATTERN_STRING.search(line) - if matched: - # Don't warn about strings in non-STL namespaces: - # (We check only the first match per line; good enough.) - prefix = line[:matched.start()] - if prefix.endswith('std::') or not prefix.endswith('::'): - required[''] = (linenum, 'string') - - for pattern, template, header in _re_pattern_algorithm_header: - if pattern.search(line): - required[header] = (linenum, template) - - # The following function is just a speed up, no semantics are changed. - if not '<' in line: # Reduces the cpu time usage by skipping lines. - continue - - for pattern, template, header in _re_pattern_templates: - if pattern.search(line): - required[header] = (linenum, template) - - # The policy is that if you #include something in foo.h you don't need to - # include it again in foo.cc. Here, we will look at possible includes. - # Let's flatten the include_state include_list and copy it into a dictionary. - include_dict = dict([item for sublist in include_state.include_list - for item in sublist]) - - # Did we find the header for this file (if any) and successfully load it? - header_found = False - - # Use the absolute path so that matching works properly. - abs_filename = FileInfo(filename).FullName() - - # For Emacs's flymake. - # If cpplint is invoked from Emacs's flymake, a temporary file is generated - # by flymake and that file name might end with '_flymake.cc'. In that case, - # restore original file name here so that the corresponding header file can be - # found. - # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h' - # instead of 'foo_flymake.h' - abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename) - - # include_dict is modified during iteration, so we iterate over a copy of - # the keys. 
- header_keys = include_dict.keys() - for header in header_keys: - (same_module, common_path) = FilesBelongToSameModule(abs_filename, header) - fullpath = common_path + header - if same_module and UpdateIncludeState(fullpath, include_dict, io): - header_found = True - - # If we can't find the header file for a .cc, assume it's because we don't - # know where to look. In that case we'll give up as we're not sure they - # didn't include it in the .h file. - # TODO(unknown): Do a better job of finding .h files so we are confident that - # not having the .h file means there isn't one. - if filename.endswith('.cc') and not header_found: - return - - # All the lines have been processed, report the errors found. - for required_header_unstripped in required: - template = required[required_header_unstripped][1] - if required_header_unstripped.strip('<>"') not in include_dict: - error(filename, required[required_header_unstripped][0], - 'build/include_what_you_use', 4, - 'Add #include ' + required_header_unstripped + ' for ' + template) - - -_RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<') - - -def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error): - """Check that make_pair's template arguments are deduced. - - G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are - specified explicitly, and such use isn't intended in any case. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line) - if match: - error(filename, linenum, 'build/explicit_make_pair', - 4, # 4 = high confidence - 'For C++11-compatibility, omit template arguments from make_pair' - ' OR use pair directly OR if appropriate, construct a pair directly') - - -def CheckDefaultLambdaCaptures(filename, clean_lines, linenum, error): - """Check that default lambda captures are not used. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # A lambda introducer specifies a default capture if it starts with "[=" - # or if it starts with "[&" _not_ followed by an identifier. - match = Match(r'^(.*)\[\s*(?:=|&[^\w])', line) - if match: - # Found a potential error, check what comes after the lambda-introducer. - # If it's not open parenthesis (for lambda-declarator) or open brace - # (for compound-statement), it's not a lambda. - line, _, pos = CloseExpression(clean_lines, linenum, len(match.group(1))) - if pos >= 0 and Match(r'^\s*[{(]', line[pos:]): - error(filename, linenum, 'build/c++11', - 4, # 4 = high confidence - 'Default lambda captures are an unapproved C++ feature.') - - -def CheckRedundantVirtual(filename, clean_lines, linenum, error): - """Check if line contains a redundant "virtual" function-specifier. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - # Look for "virtual" on current line. - line = clean_lines.elided[linenum] - virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line) - if not virtual: return - - # Ignore "virtual" keywords that are near access-specifiers. 
These - # are only used in class base-specifier and do not apply to member - # functions. - if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or - Match(r'^\s+(public|protected|private)\b', virtual.group(3))): - return - - # Ignore the "virtual" keyword from virtual base classes. Usually - # there is a column on the same line in these cases (virtual base - # classes are rare in google3 because multiple inheritance is rare). - if Match(r'^.*[^:]:[^:].*$', line): return - - # Look for the next opening parenthesis. This is the start of the - # parameter list (possibly on the next line shortly after virtual). - # TODO(unknown): doesn't work if there are virtual functions with - # decltype() or other things that use parentheses, but csearch suggests - # that this is rare. - end_col = -1 - end_line = -1 - start_col = len(virtual.group(2)) - for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())): - line = clean_lines.elided[start_line][start_col:] - parameter_list = Match(r'^([^(]*)\(', line) - if parameter_list: - # Match parentheses to find the end of the parameter list - (_, end_line, end_col) = CloseExpression( - clean_lines, start_line, start_col + len(parameter_list.group(1))) - break - start_col = 0 - - if end_col < 0: - return # Couldn't find end of parameter list, give up - - # Look for "override" or "final" after the parameter list - # (possibly on the next few lines). - for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())): - line = clean_lines.elided[i][end_col:] - match = Search(r'\b(override|final)\b', line) - if match: - error(filename, linenum, 'readability/inheritance', 4, - ('"virtual" is redundant since function is ' - 'already declared as "%s"' % match.group(1))) - - # Set end_col to check whole lines after we are done with the - # first line. - end_col = 0 - if Search(r'[^\w]\s*$', line): - break - - -def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error): - """Check if line contains a redundant "override" or "final" virt-specifier. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - # Look for closing parenthesis nearby. We need one to confirm where - # the declarator ends and where the virt-specifier starts to avoid - # false positives. - line = clean_lines.elided[linenum] - declarator_end = line.rfind(')') - if declarator_end >= 0: - fragment = line[declarator_end:] - else: - if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0: - fragment = line - else: - return - - # Check that at most one of "override" or "final" is present, not both - if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment): - error(filename, linenum, 'readability/inheritance', 4, - ('"override" is redundant since function is ' - 'already declared as "final"')) - - - - -# Returns true if we are at a new block, and it is directly -# inside of a namespace. -def IsBlockInNameSpace(nesting_state, is_forward_declaration): - """Checks that the new block is directly in a namespace. - - Args: - nesting_state: The _NestingState object that contains info about our state. - is_forward_declaration: If the class is a forward declared class. - Returns: - Whether or not the new block is directly in a namespace. 
- """ - if is_forward_declaration: - if len(nesting_state.stack) >= 1 and ( - isinstance(nesting_state.stack[-1], _NamespaceInfo)): - return True - else: - return False - - return (len(nesting_state.stack) > 1 and - nesting_state.stack[-1].check_namespace_indentation and - isinstance(nesting_state.stack[-2], _NamespaceInfo)) - - -def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item, - raw_lines_no_comments, linenum): - """This method determines if we should apply our namespace indentation check. - - Args: - nesting_state: The current nesting state. - is_namespace_indent_item: If we just put a new class on the stack, True. - If the top of the stack is not a class, or we did not recently - add the class, False. - raw_lines_no_comments: The lines without the comments. - linenum: The current line number we are processing. - - Returns: - True if we should apply our namespace indentation check. Currently, it - only works for classes and namespaces inside of a namespace. - """ - - is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments, - linenum) - - if not (is_namespace_indent_item or is_forward_declaration): - return False - - # If we are in a macro, we do not want to check the namespace indentation. - if IsMacroDefinition(raw_lines_no_comments, linenum): - return False - - return IsBlockInNameSpace(nesting_state, is_forward_declaration) - - -# Call this method if the line is directly inside of a namespace. -# If the line above is blank (excluding comments) or the start of -# an inner namespace, it cannot be indented. -def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum, - error): - line = raw_lines_no_comments[linenum] - if Match(r'^\s+', line): - error(filename, linenum, 'runtime/indentation_namespace', 4, - 'Do not indent within a namespace') - - -def ProcessLine(filename, file_extension, clean_lines, line, - include_state, function_state, nesting_state, error, - extra_check_functions=[]): - """Processes a single line in the file. - - Args: - filename: Filename of the file that is being processed. - file_extension: The extension (dot not included) of the file. - clean_lines: An array of strings, each representing a line of the file, - with comments stripped. - line: Number of line being processed. - include_state: An _IncludeState instance in which the headers are inserted. - function_state: A _FunctionState instance which counts function lines, etc. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - extra_check_functions: An array of additional check functions that will be - run on each source line. 
Each function takes 4 - arguments: filename, clean_lines, line, error - """ - raw_lines = clean_lines.raw_lines - ParseNolintSuppressions(filename, raw_lines[line], line, error) - nesting_state.Update(filename, clean_lines, line, error) - CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line, - error) - if nesting_state.InAsmBlock(): return - CheckForFunctionLengths(filename, clean_lines, line, function_state, error) - CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error) - CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error) - CheckLanguage(filename, clean_lines, line, file_extension, include_state, - nesting_state, error) - CheckForNonConstReference(filename, clean_lines, line, nesting_state, error) - CheckForNonStandardConstructs(filename, clean_lines, line, - nesting_state, error) - CheckVlogArguments(filename, clean_lines, line, error) - CheckPosixThreading(filename, clean_lines, line, error) - CheckInvalidIncrement(filename, clean_lines, line, error) - CheckMakePairUsesDeduction(filename, clean_lines, line, error) - CheckDefaultLambdaCaptures(filename, clean_lines, line, error) - CheckRedundantVirtual(filename, clean_lines, line, error) - CheckRedundantOverrideOrFinal(filename, clean_lines, line, error) - for check_fn in extra_check_functions: - check_fn(filename, clean_lines, line, error) - -def FlagCxx11Features(filename, clean_lines, linenum, error): - """Flag those c++11 features that we only allow in certain places. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Flag unapproved C++11 headers. - include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) - if include and include.group(1) in ('cfenv', - 'condition_variable', - 'fenv.h', - 'future', - 'mutex', - 'thread', - 'chrono', - 'ratio', - 'regex', - 'system_error', - ): - error(filename, linenum, 'build/c++11', 5, - ('<%s> is an unapproved C++11 header.') % include.group(1)) - - # The only place where we need to worry about C++11 keywords and library - # features in preprocessor directives is in macro definitions. - if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return - - # These are classes and free functions. The classes are always - # mentioned as std::*, but we only catch the free functions if - # they're not found by ADL. They're alphabetical by header. - for top_name in ( - # type_traits - 'alignment_of', - 'aligned_union', - ): - if Search(r'\bstd::%s\b' % top_name, line): - error(filename, linenum, 'build/c++11', 5, - ('std::%s is an unapproved C++11 class or function. Send c-style ' - 'an example of where it would make your code more readable, and ' - 'they may let you use it.') % top_name) - - -def ProcessFileData(filename, file_extension, lines, error, - extra_check_functions=[]): - """Performs lint checks and reports any errors to the given error function. - - Args: - filename: Filename of the file that is being processed. - file_extension: The extension (dot not included) of the file. - lines: An array of strings, each representing a line of the file, with the - last element being empty if the file is terminated with a newline. 
- error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ - lines = (['// marker so line numbers and indices both start at 1'] + lines + - ['// marker so line numbers end in a known way']) - - include_state = _IncludeState() - function_state = _FunctionState() - nesting_state = NestingState() - - ResetNolintSuppressions() - - CheckForCopyright(filename, lines, error) - - RemoveMultiLineComments(filename, lines, error) - clean_lines = CleansedLines(lines) - - if file_extension == 'h': - CheckForHeaderGuard(filename, clean_lines, error) - - for line in xrange(clean_lines.NumLines()): - ProcessLine(filename, file_extension, clean_lines, line, - include_state, function_state, nesting_state, error, - extra_check_functions) - FlagCxx11Features(filename, clean_lines, line, error) - nesting_state.CheckCompletedBlocks(filename, error) - - CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error) - - # Check that the .cc file has included its header if it exists. - if file_extension == 'cc': - CheckHeaderFileIncluded(filename, include_state, error) - - # We check here rather than inside ProcessLine so that we see raw - # lines rather than "cleaned" lines. - CheckForBadCharacters(filename, lines, error) - - CheckForNewlineAtEOF(filename, lines, error) - -def ProcessConfigOverrides(filename): - """ Loads the configuration files and processes the config overrides. - - Args: - filename: The name of the file being processed by the linter. - - Returns: - False if the current |filename| should not be processed further. - """ - - abs_filename = os.path.abspath(filename) - cfg_filters = [] - keep_looking = True - while keep_looking: - abs_path, base_name = os.path.split(abs_filename) - if not base_name: - break # Reached the root directory. - - cfg_file = os.path.join(abs_path, "CPPLINT.cfg") - abs_filename = abs_path - if not os.path.isfile(cfg_file): - continue - - try: - with open(cfg_file) as file_handle: - for line in file_handle: - line, _, _ = line.partition('#') # Remove comments. - if not line.strip(): - continue - - name, _, val = line.partition('=') - name = name.strip() - val = val.strip() - if name == 'set noparent': - keep_looking = False - elif name == 'filter': - cfg_filters.append(val) - elif name == 'exclude_files': - # When matching exclude_files pattern, use the base_name of - # the current file name or the directory name we are processing. - # For example, if we are checking for lint errors in /foo/bar/baz.cc - # and we found the .cfg file at /foo/CPPLINT.cfg, then the config - # file's "exclude_files" filter is meant to be checked against "bar" - # and not "baz" nor "bar/baz.cc". - if base_name: - pattern = re.compile(val) - if pattern.match(base_name): - sys.stderr.write('Ignoring "%s": file excluded by "%s". 
' - 'File path component "%s" matches ' - 'pattern "%s"\n' % - (filename, cfg_file, base_name, val)) - return False - elif name == 'linelength': - global _line_length - try: - _line_length = int(val) - except ValueError: - sys.stderr.write('Line length must be numeric.') - else: - sys.stderr.write( - 'Invalid configuration option (%s) in file %s\n' % - (name, cfg_file)) - - except IOError: - sys.stderr.write( - "Skipping config file '%s': Can't open for reading\n" % cfg_file) - keep_looking = False - - # Apply all the accumulated filters in reverse order (top-level directory - # config options having the least priority). - for filter in reversed(cfg_filters): - _AddFilters(filter) - - return True - - -def ProcessFile(filename, vlevel, extra_check_functions=[]): - """Does google-lint on a single file. - - Args: - filename: The name of the file to parse. - - vlevel: The level of errors to report. Every error of confidence - >= verbose_level will be reported. 0 is a good default. - - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ - - _SetVerboseLevel(vlevel) - _BackupFilters() - - if not ProcessConfigOverrides(filename): - _RestoreFilters() - return - - lf_lines = [] - crlf_lines = [] - try: - # Support the UNIX convention of using "-" for stdin. Note that - # we are not opening the file with universal newline support - # (which codecs doesn't support anyway), so the resulting lines do - # contain trailing '\r' characters if we are reading a file that - # has CRLF endings. - # If after the split a trailing '\r' is present, it is removed - # below. - if filename == '-': - lines = codecs.StreamReaderWriter(sys.stdin, - codecs.getreader('utf8'), - codecs.getwriter('utf8'), - 'replace').read().split('\n') - else: - lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n') - - # Remove trailing '\r'. - # The -1 accounts for the extra trailing blank line we get from split() - for linenum in range(len(lines) - 1): - if lines[linenum].endswith('\r'): - lines[linenum] = lines[linenum].rstrip('\r') - crlf_lines.append(linenum + 1) - else: - lf_lines.append(linenum + 1) - - except IOError: - sys.stderr.write( - "Skipping input '%s': Can't open for reading\n" % filename) - _RestoreFilters() - return - - # Note, if no dot is found, this will give the entire filename as the ext. - file_extension = filename[filename.rfind('.') + 1:] - - # When reading from stdin, the extension is unknown, so no cpplint tests - # should rely on the extension. - if filename != '-' and file_extension not in _valid_extensions: - sys.stderr.write('Ignoring %s; not a valid file name ' - '(%s)\n' % (filename, ', '.join(_valid_extensions))) - else: - ProcessFileData(filename, file_extension, lines, Error, - extra_check_functions) - - # If end-of-line sequences are a mix of LF and CR-LF, issue - # warnings on the lines with CR. - # - # Don't issue any warnings if all lines are uniformly LF or CR-LF, - # since critique can handle these just fine, and the style guide - # doesn't dictate a particular end of line sequence. - # - # We can't depend on os.linesep to determine what the desired - # end-of-line sequence should be, since that will return the - # server-side end-of-line sequence. - if lf_lines and crlf_lines: - # Warn on every line with CR. 
An alternative approach might be to - # check whether the file is mostly CRLF or just LF, and warn on the - # minority, we bias toward LF here since most tools prefer LF. - for linenum in crlf_lines: - Error(filename, linenum, 'whitespace/newline', 1, - 'Unexpected \\r (^M) found; better to use only \\n') - - sys.stderr.write('Done processing %s\n' % filename) - _RestoreFilters() - - -def PrintUsage(message): - """Prints a brief usage string and exits, optionally with an error message. - - Args: - message: The optional error message. - """ - sys.stderr.write(_USAGE) - if message: - sys.exit('\nFATAL ERROR: ' + message) - else: - sys.exit(1) - - -def PrintCategories(): - """Prints a list of all the error-categories used by error messages. - - These are the categories used to filter messages via --filter. - """ - sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES)) - sys.exit(0) - - -def ParseArguments(args): - """Parses the command line arguments. - - This may set the output format and verbosity level as side-effects. - - Args: - args: The command line arguments: - - Returns: - The list of filenames to lint. - """ - try: - (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=', - 'counting=', - 'filter=', - 'root=', - 'linelength=', - 'extensions=']) - except getopt.GetoptError: - PrintUsage('Invalid arguments.') - - verbosity = _VerboseLevel() - output_format = _OutputFormat() - filters = '' - counting_style = '' - - for (opt, val) in opts: - if opt == '--help': - PrintUsage(None) - elif opt == '--output': - if val not in ('emacs', 'vs7', 'eclipse'): - PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.') - output_format = val - elif opt == '--verbose': - verbosity = int(val) - elif opt == '--filter': - filters = val - if not filters: - PrintCategories() - elif opt == '--counting': - if val not in ('total', 'toplevel', 'detailed'): - PrintUsage('Valid counting options are total, toplevel, and detailed') - counting_style = val - elif opt == '--root': - global _root - _root = val - elif opt == '--linelength': - global _line_length - try: - _line_length = int(val) - except ValueError: - PrintUsage('Line length must be digits.') - elif opt == '--extensions': - global _valid_extensions - try: - _valid_extensions = set(val.split(',')) - except ValueError: - PrintUsage('Extensions must be comma seperated list.') - - if not filenames: - PrintUsage('No files were specified.') - - _SetOutputFormat(output_format) - _SetVerboseLevel(verbosity) - _SetFilters(filters) - _SetCountingStyle(counting_style) - - return filenames - - -def main(): - filenames = ParseArguments(sys.argv[1:]) - - # Change stderr to write with replacement characters so we don't die - # if we try to print something containing non-ASCII characters. 
- sys.stderr = codecs.StreamReaderWriter(sys.stderr, - codecs.getreader('utf8'), - codecs.getwriter('utf8'), - 'replace') - - _cpplint_state.ResetErrorCounts() - for filename in filenames: - ProcessFile(filename, _cpplint_state.verbose_level) - _cpplint_state.PrintErrorCounts() - # VR fix - otherwise all cpplint are errors from script launch - # sys.exit(_cpplint_state.error_count > 0) - sys.exit(0) - - -if __name__ == '__main__': - main() diff --git a/scripts/cpplint_to_cppcheckxml.py b/scripts/cpplint_to_cppcheckxml.py deleted file mode 100755 index 7a3813c7..00000000 --- a/scripts/cpplint_to_cppcheckxml.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python - -# Convert output from Google's cpplint.py to the cppcheck XML format for -# consumption by the Jenkins cppcheck plugin. - -# Reads from stdin and writes to stderr (to mimic cppcheck) - - -import sys -import re -import cgi - -def cpplint_score_to_cppcheck_severity(score): - # I'm making this up - if score == 1: - return 'style' - elif score == 2: - return 'style' - elif score == 3: - return 'warning' - elif score == 4: - return 'warning' - elif score == 5: - return 'error' - - -def parse(): - # TODO: do this properly, using the xml module. - # Write header - sys.stderr.write('''\n''') - # VR : sys.stderr.write('''\n''') - # Add from VR + [ - sys.stderr.write('''\n''') - sys.stderr.write(''' \n''') - sys.stderr.write(''' \n''') - # -] - - # Do line-by-line conversion - r = re.compile('([^:]*):([0-9]*): ([^\[]*)\[([^\]]*)\] \[([0-9]*)\].*') - - for l in sys.stdin.readlines(): - m = r.match(l.strip()) - if not m: - continue - g = m.groups() - if len(g) != 5: - continue - fname, lineno, msg, label, score = g - severity = cpplint_score_to_cppcheck_severity(int(score)) - # VR : sys.stderr.write('''\n'''%(fname, lineno, label, severity, msg)) - # Add from VR + [ - sys.stderr.write(''' \n'''%(label, severity, cgi.escape(msg,quote=True))) - sys.stderr.write(''' \n'''%(fname, lineno)) - sys.stderr.write(''' \n''') - # -] - - # Write footer - # Add from VR + [ - sys.stderr.write(''' \n''') - # -] - sys.stderr.write('''\n''') - - -if __name__ == '__main__': - parse() - -- cgit v1.2.3 From a38d6085a8dd8c76a0c6db593a4fc6891e253cdb Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 18 Jun 2019 21:54:35 +0200 Subject: Separate the 2 examples as one requires also Eigen3 --- src/Bottleneck_distance/example/CMakeLists.txt | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/Bottleneck_distance/example/CMakeLists.txt b/src/Bottleneck_distance/example/CMakeLists.txt index 8987ac39..3d65963a 100644 --- a/src/Bottleneck_distance/example/CMakeLists.txt +++ b/src/Bottleneck_distance/example/CMakeLists.txt @@ -2,20 +2,24 @@ project(Bottleneck_distance_examples) if (NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable (bottleneck_basic_example bottleneck_basic_example.cpp) + + if (TBB_FOUND) + target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES}) + endif(TBB_FOUND) + + add_test(NAME Bottleneck_distance_example_basic COMMAND $) + +endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) + +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp) target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_PROGRAM_OPTIONS_LIBRARY}) if (TBB_FOUND) target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES}) - target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES}) 
endif(TBB_FOUND) - add_test(NAME Bottleneck_distance_example_basic COMMAND $) add_test(NAME Bottleneck_distance_example_alpha_rips_persistence_bottleneck COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.15" "-m" "0.12" "-d" "3" "-p" "3") - - install(TARGETS bottleneck_basic_example DESTINATION bin) - install(TARGETS alpha_rips_persistence_bottleneck_distance DESTINATION bin) - -endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) +endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) -- cgit v1.2.3 From f570d29b6e2fcf838f1a946fc641a9bba3e6f99f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 18 Jun 2019 22:04:13 +0200 Subject: try to brew update --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index b8a080ea..6787f4f7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -58,6 +58,9 @@ addons: - cgal - python3 +before_install: + - brew update >/dev/null + # When installing through libcgal-dev apt, CMake Error at CGAL Exports.cmake The imported target "CGAL::CGAL Qt5" references the file install: - python3 -m pip install --upgrade pip setuptools wheel -- cgit v1.2.3 From 1b0bbe522f5700941b3b8166e0bab9d93d5304ad Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 18 Jun 2019 22:20:46 +0200 Subject: cgal_mini_sphere_3d requires Eigen3 --- src/Simplex_tree/example/CMakeLists.txt | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt index 857e8518..1a6de60a 100644 --- a/src/Simplex_tree/example/CMakeLists.txt +++ b/src/Simplex_tree/example/CMakeLists.txt @@ -18,10 +18,6 @@ add_test(NAME Simplex_tree_example_simple_simplex_tree COMMAND $) -install(TARGETS Simplex_tree_example_from_cliques_of_graph DESTINATION bin) -install(TARGETS Simplex_tree_example_simple_simplex_tree DESTINATION bin) -install(TARGETS Simplex_tree_example_mini_simplex_tree DESTINATION bin) - # An example with Simplex-tree using CGAL alpha_shapes_3 if(GMP_FOUND AND CGAL_FOUND) add_executable ( Simplex_tree_example_alpha_shapes_3_from_off example_alpha_shapes_3_simplex_tree_from_off_file.cpp ) @@ -32,8 +28,9 @@ if(GMP_FOUND AND CGAL_FOUND) add_test(NAME Simplex_tree_example_alpha_shapes_3_from_off COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/bunny_5000.off") - install(TARGETS Simplex_tree_example_alpha_shapes_3_from_off DESTINATION bin) +endif() +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Simplex_tree_example_cech_complex_cgal_mini_sphere_3d cech_complex_cgal_mini_sphere_3d.cpp ) target_link_libraries(Simplex_tree_example_cech_complex_cgal_mini_sphere_3d ${Boost_PROGRAM_OPTIONS_LIBRARY} ${CGAL_LIBRARY}) if (TBB_FOUND) @@ -41,14 +38,11 @@ if(GMP_FOUND AND CGAL_FOUND) endif() add_test(NAME Simplex_tree_example_cech_complex_cgal_mini_sphere_3d COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" -r 0.3 -d 3) - - install(TARGETS Simplex_tree_example_alpha_shapes_3_from_off DESTINATION bin) -endif() +endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) add_executable ( Simplex_tree_example_graph_expansion_with_blocker graph_expansion_with_blocker.cpp ) if (TBB_FOUND) target_link_libraries(Simplex_tree_example_graph_expansion_with_blocker ${TBB_LIBRARIES}) endif() -add_test(NAME Simplex_tree_example_graph_expansion_with_blocker COMMAND $) -install(TARGETS Simplex_tree_example_graph_expansion_with_blocker DESTINATION bin) +add_test(NAME Simplex_tree_example_graph_expansion_with_blocker COMMAND $) -- cgit v1.2.3 From 
f9f6288c4815c08d561fc8b2a26fa0f0be1404f2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 19 Jun 2019 07:34:47 +0200 Subject: update osx_image as brew is out dated --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6787f4f7..0eaeb67e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,32 +9,32 @@ matrix: include: # A. Mac OSX - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 1. Only examples and associated tests - CMAKE_EXAMPLE='ON' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 2. Only unitary tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='ON' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 3. Only utilities and associated tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='ON' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 4. Only doxygen documentation - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='doxygen' # Issue with sphinx-build with sphinx 2.0.1 # - os: osx - # osx_image: xcode9.4 + # osx_image: xcode10.2 # compiler: clang # env: # # 5. Only Python, associated tests and sphinx documentation -- cgit v1.2.3 From 9d4e6f28ceeb362c9d751cbac730a13d1b5d61c7 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 19 Jun 2019 08:56:56 +0200 Subject: CGAL 4.11 as minimal version --- src/GudhUI/CMakeLists.txt | 2 +- src/Simplex_tree/example/CMakeLists.txt | 4 ++-- .../modules/GUDHI_third_party_libraries.cmake | 2 ++ src/common/example/CMakeLists.txt | 24 ++++++++-------------- src/cython/CMakeLists.txt | 2 +- 5 files changed, 15 insertions(+), 19 deletions(-) diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt index ab29097b..9b73dcaa 100644 --- a/src/GudhUI/CMakeLists.txt +++ b/src/GudhUI/CMakeLists.txt @@ -9,7 +9,7 @@ if (OPENGL_FOUND) find_package(QGLViewer QUIET) if ( QGLVIEWER_FOUND) - if ( CGAL_FOUND AND NOT CGAL_VERSION VERSION_LESS 4.11.0) + if (NOT CGAL_VERSION VERSION_LESS 4.11.0) set(CMAKE_AUTOMOC ON) set(CMAKE_AUTOUIC ON) set(CMAKE_INCLUDE_CURRENT_DIR ON) diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt index 1a6de60a..f99b164c 100644 --- a/src/Simplex_tree/example/CMakeLists.txt +++ b/src/Simplex_tree/example/CMakeLists.txt @@ -19,7 +19,7 @@ add_executable ( Simplex_tree_example_mini_simplex_tree mini_simplex_tree.cpp ) add_test(NAME Simplex_tree_example_mini_simplex_tree COMMAND $) # An example with Simplex-tree using CGAL alpha_shapes_3 -if(GMP_FOUND AND CGAL_FOUND) +if(GMP_FOUND AND NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable ( Simplex_tree_example_alpha_shapes_3_from_off example_alpha_shapes_3_simplex_tree_from_off_file.cpp ) target_link_libraries(Simplex_tree_example_alpha_shapes_3_from_off ${GMP_LIBRARIES} ${CGAL_LIBRARY}) if (TBB_FOUND) @@ -38,7 +38,7 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) endif() add_test(NAME Simplex_tree_example_cech_complex_cgal_mini_sphere_3d COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" -r 0.3 -d 3) -endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) +endif () add_executable ( Simplex_tree_example_graph_expansion_with_blocker graph_expansion_with_blocker.cpp ) if (TBB_FOUND) diff --git 
a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 477900ae..1dbddfd9 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -27,7 +27,9 @@ find_package(CGAL QUIET) if (CGAL_FOUND AND CGAL_VERSION VERSION_LESS 4.11.0) message("++ CGAL version ${CGAL_VERSION} is considered too old to be used by Gudhi.") unset(CGAL_FOUND) + unset(CGAL_VERSION) endif() + if(CGAL_FOUND) message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index b9263cc9..583a0027 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -6,8 +6,6 @@ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CM add_test(NAME Common_example_vector_double_off_reader COMMAND $ "alphacomplexdoc.off") -install(TARGETS vector_double_off_reader DESTINATION bin) - if (DIFF_PATH) # Do not forget to copy test results files in current binary dir file(COPY "vectordoubleoffreader_result.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) @@ -16,21 +14,17 @@ if (DIFF_PATH) ${CMAKE_CURRENT_BINARY_DIR}/vectordoubleoffreader_result.txt ${CMAKE_CURRENT_BINARY_DIR}/alphacomplexdoc.off.txt) endif() -if(CGAL_FOUND) +if(NOT CGAL_VERSION VERSION_LESS 4.11.0) add_executable ( cgal_3D_off_reader example_CGAL_3D_points_off_reader.cpp ) target_link_libraries(cgal_3D_off_reader ${CGAL_LIBRARY}) add_test(NAME Common_example_vector_cgal_3D_off_reader COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off") - - install(TARGETS cgal_3D_off_reader DESTINATION bin) - - # need CGAL 4.7 and Eigen3 - if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp ) - target_link_libraries(cgal_off_reader ${CGAL_LIBRARY}) - add_test(NAME Common_example_vector_cgal_off_reader COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off") - install(TARGETS cgal_off_reader DESTINATION bin) - endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - endif() + +# requires CGAL and Eigen3 +if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp ) + target_link_libraries(cgal_off_reader ${CGAL_LIBRARY}) + add_test(NAME Common_example_vector_cgal_off_reader COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off") +endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index af71bb3b..509a122e 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -412,7 +412,7 @@ if(PYTHONINTERP_FOUND) # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES") else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - message("++ Python documentation module will not be compiled because it requires a CGAL with Eigen3 version greater or equal than 4.8.1") + message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) else(SCIPY_FOUND) -- cgit v1.2.3 From 03a0f7abf10ecef1b5f9af1c32abaf47b7e7217d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 19 Jun 2019 09:30:22 +0200 Subject: Add brew cache 
mechanism --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0eaeb67e..aa0068f4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -43,6 +43,7 @@ matrix: cache: directories: - $HOME/.cache/pip + - $HOME/Library/Caches/Homebrew addons: homebrew: @@ -58,8 +59,8 @@ addons: - cgal - python3 -before_install: - - brew update >/dev/null +before_cache: + - brew cleanup # When installing through libcgal-dev apt, CMake Error at CGAL Exports.cmake The imported target "CGAL::CGAL Qt5" references the file install: -- cgit v1.2.3 From 68fadb4ebfe5b35a2eec99a9babafbee940a1b42 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 19 Jun 2019 11:07:42 +0200 Subject: Remove MathJax.COPYRIGHT as MathJax.js has been removed --- src/common/doc/MathJax.COPYRIGHT | 55 ---------------------------------------- 1 file changed, 55 deletions(-) delete mode 100644 src/common/doc/MathJax.COPYRIGHT diff --git a/src/common/doc/MathJax.COPYRIGHT b/src/common/doc/MathJax.COPYRIGHT deleted file mode 100644 index 077d6dc5..00000000 --- a/src/common/doc/MathJax.COPYRIGHT +++ /dev/null @@ -1,55 +0,0 @@ -Apache License - -Version 2.0, January 2004 - -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. - -"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
- -"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: - - You must give any other recipients of the Work or Derivative Works a copy of this License; and - You must cause any modified files to carry prominent notices stating that You changed the files; and - You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and - If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. - - You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -- cgit v1.2.3 From 9f249e001089af45186aaf0d196d6300705eee31 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Wed, 26 Jun 2019 11:38:54 +0200 Subject: Reverse #aedd692 that changed copyright year --- src/cython/cython/cubical_complex.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cython/cython/cubical_complex.pyx b/src/cython/cython/cubical_complex.pyx index 53d79b92..509af6ca 100644 --- a/src/cython/cython/cubical_complex.pyx +++ b/src/cython/cython/cubical_complex.pyx @@ -13,7 +13,7 @@ from numpy import array as np_array Author(s): Vincent Rouvreau - Copyright (C) 2019 Inria + Copyright (C) 2016 Inria This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by -- cgit v1.2.3 From 2ffd579440e6f0158588f8fa905e2f1dc49a5158 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 26 Jun 2019 16:07:29 +0200 Subject: Change Python template and fix copyrights --- src/cython/cython/alpha_complex.pyx | 10 +++++----- src/cython/cython/bottleneck_distance.pyx | 10 +++++----- src/cython/cython/cubical_complex.pyx | 10 +++++----- src/cython/cython/euclidean_strong_witness_complex.pyx | 10 +++++----- src/cython/cython/euclidean_witness_complex.pyx | 10 +++++----- src/cython/cython/nerve_gic.pyx | 10 +++++----- src/cython/cython/off_reader.pyx | 10 +++++----- src/cython/cython/periodic_cubical_complex.pyx | 10 +++++----- src/cython/cython/persistence_graphical_tools.py | 10 +++++----- src/cython/cython/reader_utils.pyx | 10 +++++----- src/cython/cython/rips_complex.pyx | 10 +++++----- src/cython/cython/simplex_tree.pyx | 10 +++++----- src/cython/cython/strong_witness_complex.pyx | 10 +++++----- src/cython/cython/subsampling.pyx | 10 +++++----- src/cython/cython/tangential_complex.pyx | 10 +++++----- src/cython/cython/witness_complex.pyx | 10 +++++----- ...lpha_complex_diagram_persistence_from_off_file_example.py | 10 +++++----- src/cython/example/alpha_complex_from_points_example.py | 10 +++++----- .../example/alpha_rips_persistence_bottleneck_distance.py | 10 +++++----- 
src/cython/example/bottleneck_basic_example.py | 12 ++++++------ src/cython/example/coordinate_graph_induced_complex.py | 10 +++++----- ...ness_complex_diagram_persistence_from_off_file_example.py | 10 +++++----- ...ness_complex_diagram_persistence_from_off_file_example.py | 10 +++++----- src/cython/example/functional_graph_induced_complex.py | 10 +++++----- src/cython/example/gudhi_graphical_tools_example.py | 10 +++++----- src/cython/example/nerve_of_a_covering.py | 10 +++++----- ..._complex_barcode_persistence_from_perseus_file_example.py | 10 +++++----- .../example/random_cubical_complex_persistence_example.py | 10 +++++----- ...agram_persistence_from_correlation_matrix_file_example.py | 10 +++++----- ..._diagram_persistence_from_distance_matrix_file_example.py | 10 +++++----- ...rips_complex_diagram_persistence_from_off_file_example.py | 10 +++++----- src/cython/example/rips_complex_from_points_example.py | 10 +++++----- src/cython/example/rips_persistence_diagram.py | 12 ++++++------ src/cython/example/simplex_tree_example.py | 10 +++++----- src/cython/example/sparse_rips_persistence_diagram.py | 12 ++++++------ ...angential_complex_plain_homology_from_off_file_example.py | 10 +++++----- src/cython/example/voronoi_graph_induced_complex.py | 10 +++++----- .../example/witness_complex_from_nearest_landmark_table.py | 10 +++++----- src/cython/test/test_alpha_complex.py | 10 +++++----- src/cython/test/test_bottleneck_distance.py | 10 +++++----- src/cython/test/test_cover_complex.py | 10 +++++----- src/cython/test/test_cubical_complex.py | 10 +++++----- src/cython/test/test_euclidean_witness_complex.py | 10 +++++----- src/cython/test/test_reader_utils.py | 10 +++++----- src/cython/test/test_rips_complex.py | 10 +++++----- src/cython/test/test_simplex_tree.py | 10 +++++----- src/cython/test/test_subsampling.py | 10 +++++----- src/cython/test/test_tangential_complex.py | 10 +++++----- src/cython/test/test_witness_complex.py | 10 +++++----- 49 files changed, 248 insertions(+), 248 deletions(-) diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx index 3861ae65..48c2a046 100644 --- a/src/cython/cython/alpha_complex.pyx +++ b/src/cython/cython/alpha_complex.pyx @@ -5,19 +5,19 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": diff --git a/src/cython/cython/bottleneck_distance.pyx b/src/cython/cython/bottleneck_distance.pyx index 1af027be..455a8112 100644 --- a/src/cython/cython/bottleneck_distance.pyx +++ b/src/cython/cython/bottleneck_distance.pyx @@ -3,19 +3,19 @@ from libcpp.vector cimport vector from libcpp.utility cimport pair import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) diff --git a/src/cython/cython/cubical_complex.pyx b/src/cython/cython/cubical_complex.pyx index 8f6c84eb..0dc133d1 100644 --- a/src/cython/cython/cubical_complex.pyx +++ b/src/cython/cython/cubical_complex.pyx @@ -7,19 +7,19 @@ import os from numpy import array as np_array -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2019 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>": diff --git a/src/cython/cython/euclidean_strong_witness_complex.pyx b/src/cython/cython/euclidean_strong_witness_complex.pyx index 101b06c9..e670e9af 100644 --- a/src/cython/cython/euclidean_strong_witness_complex.pyx +++ b/src/cython/cython/euclidean_strong_witness_complex.pyx @@ -2,19 +2,19 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi": cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface": diff --git a/src/cython/cython/euclidean_witness_complex.pyx b/src/cython/cython/euclidean_witness_complex.pyx index ff27f157..192f0221 100644 --- a/src/cython/cython/euclidean_witness_complex.pyx +++ b/src/cython/cython/euclidean_witness_complex.pyx @@ -2,19 +2,19 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi": cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface": diff --git a/src/cython/cython/nerve_gic.pyx b/src/cython/cython/nerve_gic.pyx index 6eeaf5fe..c81125b7 100644 --- a/src/cython/cython/nerve_gic.pyx +++ b/src/cython/cython/nerve_gic.pyx @@ -5,19 +5,19 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Nerve_gic_interface.h" namespace "Gudhi": cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface": diff --git a/src/cython/cython/off_reader.pyx b/src/cython/cython/off_reader.pyx index f1e97532..9efd97ff 100644 --- a/src/cython/cython/off_reader.pyx +++ b/src/cython/cython/off_reader.pyx @@ -3,19 +3,19 @@ from libcpp.vector cimport vector from libcpp.string cimport string import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Off_reader_interface.h" namespace "Gudhi": vector[vector[double]] read_points_from_OFF_file(string off_file) diff --git a/src/cython/cython/periodic_cubical_complex.pyx b/src/cython/cython/periodic_cubical_complex.pyx index 3b50163e..724fadd4 100644 --- a/src/cython/cython/periodic_cubical_complex.pyx +++ b/src/cython/cython/periodic_cubical_complex.pyx @@ -7,19 +7,19 @@ import os from numpy import array as np_array -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2019 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface>": diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py index 638d4d35..ead81d30 100644 --- a/src/cython/cython/persistence_graphical_tools.py +++ b/src/cython/cython/persistence_graphical_tools.py @@ -2,19 +2,19 @@ from os import path from math import isfinite import numpy as np -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau, Bertrand Michel - Copyright (C) 2019 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau, Bertrand Michel" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def __min_birth_max_death(persistence, band=0.): """This function returns (min_birth, max_death) from the persistence. diff --git a/src/cython/cython/reader_utils.pyx b/src/cython/cython/reader_utils.pyx index 87239b29..147fae71 100644 --- a/src/cython/cython/reader_utils.pyx +++ b/src/cython/cython/reader_utils.pyx @@ -7,19 +7,19 @@ from libcpp.pair cimport pair from os import path from numpy import array as np_array -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2019 Inria + Copyright (C) 2017 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2017 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Reader_utils_interface.h" namespace "Gudhi": vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator) diff --git a/src/cython/cython/rips_complex.pyx b/src/cython/cython/rips_complex.pyx index a6f4c0a4..b9a2331f 100644 --- a/src/cython/cython/rips_complex.pyx +++ b/src/cython/cython/rips_complex.pyx @@ -5,19 +5,19 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Rips_complex_interface.h" namespace "Gudhi": cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface": diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx index 2947a766..8e791c17 100644 --- a/src/cython/cython/simplex_tree.pyx +++ b/src/cython/cython/simplex_tree.pyx @@ -6,19 +6,19 @@ from libcpp.string cimport string from numpy import array as np_array -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2019 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": cdef cppclass Simplex_tree_options_full_featured: diff --git a/src/cython/cython/strong_witness_complex.pyx b/src/cython/cython/strong_witness_complex.pyx index afb22ef3..8c155815 100644 --- a/src/cython/cython/strong_witness_complex.pyx +++ b/src/cython/cython/strong_witness_complex.pyx @@ -2,19 +2,19 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi": cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface": diff --git a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx index 21419168..9b80e5e9 100644 --- a/src/cython/cython/subsampling.pyx +++ b/src/cython/cython/subsampling.pyx @@ -4,19 +4,19 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling": vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points) diff --git a/src/cython/cython/tangential_complex.pyx b/src/cython/cython/tangential_complex.pyx index f5d88898..1e20de30 100644 --- a/src/cython/cython/tangential_complex.pyx +++ b/src/cython/cython/tangential_complex.pyx @@ -5,19 +5,19 @@ from libcpp.string cimport string from libcpp cimport bool import os -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Tangential_complex_interface.h" namespace "Gudhi": cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface": diff --git a/src/cython/cython/witness_complex.pyx b/src/cython/cython/witness_complex.pyx index 82aa4bae..91046f57 100644 --- a/src/cython/cython/witness_complex.pyx +++ b/src/cython/cython/witness_complex.pyx @@ -2,19 +2,19 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" cdef extern from "Witness_complex_interface.h" namespace "Gudhi": cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface": diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py index 0957175a..d9925c22 100755 --- a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='AlphaComplex creation from ' 'points read in a OFF file.', diff --git a/src/cython/example/alpha_complex_from_points_example.py b/src/cython/example/alpha_complex_from_points_example.py index 4338ed87..a746998c 100755 --- a/src/cython/example/alpha_complex_from_points_example.py +++ b/src/cython/example/alpha_complex_from_points_example.py @@ -2,19 +2,19 @@ from gudhi import AlphaComplex, SimplexTree -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("AlphaComplex creation from points") diff --git a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py index b5d6c0a7..92bf9d39 100755 --- a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py +++ b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py @@ -4,19 +4,19 @@ import gudhi import argparse import math -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='AlphaComplex and RipsComplex ' 'persistence creation from points read in ' diff --git a/src/cython/example/bottleneck_basic_example.py b/src/cython/example/bottleneck_basic_example.py index 4adc0785..f47e24d4 100755 --- a/src/cython/example/bottleneck_basic_example.py +++ b/src/cython/example/bottleneck_basic_example.py @@ -2,19 +2,19 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Francois Godi, Vincent Rouvreau + Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Francois Godi, Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]] diff --git a/src/cython/example/coordinate_graph_induced_complex.py b/src/cython/example/coordinate_graph_induced_complex.py index d066b20a..f30eaf3e 100755 --- a/src/cython/example/coordinate_graph_induced_complex.py +++ b/src/cython/example/coordinate_graph_induced_complex.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='Coordinate GIC ' 'from points read in a OFF file.', diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py index 604d52c9..1a8de8bd 100755 --- a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='EuclideanStrongWitnessComplex creation from ' 'points read in a OFF file.', diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py index cd949dc4..9a17f8de 100755 --- a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from ' 'points read in a OFF file.', diff --git a/src/cython/example/functional_graph_induced_complex.py b/src/cython/example/functional_graph_induced_complex.py index fcc4373d..f87c6837 100755 --- a/src/cython/example/functional_graph_induced_complex.py +++ b/src/cython/example/functional_graph_induced_complex.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='Functional GIC ' 'from points read in a OFF file.', diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py index a031b0d0..4f64c615 100755 --- a/src/cython/example/gudhi_graphical_tools_example.py +++ b/src/cython/example/gudhi_graphical_tools_example.py @@ -2,19 +2,19 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("Show barcode persistence example") diff --git a/src/cython/example/nerve_of_a_covering.py b/src/cython/example/nerve_of_a_covering.py index 97042865..707f0631 100755 --- a/src/cython/example/nerve_of_a_covering.py +++ b/src/cython/example/nerve_of_a_covering.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='Nerve of a covering creation ' 'from points read in a OFF file.', diff --git a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py index 0c9bf242..40a2fed1 100755 --- a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py +++ b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def is_file_perseus(file): num_lines = open(file).read().count('\n') diff --git a/src/cython/example/random_cubical_complex_persistence_example.py b/src/cython/example/random_cubical_complex_persistence_example.py index efbfcdf8..12db3f41 100755 --- a/src/cython/example/random_cubical_complex_persistence_example.py +++ b/src/cython/example/random_cubical_complex_persistence_example.py @@ -7,19 +7,19 @@ import argparse import operator -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='Random cubical complex.', epilog='Example: ' diff --git a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py index 5621f4a1..98be0123 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py @@ -4,19 +4,19 @@ import gudhi import sys import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2017 Inria + Copyright (C) 2017 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2017 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='RipsComplex creation from ' 'a correlation matrix read in a csv file.', diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py index d298de9a..f02dc0c0 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='RipsComplex creation from ' 'a distance matrix read in a csv file.', diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py index 39537e41..e1f8b85d 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='RipsComplex creation from ' 'points read in a OFF file.', diff --git a/src/cython/example/rips_complex_from_points_example.py b/src/cython/example/rips_complex_from_points_example.py index 26df89ac..f0d7a097 100755 --- a/src/cython/example/rips_complex_from_points_example.py +++ b/src/cython/example/rips_complex_from_points_example.py @@ -2,19 +2,19 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("RipsComplex creation from points") diff --git a/src/cython/example/rips_persistence_diagram.py b/src/cython/example/rips_persistence_diagram.py index 9e6d7a3f..6b02eac9 100755 --- a/src/cython/example/rips_persistence_diagram.py +++ b/src/cython/example/rips_persistence_diagram.py @@ -2,19 +2,19 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Marc Glisse + Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Marc Glisse" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("RipsComplex creation from points") diff --git a/src/cython/example/simplex_tree_example.py b/src/cython/example/simplex_tree_example.py index 51b8bfc7..30de00da 100755 --- a/src/cython/example/simplex_tree_example.py +++ b/src/cython/example/simplex_tree_example.py @@ -2,19 +2,19 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("SimplexTree creation from insertion") diff --git a/src/cython/example/sparse_rips_persistence_diagram.py b/src/cython/example/sparse_rips_persistence_diagram.py index 9dc26f08..e58baf45 100755 --- a/src/cython/example/sparse_rips_persistence_diagram.py +++ b/src/cython/example/sparse_rips_persistence_diagram.py @@ -2,19 +2,19 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Marc Glisse + Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Marc Glisse" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("Sparse RipsComplex creation from points") diff --git a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py index 49fd7026..075149fb 100755 --- a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py +++ b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='TangentialComplex creation from ' 'points read in a OFF file.', diff --git a/src/cython/example/voronoi_graph_induced_complex.py b/src/cython/example/voronoi_graph_induced_complex.py index 623601d6..1e62fb60 100755 --- a/src/cython/example/voronoi_graph_induced_complex.py +++ b/src/cython/example/voronoi_graph_induced_complex.py @@ -3,19 +3,19 @@ import gudhi import argparse -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" parser = argparse.ArgumentParser(description='Voronoi GIC ' 'from points read in a OFF file.', diff --git a/src/cython/example/witness_complex_from_nearest_landmark_table.py b/src/cython/example/witness_complex_from_nearest_landmark_table.py index 9a358fb1..5cf954bf 100755 --- a/src/cython/example/witness_complex_from_nearest_landmark_table.py +++ b/src/cython/example/witness_complex_from_nearest_landmark_table.py @@ -2,19 +2,19 @@ from gudhi import StrongWitnessComplex, SimplexTree -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" print("#####################################################################") print("WitnessComplex creation from nearest landmark table") diff --git a/src/cython/test/test_alpha_complex.py b/src/cython/test/test_alpha_complex.py index aac4c22a..a573115a 100755 --- a/src/cython/test/test_alpha_complex.py +++ b/src/cython/test/test_alpha_complex.py @@ -1,18 +1,18 @@ from gudhi import AlphaComplex, SimplexTree -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_empty_alpha(): diff --git a/src/cython/test/test_bottleneck_distance.py b/src/cython/test/test_bottleneck_distance.py index 5be1da5a..409fba9c 100755 --- a/src/cython/test/test_bottleneck_distance.py +++ b/src/cython/test/test_bottleneck_distance.py @@ -1,18 +1,18 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_basic_bottleneck(): diff --git a/src/cython/test/test_cover_complex.py b/src/cython/test/test_cover_complex.py index 7e99946d..1eb4ed37 100755 --- a/src/cython/test/test_cover_complex.py +++ b/src/cython/test/test_cover_complex.py @@ -1,18 +1,18 @@ from gudhi import CoverComplex -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2018 Inria + Copyright (C) 2018 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_empty_constructor(): diff --git a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py index e06b3c07..6dc7fdfc 100755 --- a/src/cython/test/test_cubical_complex.py +++ b/src/cython/test/test_cubical_complex.py @@ -1,18 +1,18 @@ from gudhi import CubicalComplex -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_empty_constructor(): diff --git a/src/cython/test/test_euclidean_witness_complex.py b/src/cython/test/test_euclidean_witness_complex.py index 04c73409..c8510b1e 100755 --- a/src/cython/test/test_euclidean_witness_complex.py +++ b/src/cython/test/test_euclidean_witness_complex.py @@ -1,18 +1,18 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_empty_euclidean_witness_complex(): diff --git a/src/cython/test/test_reader_utils.py b/src/cython/test/test_reader_utils.py index 2166bb05..4b87adf2 100755 --- a/src/cython/test/test_reader_utils.py +++ b/src/cython/test/test_reader_utils.py @@ -1,19 +1,19 @@ import gudhi import numpy as np -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
Author(s): Vincent Rouvreau - Copyright (C) 2017 Inria + Copyright (C) 2017 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2017 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_non_existing_csv_file(): diff --git a/src/cython/test/test_rips_complex.py b/src/cython/test/test_rips_complex.py index 4443fac5..975b447a 100755 --- a/src/cython/test/test_rips_complex.py +++ b/src/cython/test/test_rips_complex.py @@ -1,19 +1,19 @@ from gudhi import RipsComplex from math import sqrt -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_empty_rips(): diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py index 8310566b..5c903d93 100755 --- a/src/cython/test/test_simplex_tree.py +++ b/src/cython/test/test_simplex_tree.py @@ -1,18 +1,18 @@ from gudhi import SimplexTree -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_insertion(): diff --git a/src/cython/test/test_subsampling.py b/src/cython/test/test_subsampling.py index eecb290a..f7a97539 100755 --- a/src/cython/test/test_subsampling.py +++ b/src/cython/test/test_subsampling.py @@ -1,18 +1,18 @@ import gudhi -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_write_off_file_for_tests(): diff --git a/src/cython/test/test_tangential_complex.py b/src/cython/test/test_tangential_complex.py index ce05b05f..aa3820d1 100755 --- a/src/cython/test/test_tangential_complex.py +++ b/src/cython/test/test_tangential_complex.py @@ -1,18 +1,18 @@ from gudhi import TangentialComplex, SimplexTree -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_tangential(): diff --git a/src/cython/test/test_witness_complex.py b/src/cython/test/test_witness_complex.py index abc3b3b9..70511107 100755 --- a/src/cython/test/test_witness_complex.py +++ b/src/cython/test/test_witness_complex.py @@ -1,18 +1,18 @@ from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau - Copyright (C) 2016 Inria + Copyright (C) 2016 Inria - Modification(s): - - YYYY/MM Author: Description of the modification + Modification(s): + - YYYY/MM Author: Description of the modification """ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" def test_empty_witness_complex(): -- cgit v1.2.3 From fd47a2d926e78d3b83e79da3c959fb73809d09e4 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 26 Jun 2019 16:59:58 +0200 Subject: Format Python files with black --- src/cython/cython/persistence_graphical_tools.py | 233 ++++++++++++++------- ...ex_diagram_persistence_from_off_file_example.py | 44 ++-- .../alpha_rips_persistence_bottleneck_distance.py | 48 +++-- src/cython/example/bottleneck_basic_example.py | 13 +- .../example/coordinate_graph_induced_complex.py | 44 ++-- ...ex_diagram_persistence_from_off_file_example.py | 52 +++-- ...ex_diagram_persistence_from_off_file_example.py | 51 +++-- .../example/functional_graph_induced_complex.py | 46 ++-- .../example/gudhi_graphical_tools_example.py | 12 +- src/cython/example/nerve_of_a_covering.py | 44 ++-- ...arcode_persistence_from_perseus_file_example.py | 26 ++- .../random_cubical_complex_persistence_example.py | 25 ++- ...istence_from_correlation_matrix_file_example.py | 47 +++-- ...ersistence_from_distance_matrix_file_example.py | 28 ++- ...ex_diagram_persistence_from_off_file_example.py | 48 +++-- .../example/rips_complex_from_points_example.py | 3 +- src/cython/example/rips_persistence_diagram.py | 3 +- .../example/sparse_rips_persistence_diagram.py | 5 +- ...complex_plain_homology_from_off_file_example.py | 42 ++-- .../example/voronoi_graph_induced_complex.py | 44 ++-- .../witness_complex_from_nearest_landmark_table.py | 12 +- src/cython/test/test_alpha_complex.py | 58 +++-- src/cython/test/test_bottleneck_distance.py | 8 +- src/cython/test/test_cover_complex.py | 59 +++--- src/cython/test/test_cubical_complex.py | 59 ++++-- src/cython/test/test_euclidean_witness_complex.py | 68 ++++-- src/cython/test/test_reader_utils.py | 105 +++++++--- src/cython/test/test_rips_complex.py | 92 ++++---- src/cython/test/test_simplex_tree.py | 133 ++++++++---- src/cython/test/test_subsampling.py | 140 +++++++++---- src/cython/test/test_tangential_complex.py | 16 +- src/cython/test/test_witness_complex.py | 40 ++-- 32 files 
changed, 1086 insertions(+), 562 deletions(-) diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py index ead81d30..34803222 100644 --- a/src/cython/cython/persistence_graphical_tools.py +++ b/src/cython/cython/persistence_graphical_tools.py @@ -16,7 +16,8 @@ __author__ = "Vincent Rouvreau, Bertrand Michel" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -def __min_birth_max_death(persistence, band=0.): + +def __min_birth_max_death(persistence, band=0.0): """This function returns (min_birth, max_death) from the persistence. :param persistence: The persistence to plot. @@ -29,27 +30,47 @@ def __min_birth_max_death(persistence, band=0.): max_death = 0 min_birth = persistence[0][1][0] for interval in reversed(persistence): - if float(interval[1][1]) != float('inf'): + if float(interval[1][1]) != float("inf"): if float(interval[1][1]) > max_death: max_death = float(interval[1][1]) if float(interval[1][0]) > max_death: max_death = float(interval[1][0]) if float(interval[1][0]) < min_birth: min_birth = float(interval[1][0]) - if band > 0.: + if band > 0.0: max_death += band return (min_birth, max_death) + """ Only 13 colors for the palette """ -palette = ['#ff0000', '#00ff00', '#0000ff', '#00ffff', '#ff00ff', '#ffff00', - '#000000', '#880000', '#008800', '#000088', '#888800', '#880088', - '#008888'] - -def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, - max_intervals=1000, max_barcodes=1000, - inf_delta=0.1, legend=False): +palette = [ + "#ff0000", + "#00ff00", + "#0000ff", + "#00ffff", + "#ff00ff", + "#ffff00", + "#000000", + "#880000", + "#008800", + "#000088", + "#888800", + "#880088", + "#008888", +] + + +def plot_persistence_barcode( + persistence=[], + persistence_file="", + alpha=0.6, + max_intervals=1000, + max_barcodes=1000, + inf_delta=0.1, + legend=False, +): """This function plots the persistence bar code from persistence values list or from a :doc:`persistence file `. @@ -78,11 +99,13 @@ def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, import matplotlib.pyplot as plt import matplotlib.patches as mpatches - if persistence_file is not '': + if persistence_file is not "": if path.isfile(persistence_file): # Reset persistence persistence = [] - diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file) + diag = read_persistence_intervals_grouped_by_dimension( + persistence_file=persistence_file + ) for key in diag.keys(): for persistence_interval in diag[key]: persistence.append((key, persistence_interval)) @@ -91,44 +114,62 @@ def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, return None if max_barcodes is not 1000: - print('Deprecated parameter. It has been replaced by max_intervals') + print("Deprecated parameter. 
It has been replaced by max_intervals") max_intervals = max_barcodes if max_intervals > 0 and max_intervals < len(persistence): # Sort by life time, then takes only the max_intervals elements - persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_intervals] + persistence = sorted( + persistence, + key=lambda life_time: life_time[1][1] - life_time[1][0], + reverse=True, + )[:max_intervals] persistence = sorted(persistence, key=lambda birth: birth[1][0]) (min_birth, max_death) = __min_birth_max_death(persistence) ind = 0 - delta = ((max_death - min_birth) * inf_delta) + delta = (max_death - min_birth) * inf_delta # Replace infinity values with max_death + delta for bar code to be more # readable infinity = max_death + delta axis_start = min_birth - delta # Draw horizontal bars in loop for interval in reversed(persistence): - if float(interval[1][1]) != float('inf'): + if float(interval[1][1]) != float("inf"): # Finite death case - plt.barh(ind, (interval[1][1] - interval[1][0]), height=0.8, - left = interval[1][0], alpha=alpha, - color = palette[interval[0]], - linewidth=0) + plt.barh( + ind, + (interval[1][1] - interval[1][0]), + height=0.8, + left=interval[1][0], + alpha=alpha, + color=palette[interval[0]], + linewidth=0, + ) else: # Infinite death case for diagram to be nicer - plt.barh(ind, (infinity - interval[1][0]), height=0.8, - left = interval[1][0], alpha=alpha, - color = palette[interval[0]], - linewidth=0) + plt.barh( + ind, + (infinity - interval[1][0]), + height=0.8, + left=interval[1][0], + alpha=alpha, + color=palette[interval[0]], + linewidth=0, + ) ind = ind + 1 if legend: dimensions = list(set(item[0] for item in persistence)) - plt.legend(handles=[mpatches.Patch(color=palette[dim], - label=str(dim)) for dim in dimensions], - loc='lower right') - plt.title('Persistence barcode') + plt.legend( + handles=[ + mpatches.Patch(color=palette[dim], label=str(dim)) + for dim in dimensions + ], + loc="lower right", + ) + plt.title("Persistence barcode") # Ends plot on infinity value and starts a little bit before min_birth plt.axis([axis_start, infinity, 0, ind]) return plt @@ -136,8 +177,17 @@ def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, except ImportError: print("This function is not available, you may be missing matplotlib.") -def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, - band=0., max_intervals=1000, max_plots=1000, inf_delta=0.1, legend=False): + +def plot_persistence_diagram( + persistence=[], + persistence_file="", + alpha=0.6, + band=0.0, + max_intervals=1000, + max_plots=1000, + inf_delta=0.1, + legend=False, +): """This function plots the persistence diagram from persistence values list or from a :doc:`persistence file `. 
@@ -168,11 +218,13 @@ def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, import matplotlib.pyplot as plt import matplotlib.patches as mpatches - if persistence_file is not '': + if persistence_file is not "": if path.isfile(persistence_file): # Reset persistence persistence = [] - diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file) + diag = read_persistence_intervals_grouped_by_dimension( + persistence_file=persistence_file + ) for key in diag.keys(): for persistence_interval in diag[key]: persistence.append((key, persistence_interval)) @@ -181,15 +233,19 @@ def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, return None if max_plots is not 1000: - print('Deprecated parameter. It has been replaced by max_intervals') + print("Deprecated parameter. It has been replaced by max_intervals") max_intervals = max_plots if max_intervals > 0 and max_intervals < len(persistence): # Sort by life time, then takes only the max_intervals elements - persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_intervals] + persistence = sorted( + persistence, + key=lambda life_time: life_time[1][1] - life_time[1][0], + reverse=True, + )[:max_intervals] (min_birth, max_death) = __min_birth_max_death(persistence, band) - delta = ((max_death - min_birth) * inf_delta) + delta = (max_death - min_birth) * inf_delta # Replace infinity values with max_death + delta for diagram to be more # readable infinity = max_death + delta @@ -198,31 +254,41 @@ def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, # line display of equation : birth = death x = np.linspace(axis_start, infinity, 1000) # infinity line and text - plt.plot(x, x, color='k', linewidth=1.0) - plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha) - plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha) + plt.plot(x, x, color="k", linewidth=1.0) + plt.plot(x, [infinity] * len(x), linewidth=1.0, color="k", alpha=alpha) + plt.text(axis_start, infinity, r"$\infty$", color="k", alpha=alpha) # bootstrap band - if band > 0.: - plt.fill_between(x, x, x+band, alpha=alpha, facecolor='red') + if band > 0.0: + plt.fill_between(x, x, x + band, alpha=alpha, facecolor="red") # Draw points in loop for interval in reversed(persistence): - if float(interval[1][1]) != float('inf'): + if float(interval[1][1]) != float("inf"): # Finite death case - plt.scatter(interval[1][0], interval[1][1], alpha=alpha, - color = palette[interval[0]]) + plt.scatter( + interval[1][0], + interval[1][1], + alpha=alpha, + color=palette[interval[0]], + ) else: # Infinite death case for diagram to be nicer - plt.scatter(interval[1][0], infinity, alpha=alpha, - color = palette[interval[0]]) + plt.scatter( + interval[1][0], infinity, alpha=alpha, color=palette[interval[0]] + ) if legend: dimensions = list(set(item[0] for item in persistence)) - plt.legend(handles=[mpatches.Patch(color=palette[dim], label=str(dim)) for dim in dimensions]) - - plt.title('Persistence diagram') - plt.xlabel('Birth') - plt.ylabel('Death') + plt.legend( + handles=[ + mpatches.Patch(color=palette[dim], label=str(dim)) + for dim in dimensions + ] + ) + + plt.title("Persistence diagram") + plt.xlabel("Birth") + plt.ylabel("Death") # Ends plot on infinity value and starts a little bit before min_birth plt.axis([axis_start, infinity, axis_start, infinity + delta]) return plt @@ -230,10 +296,17 @@ def 
plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, except ImportError: print("This function is not available, you may be missing matplotlib.") -def plot_persistence_density(persistence=[], persistence_file='', - nbins=300, bw_method=None, - max_intervals=1000, dimension=None, - cmap=None, legend=False): + +def plot_persistence_density( + persistence=[], + persistence_file="", + nbins=300, + bw_method=None, + max_intervals=1000, + dimension=None, + cmap=None, + legend=False, +): """This function plots the persistence density from persistence values list or from a :doc:`persistence file `. Be aware that this function does not distinguish the dimension, it is @@ -278,39 +351,53 @@ def plot_persistence_density(persistence=[], persistence_file='', import matplotlib.pyplot as plt from scipy.stats import kde - if persistence_file is not '': + if persistence_file is not "": if dimension is None: # All dimension case dimension = -1 if path.isfile(persistence_file): - persistence_dim = read_persistence_intervals_in_dimension(persistence_file=persistence_file, - only_this_dim=dimension) + persistence_dim = read_persistence_intervals_in_dimension( + persistence_file=persistence_file, only_this_dim=dimension + ) print(persistence_dim) else: print("file " + persistence_file + " not found.") return None if len(persistence) > 0: - persistence_dim = np.array([(dim_interval[1][0], dim_interval[1][1]) for dim_interval in persistence if (dim_interval[0] == dimension) or (dimension is None)]) - - persistence_dim = persistence_dim[np.isfinite(persistence_dim[:,1])] + persistence_dim = np.array( + [ + (dim_interval[1][0], dim_interval[1][1]) + for dim_interval in persistence + if (dim_interval[0] == dimension) or (dimension is None) + ] + ) + + persistence_dim = persistence_dim[np.isfinite(persistence_dim[:, 1])] if max_intervals > 0 and max_intervals < len(persistence_dim): # Sort by life time, then takes only the max_intervals elements - persistence_dim = np.array(sorted(persistence_dim, - key=lambda life_time: life_time[1]-life_time[0], - reverse=True)[:max_intervals]) + persistence_dim = np.array( + sorted( + persistence_dim, + key=lambda life_time: life_time[1] - life_time[0], + reverse=True, + )[:max_intervals] + ) # Set as numpy array birth and death (remove undefined values - inf and NaN) - birth = persistence_dim[:,0] - death = persistence_dim[:,1] + birth = persistence_dim[:, 0] + death = persistence_dim[:, 1] # line display of equation : birth = death x = np.linspace(death.min(), birth.max(), 1000) - plt.plot(x, x, color='k', linewidth=1.0) + plt.plot(x, x, color="k", linewidth=1.0) # Evaluate a gaussian kde on a regular grid of nbins x nbins over data extents - k = kde.gaussian_kde([birth,death], bw_method=bw_method) - xi, yi = np.mgrid[birth.min():birth.max():nbins*1j, death.min():death.max():nbins*1j] + k = kde.gaussian_kde([birth, death], bw_method=bw_method) + xi, yi = np.mgrid[ + birth.min() : birth.max() : nbins * 1j, + death.min() : death.max() : nbins * 1j, + ] zi = k(np.vstack([xi.flatten(), yi.flatten()])) # default cmap value cannot be done at argument definition level as matplotlib is not yet defined. 
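A minimal sketch of calling plot_persistence_density as reformatted above, assuming scipy is installed in addition to matplotlib (the function uses gaussian_kde); the intervals are illustrative. Note that infinite deaths are filtered out, so the list should contain several finite (birth, death) pairs:

import gudhi

# Only finite intervals are kept for the density estimate.
diag = [(0, (0.0, 0.3)), (0, (0.1, 0.7)), (0, (0.2, 1.0)), (1, (0.4, 0.9)), (1, (0.5, 0.8))]

# dimension=None keeps all dimensions; legend=True adds the colorbar.
pplot = gudhi.plot_persistence_density(persistence=diag, dimension=None, bw_method=None, legend=True)
pplot.show()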
@@ -322,10 +409,12 @@ def plot_persistence_density(persistence=[], persistence_file='', if legend: plt.colorbar() - plt.title('Persistence density') - plt.xlabel('Birth') - plt.ylabel('Death') + plt.title("Persistence density") + plt.xlabel("Birth") + plt.ylabel("Death") return plt except ImportError: - print("This function is not available, you may be missing matplotlib and/or scipy.") + print( + "This function is not available, you may be missing matplotlib and/or scipy." + ) diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py index d9925c22..b8f283b3 100755 --- a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py @@ -17,40 +17,48 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='AlphaComplex creation from ' - 'points read in a OFF file.', - epilog='Example: ' - 'example/alpha_complex_diagram_persistence_from_off_file_example.py ' - '-f ../data/points/tore3D_300.off -a 0.6' - '- Constructs a alpha complex with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="AlphaComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/alpha_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 0.6" + "- Constructs a alpha complex with the " + "points from the given OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5) -parser.add_argument("-b", "--band", type=float, default=0.) 
-parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() -with open(args.file, 'r') as f: +with open(args.file, "r") as f: first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + if (first_line == "OFF\n") or (first_line == "nOFF\n"): print("#####################################################################") print("AlphaComplex creation from points read in a OFF file") - + message = "AlphaComplex with max_edge_length=" + repr(args.max_alpha_square) print(message) - + alpha_complex = gudhi.AlphaComplex(off_file=args.file) - simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square) - + simplex_tree = alpha_complex.create_simplex_tree( + max_alpha_square=args.max_alpha_square + ) + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) print(message) - + diag = simplex_tree.persistence() - + print("betti_numbers()=") print(simplex_tree.betti_numbers()) - + if args.no_diagram == False: pplot = gudhi.plot_persistence_diagram(diag, band=args.band) pplot.show() diff --git a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py index 92bf9d39..086307ee 100755 --- a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py +++ b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py @@ -18,21 +18,23 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='AlphaComplex and RipsComplex ' - 'persistence creation from points read in ' - 'a OFF file. Bottleneck distance computation' - ' on each dimension', - epilog='Example: ' - 'example/alpha_rips_persistence_bottleneck_distance.py ' - '-f ../data/points/tore3D_1307.off -t 0.15 -d 3') +parser = argparse.ArgumentParser( + description="AlphaComplex and RipsComplex " + "persistence creation from points read in " + "a OFF file. 
Bottleneck distance computation" + " on each dimension", + epilog="Example: " + "example/alpha_rips_persistence_bottleneck_distance.py " + "-f ../data/points/tore3D_1307.off -t 0.15 -d 3", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-t", "--threshold", type=float, default=0.5) parser.add_argument("-d", "--max_dimension", type=int, default=1) args = parser.parse_args() -with open(args.file, 'r') as f: +with open(args.file, "r") as f: first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + if (first_line == "OFF\n") or (first_line == "nOFF\n"): point_cloud = gudhi.read_off(off_file=args.file) print("#####################################################################") print("RipsComplex creation from points read in a OFF file") @@ -40,8 +42,9 @@ with open(args.file, 'r') as f: message = "RipsComplex with max_edge_length=" + repr(args.threshold) print(message) - rips_complex = gudhi.RipsComplex(points=point_cloud, - max_edge_length=args.threshold) + rips_complex = gudhi.RipsComplex( + points=point_cloud, max_edge_length=args.threshold + ) rips_stree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) @@ -57,7 +60,9 @@ with open(args.file, 'r') as f: print(message) alpha_complex = gudhi.AlphaComplex(points=point_cloud) - alpha_stree = alpha_complex.create_simplex_tree(max_alpha_square=(args.threshold * args.threshold)) + alpha_stree = alpha_complex.create_simplex_tree( + max_alpha_square=(args.threshold * args.threshold) + ) message = "Number of simplices=" + repr(alpha_stree.num_simplices()) print(message) @@ -71,15 +76,26 @@ with open(args.file, 'r') as f: funcs = [math.sqrt, math.sqrt] alpha_intervals = [] for interval in alpha_stree.persistence_intervals_in_dimension(dim): - alpha_intervals.append(map(lambda func,value: func(value), funcs, interval)) + alpha_intervals.append( + map(lambda func, value: func(value), funcs, interval) + ) rips_intervals = rips_stree.persistence_intervals_in_dimension(dim) - bottleneck_distance = gudhi.bottleneck_distance(rips_intervals, alpha_intervals) - message = "In dimension " + repr(dim) + ", bottleneck distance = " + repr(bottleneck_distance) + bottleneck_distance = gudhi.bottleneck_distance( + rips_intervals, alpha_intervals + ) + message = ( + "In dimension " + + repr(dim) + + ", bottleneck distance = " + + repr(bottleneck_distance) + ) print(message) max_b_distance = max(bottleneck_distance, max_b_distance) - print("================================================================================") + print( + "================================================================================" + ) message = "Bottleneck distance is " + repr(max_b_distance) print(message) diff --git a/src/cython/example/bottleneck_basic_example.py b/src/cython/example/bottleneck_basic_example.py index f47e24d4..392d2a6e 100755 --- a/src/cython/example/bottleneck_basic_example.py +++ b/src/cython/example/bottleneck_basic_example.py @@ -16,9 +16,9 @@ __author__ = "Francois Godi, Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]] +diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] -diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]] +diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] message = "diag1=" + repr(diag1) print(message) @@ -26,9 +26,12 @@ print(message) message = "diag2=" + repr(diag2) print(message) -message = "Bottleneck distance approximation=" + 
repr(gudhi.bottleneck_distance(diag1, diag2, 0.1)) +message = "Bottleneck distance approximation=" + repr( + gudhi.bottleneck_distance(diag1, diag2, 0.1) +) print(message) -message = "Bottleneck distance exact value=" + repr(gudhi.bottleneck_distance(diag1, diag2)) +message = "Bottleneck distance exact value=" + repr( + gudhi.bottleneck_distance(diag1, diag2) +) print(message) - diff --git a/src/cython/example/coordinate_graph_induced_complex.py b/src/cython/example/coordinate_graph_induced_complex.py index f30eaf3e..e32141b4 100755 --- a/src/cython/example/coordinate_graph_induced_complex.py +++ b/src/cython/example/coordinate_graph_induced_complex.py @@ -17,24 +17,31 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='Coordinate GIC ' - 'from points read in a OFF file.', - epilog='Example: ' - 'example/coordinate_graph_induced_complex.py ' - '-f ../data/points/KleinBottle5D.off -c 0 -v' - '- Constructs the coordinate GIC with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="Coordinate GIC " "from points read in a OFF file.", + epilog="Example: " + "example/coordinate_graph_induced_complex.py " + "-f ../data/points/KleinBottle5D.off -c 0 -v" + "- Constructs the coordinate GIC with the " + "points from the given OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-c", "--coordinate", type=int, default=0) -parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity') +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) args = parser.parse_args() nerve_complex = gudhi.CoverComplex() nerve_complex.set_verbose(args.verbose) -if (nerve_complex.read_point_cloud(args.file)): - nerve_complex.set_type('GIC') +if nerve_complex.read_point_cloud(args.file): + nerve_complex.set_type("GIC") nerve_complex.set_color_from_coordinate(args.coordinate) nerve_complex.set_function_from_coordinate(args.coordinate) nerve_complex.set_graph_from_automatic_rips() @@ -45,12 +52,17 @@ if (nerve_complex.read_point_cloud(args.file)): nerve_complex.plot_dot() simplex_tree = nerve_complex.create_simplex_tree() nerve_complex.compute_PD() - if (args.verbose): - print('Iterator on coordinate GIC simplices') - result_str = 'Coordinate GIC is of dimension ' + \ - repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' + if args.verbose: + print("Iterator on coordinate GIC simplices") + result_str = ( + "Coordinate GIC is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." 
+ ) print(result_str) for filtered_value in simplex_tree.get_filtration(): print(filtered_value[0]) diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py index 1a8de8bd..610ba44f 100755 --- a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py @@ -17,38 +17,54 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='EuclideanStrongWitnessComplex creation from ' - 'points read in a OFF file.', - epilog='Example: ' - 'example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py ' - '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2' - '- Constructs a strong witness complex with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="EuclideanStrongWitnessComplex creation from " + "points read in a OFF file.", + epilog="Example: " + "example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2" + "- Constructs a strong witness complex with the " + "points from the given OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-a", "--max_alpha_square", type=float, required=True) parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) parser.add_argument("-d", "--limit_dimension", type=int, required=True) -parser.add_argument("-b", "--band", type=float, default=0.) -parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() -with open(args.file, 'r') as f: +with open(args.file, "r") as f: first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + if (first_line == "OFF\n") or (first_line == "nOFF\n"): print("#####################################################################") print("EuclideanStrongWitnessComplex creation from points read in a OFF file") witnesses = gudhi.read_off(off_file=args.file) - landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks) - - message = "EuclideanStrongWitnessComplex with max_edge_length=" + repr(args.max_alpha_square) + \ - " - Number of landmarks=" + repr(args.number_of_landmarks) + landmarks = gudhi.pick_n_random_points( + points=witnesses, nb_points=args.number_of_landmarks + ) + + message = ( + "EuclideanStrongWitnessComplex with max_edge_length=" + + repr(args.max_alpha_square) + + " - Number of landmarks=" + + repr(args.number_of_landmarks) + ) print(message) - witness_complex = gudhi.EuclideanStrongWitnessComplex(witnesses=witnesses, landmarks=landmarks) - simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square, - limit_dimension=args.limit_dimension) + witness_complex = gudhi.EuclideanStrongWitnessComplex( + witnesses=witnesses, landmarks=landmarks + ) + simplex_tree = witness_complex.create_simplex_tree( + max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension + ) message = 
"Number of simplices=" + repr(simplex_tree.num_simplices()) print(message) diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py index 9a17f8de..7587b732 100755 --- a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py @@ -17,38 +17,53 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from ' - 'points read in a OFF file.', - epilog='Example: ' - 'example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py ' - '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2' - '- Constructs a weak witness complex with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="EuclideanWitnessComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2" + "- Constructs a weak witness complex with the " + "points from the given OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-a", "--max_alpha_square", type=float, required=True) parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) parser.add_argument("-d", "--limit_dimension", type=int, required=True) -parser.add_argument("-b", "--band", type=float, default=0.) -parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() -with open(args.file, 'r') as f: +with open(args.file, "r") as f: first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + if (first_line == "OFF\n") or (first_line == "nOFF\n"): print("#####################################################################") print("EuclideanWitnessComplex creation from points read in a OFF file") witnesses = gudhi.read_off(off_file=args.file) - landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks) - - message = "EuclideanWitnessComplex with max_edge_length=" + repr(args.max_alpha_square) + \ - " - Number of landmarks=" + repr(args.number_of_landmarks) + landmarks = gudhi.pick_n_random_points( + points=witnesses, nb_points=args.number_of_landmarks + ) + + message = ( + "EuclideanWitnessComplex with max_edge_length=" + + repr(args.max_alpha_square) + + " - Number of landmarks=" + + repr(args.number_of_landmarks) + ) print(message) - witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks) - simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square, - limit_dimension=args.limit_dimension) + witness_complex = gudhi.EuclideanWitnessComplex( + witnesses=witnesses, landmarks=landmarks + ) + simplex_tree = witness_complex.create_simplex_tree( + max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension + ) message = "Number of simplices=" + repr(simplex_tree.num_simplices()) print(message) diff --git 
a/src/cython/example/functional_graph_induced_complex.py b/src/cython/example/functional_graph_induced_complex.py index f87c6837..8b645040 100755 --- a/src/cython/example/functional_graph_induced_complex.py +++ b/src/cython/example/functional_graph_induced_complex.py @@ -17,25 +17,32 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='Functional GIC ' - 'from points read in a OFF file.', - epilog='Example: ' - 'example/functional_graph_induced_complex.py ' - '-o ../data/points/COIL_database/lucky_cat.off ' - '-f ../data/points/COIL_database/lucky_cat_PCA1' - '- Constructs the functional GIC with the ' - 'points from the given OFF and function files.') +parser = argparse.ArgumentParser( + description="Functional GIC " "from points read in a OFF file.", + epilog="Example: " + "example/functional_graph_induced_complex.py " + "-o ../data/points/COIL_database/lucky_cat.off " + "-f ../data/points/COIL_database/lucky_cat_PCA1" + "- Constructs the functional GIC with the " + "points from the given OFF and function files.", +) parser.add_argument("-o", "--off-file", type=str, required=True) parser.add_argument("-f", "--function-file", type=str, required=True) -parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity') +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) args = parser.parse_args() nerve_complex = gudhi.CoverComplex() nerve_complex.set_verbose(args.verbose) -if (nerve_complex.read_point_cloud(args.off_file)): - nerve_complex.set_type('GIC') +if nerve_complex.read_point_cloud(args.off_file): + nerve_complex.set_type("GIC") nerve_complex.set_color_from_file(args.function_file) nerve_complex.set_function_from_file(args.function_file) nerve_complex.set_graph_from_automatic_rips() @@ -46,12 +53,17 @@ if (nerve_complex.read_point_cloud(args.off_file)): nerve_complex.plot_dot() simplex_tree = nerve_complex.create_simplex_tree() nerve_complex.compute_PD() - if (args.verbose): - print('Iterator on functional GIC simplices') - result_str = 'Functional GIC is of dimension ' + \ - repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' + if args.verbose: + print("Iterator on functional GIC simplices") + result_str = ( + "Functional GIC is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." 
+ ) print(result_str) for filtered_value in simplex_tree.get_filtration(): print(filtered_value[0]) diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py index 4f64c615..3b0ca54d 100755 --- a/src/cython/example/gudhi_graphical_tools_example.py +++ b/src/cython/example/gudhi_graphical_tools_example.py @@ -19,9 +19,15 @@ __license__ = "MIT" print("#####################################################################") print("Show barcode persistence example") -persistence = [(2, (1.0, float('inf'))), (1, (1.4142135623730951, float('inf'))), - (1, (1.4142135623730951, float('inf'))), (0, (0.0, float('inf'))), - (0, (0.0, 1.0)), (0, (0.0, 1.0)), (0, (0.0, 1.0))] +persistence = [ + (2, (1.0, float("inf"))), + (1, (1.4142135623730951, float("inf"))), + (1, (1.4142135623730951, float("inf"))), + (0, (0.0, float("inf"))), + (0, (0.0, 1.0)), + (0, (0.0, 1.0)), + (0, (0.0, 1.0)), +] gudhi.plot_persistence_barcode(persistence) print("#####################################################################") diff --git a/src/cython/example/nerve_of_a_covering.py b/src/cython/example/nerve_of_a_covering.py index 707f0631..3c8e0f90 100755 --- a/src/cython/example/nerve_of_a_covering.py +++ b/src/cython/example/nerve_of_a_covering.py @@ -17,26 +17,33 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='Nerve of a covering creation ' - 'from points read in a OFF file.', - epilog='Example: ' - 'example/nerve_of_a_covering.py ' - '-f ../data/points/human.off -c 2 -r 10 -g 0.3' - '- Constructs Nerve of a covering with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="Nerve of a covering creation " "from points read in a OFF file.", + epilog="Example: " + "example/nerve_of_a_covering.py " + "-f ../data/points/human.off -c 2 -r 10 -g 0.3" + "- Constructs Nerve of a covering with the " + "points from the given OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-c", "--coordinate", type=int, default=0) parser.add_argument("-r", "--resolution", type=int, default=10) parser.add_argument("-g", "--gain", type=float, default=0.3) -parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity') +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) args = parser.parse_args() nerve_complex = gudhi.CoverComplex() nerve_complex.set_verbose(args.verbose) -if (nerve_complex.read_point_cloud(args.file)): - nerve_complex.set_type('Nerve') +if nerve_complex.read_point_cloud(args.file): + nerve_complex.set_type("Nerve") nerve_complex.set_color_from_coordinate(args.coordinate) nerve_complex.set_function_from_coordinate(args.coordinate) nerve_complex.set_graph_from_OFF() @@ -47,12 +54,17 @@ if (nerve_complex.read_point_cloud(args.file)): nerve_complex.write_info() simplex_tree = nerve_complex.create_simplex_tree() nerve_complex.compute_PD() - if (args.verbose): - print('Iterator on graph induced complex simplices') - result_str = 'Nerve is of dimension ' + \ - repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' 
+ if args.verbose: + print("Iterator on graph induced complex simplices") + result_str = ( + "Nerve is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." + ) print(result_str) for filtered_value in simplex_tree.get_filtration(): print(filtered_value[0]) diff --git a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py index 40a2fed1..9cb855cd 100755 --- a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py +++ b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py @@ -17,8 +17,9 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" + def is_file_perseus(file): - num_lines = open(file).read().count('\n') + num_lines = open(file).read().count("\n") try: f = open(file) num_dim = int(f.readline()) @@ -36,14 +37,21 @@ def is_file_perseus(file): except ValueError: return False -parser = argparse.ArgumentParser(description='Periodic cubical complex from a ' - 'Perseus-style file name.', - epilog='Example: ' - './periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py' - ' -f ../data/bitmap/CubicalTwoSphere.txt') + +parser = argparse.ArgumentParser( + description="Periodic cubical complex from a " "Perseus-style file name.", + epilog="Example: " + "./periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py" + " -f ../data/bitmap/CubicalTwoSphere.txt", +) parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument('--no-barcode', default=False, action='store_true' , help='Flag for not to display the barcodes') +parser.add_argument( + "--no-barcode", + default=False, + action="store_true", + help="Flag for not to display the barcodes", +) args = parser.parse_args() @@ -53,7 +61,9 @@ if is_file_perseus(args.file): periodic_cubical_complex = gudhi.PeriodicCubicalComplex(perseus_file=args.file) print("persistence(homology_coeff_field=3, min_persistence=0)=") - diag = periodic_cubical_complex.persistence(homology_coeff_field=3, min_persistence=0) + diag = periodic_cubical_complex.persistence( + homology_coeff_field=3, min_persistence=0 + ) print(diag) print("betti_numbers()=") diff --git a/src/cython/example/random_cubical_complex_persistence_example.py b/src/cython/example/random_cubical_complex_persistence_example.py index 12db3f41..da0eb177 100755 --- a/src/cython/example/random_cubical_complex_persistence_example.py +++ b/src/cython/example/random_cubical_complex_persistence_example.py @@ -21,23 +21,26 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='Random cubical complex.', - epilog='Example: ' - './random_cubical_complex_persistence_example.py' - ' 10 10 10 - Constructs a random cubical ' - 'complex in a dimension [10, 10, 10] (aka. ' - '1000 random top dimensional cells).') -parser.add_argument('dimension', type=int, nargs="*", - help='Cubical complex dimensions') +parser = argparse.ArgumentParser( + description="Random cubical complex.", + epilog="Example: " + "./random_cubical_complex_persistence_example.py" + " 10 10 10 - Constructs a random cubical " + "complex in a dimension [10, 10, 10] (aka. 
" + "1000 random top dimensional cells).", +) +parser.add_argument("dimension", type=int, nargs="*", help="Cubical complex dimensions") args = parser.parse_args() dimension_multiplication = reduce(operator.mul, args.dimension, 1) -if dimension_multiplication > 1: +if dimension_multiplication > 1: print("#####################################################################") print("CubicalComplex creation") - cubical_complex = gudhi.CubicalComplex(dimensions=args.dimension, - top_dimensional_cells = numpy.random.rand(dimension_multiplication)) + cubical_complex = gudhi.CubicalComplex( + dimensions=args.dimension, + top_dimensional_cells=numpy.random.rand(dimension_multiplication), + ) print("persistence(homology_coeff_field=2, min_persistence=0)=") print(cubical_complex.persistence(homology_coeff_field=2, min_persistence=0)) diff --git a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py index 98be0123..3571580b 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py @@ -18,22 +18,28 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2017 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='RipsComplex creation from ' - 'a correlation matrix read in a csv file.', - epilog='Example: ' - 'example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py ' - '-f ../data/correlation_matrix/lower_triangular_correlation_matrix.csv -e 12.0 -d 3' - '- Constructs a Rips complex with the ' - 'correlation matrix from the given csv file.') +parser = argparse.ArgumentParser( + description="RipsComplex creation from " "a correlation matrix read in a csv file.", + epilog="Example: " + "example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py " + "-f ../data/correlation_matrix/lower_triangular_correlation_matrix.csv -e 12.0 -d 3" + "- Constructs a Rips complex with the " + "correlation matrix from the given csv file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-c", "--min_edge_correlation", type=float, default=0.5) parser.add_argument("-d", "--max_dimension", type=int, default=1) -parser.add_argument("-b", "--band", type=float, default=0.) -parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() -if not (-1. 
< args.min_edge_correlation < 1.): +if not (-1.0 < args.min_edge_correlation < 1.0): print("Wrong value of the treshold corelation (should be between -1 and 1).") sys.exit(1) @@ -48,12 +54,18 @@ print("RipsComplex creation from correlation matrix read in a csv file") message = "RipsComplex with min_edge_correlation=" + repr(args.min_edge_correlation) print(message) -correlation_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=args.file) +correlation_matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file=args.file +) # Given a correlation matrix M, we compute component-wise M'[i,j] = 1-M[i,j] to get a distance matrix: -distance_matrix = [[1.-correlation_matrix[i][j] for j in range(len(correlation_matrix[i]))] for i in range(len(correlation_matrix))] - -rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, - max_edge_length=1.-args.min_edge_correlation) +distance_matrix = [ + [1.0 - correlation_matrix[i][j] for j in range(len(correlation_matrix[i]))] + for i in range(len(correlation_matrix)) +] + +rips_complex = gudhi.RipsComplex( + distance_matrix=distance_matrix, max_edge_length=1.0 - args.min_edge_correlation +) simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) message = "Number of simplices=" + repr(simplex_tree.num_simplices()) @@ -65,7 +77,10 @@ print("betti_numbers()=") print(simplex_tree.betti_numbers()) # invert the persistence diagram -invert_diag = [(diag[pers][0],(1.-diag[pers][1][0], 1.-diag[pers][1][1])) for pers in range(len(diag))] +invert_diag = [ + (diag[pers][0], (1.0 - diag[pers][1][0], 1.0 - diag[pers][1][1])) + for pers in range(len(diag)) +] if args.no_diagram == False: pplot = gudhi.plot_persistence_diagram(invert_diag, band=args.band) diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py index f02dc0c0..0b9a9ba9 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py @@ -17,18 +17,24 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='RipsComplex creation from ' - 'a distance matrix read in a csv file.', - epilog='Example: ' - 'example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py ' - '-f ../data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3' - '- Constructs a Rips complex with the ' - 'distance matrix from the given csv file.') +parser = argparse.ArgumentParser( + description="RipsComplex creation from " "a distance matrix read in a csv file.", + epilog="Example: " + "example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py " + "-f ../data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3" + "- Constructs a Rips complex with the " + "distance matrix from the given csv file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-e", "--max_edge_length", type=float, default=0.5) parser.add_argument("-d", "--max_dimension", type=int, default=1) -parser.add_argument("-b", "--band", type=float, default=0.) 
-parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() @@ -39,7 +45,9 @@ message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length) print(message) distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=args.file) -rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=args.max_edge_length) +rips_complex = gudhi.RipsComplex( + distance_matrix=distance_matrix, max_edge_length=args.max_edge_length +) simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) message = "Number of simplices=" + repr(simplex_tree.num_simplices()) diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py index e1f8b85d..2b335bba 100755 --- a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py +++ b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py @@ -17,42 +17,52 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='RipsComplex creation from ' - 'points read in a OFF file.', - epilog='Example: ' - 'example/rips_complex_diagram_persistence_from_off_file_example.py ' - '-f ../data/points/tore3D_300.off -a 0.6' - '- Constructs a Rips complex with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="RipsComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/rips_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 0.6" + "- Constructs a Rips complex with the " + "points from the given OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-e", "--max_edge_length", type=float, default=0.5) parser.add_argument("-d", "--max_dimension", type=int, default=1) -parser.add_argument("-b", "--band", type=float, default=0.) 
-parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() -with open(args.file, 'r') as f: +with open(args.file, "r") as f: first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + if (first_line == "OFF\n") or (first_line == "nOFF\n"): print("#####################################################################") print("RipsComplex creation from points read in a OFF file") - + message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length) print(message) - + point_cloud = gudhi.read_off(off_file=args.file) - rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=args.max_edge_length) - simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) - + rips_complex = gudhi.RipsComplex( + points=point_cloud, max_edge_length=args.max_edge_length + ) + simplex_tree = rips_complex.create_simplex_tree( + max_dimension=args.max_dimension + ) + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) print(message) - + diag = simplex_tree.persistence() - + print("betti_numbers()=") print(simplex_tree.betti_numbers()) - + if args.no_diagram == False: pplot = gudhi.plot_persistence_diagram(diag, band=args.band) pplot.show() diff --git a/src/cython/example/rips_complex_from_points_example.py b/src/cython/example/rips_complex_from_points_example.py index f0d7a097..59d8a261 100755 --- a/src/cython/example/rips_complex_from_points_example.py +++ b/src/cython/example/rips_complex_from_points_example.py @@ -18,8 +18,7 @@ __license__ = "MIT" print("#####################################################################") print("RipsComplex creation from points") -rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], - max_edge_length=42) +rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42) simplex_tree = rips.create_simplex_tree(max_dimension=1) diff --git a/src/cython/example/rips_persistence_diagram.py b/src/cython/example/rips_persistence_diagram.py index 6b02eac9..f5897d7b 100755 --- a/src/cython/example/rips_persistence_diagram.py +++ b/src/cython/example/rips_persistence_diagram.py @@ -18,8 +18,7 @@ __license__ = "MIT" print("#####################################################################") print("RipsComplex creation from points") -rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], - max_edge_length=42) +rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42) simplex_tree = rips.create_simplex_tree(max_dimension=1) diff --git a/src/cython/example/sparse_rips_persistence_diagram.py b/src/cython/example/sparse_rips_persistence_diagram.py index e58baf45..671d5e34 100755 --- a/src/cython/example/sparse_rips_persistence_diagram.py +++ b/src/cython/example/sparse_rips_persistence_diagram.py @@ -18,8 +18,9 @@ __license__ = "MIT" print("#####################################################################") print("Sparse RipsComplex creation from points") -rips = gudhi.RipsComplex(points=[[0, 0], [0, 0.1], [1, 0], [0, 1], [1, 1]], - max_edge_length=42, sparse=.5) +rips = gudhi.RipsComplex( + points=[[0, 0], [0, 0.1], [1, 0], [0, 1], [1, 1]], max_edge_length=42, sparse=0.5 +) simplex_tree = rips.create_simplex_tree(max_dimension=2) diff --git 
a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py index 075149fb..456bc9eb 100755 --- a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py +++ b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py @@ -17,38 +17,44 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='TangentialComplex creation from ' - 'points read in a OFF file.', - epilog='Example: ' - 'example/tangential_complex_plain_homology_from_off_file_example.py ' - '-f ../data/points/tore3D_300.off -i 3' - '- Constructs a tangential complex with the ' - 'points from the given OFF file') +parser = argparse.ArgumentParser( + description="TangentialComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/tangential_complex_plain_homology_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -i 3" + "- Constructs a tangential complex with the " + "points from the given OFF file", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-i", "--intrisic_dim", type=int, required=True) -parser.add_argument("-b", "--band", type=float, default=0.) -parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams') +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) args = parser.parse_args() -with open(args.file, 'r') as f: +with open(args.file, "r") as f: first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + if (first_line == "OFF\n") or (first_line == "nOFF\n"): print("#####################################################################") print("TangentialComplex creation from points read in a OFF file") - - tc = gudhi.TangentialComplex(intrisic_dim = args.intrisic_dim, off_file=args.file) + + tc = gudhi.TangentialComplex(intrisic_dim=args.intrisic_dim, off_file=args.file) tc.compute_tangential_complex() st = tc.create_simplex_tree() - + message = "Number of simplices=" + repr(st.num_simplices()) print(message) - - diag = st.persistence(persistence_dim_max = True) - + + diag = st.persistence(persistence_dim_max=True) + print("betti_numbers()=") print(st.betti_numbers()) - + if args.no_diagram == False: pplot = gudhi.plot_persistence_diagram(diag, band=args.band) pplot.show() diff --git a/src/cython/example/voronoi_graph_induced_complex.py b/src/cython/example/voronoi_graph_induced_complex.py index 1e62fb60..38be6c92 100755 --- a/src/cython/example/voronoi_graph_induced_complex.py +++ b/src/cython/example/voronoi_graph_induced_complex.py @@ -17,24 +17,31 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" __license__ = "MIT" -parser = argparse.ArgumentParser(description='Voronoi GIC ' - 'from points read in a OFF file.', - epilog='Example: ' - 'example/voronoi_graph_induced_complex.py ' - '-f ../data/points/human.off -n 700 -v' - '- Constructs the Voronoi GIC with the ' - 'points from the given OFF file.') +parser = argparse.ArgumentParser( + description="Voronoi GIC " "from points read in a OFF file.", + epilog="Example: " + "example/voronoi_graph_induced_complex.py " + "-f ../data/points/human.off -n 700 -v" + "- Constructs the Voronoi GIC with the " + "points from the given 
OFF file.", +) parser.add_argument("-f", "--file", type=str, required=True) parser.add_argument("-n", "--subsample-nb-points", type=int, default=100) -parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity') +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) args = parser.parse_args() nerve_complex = gudhi.CoverComplex() nerve_complex.set_verbose(args.verbose) -if (nerve_complex.read_point_cloud(args.file)): - nerve_complex.set_type('GIC') +if nerve_complex.read_point_cloud(args.file): + nerve_complex.set_type("GIC") nerve_complex.set_color_from_coordinate() nerve_complex.set_graph_from_OFF() nerve_complex.set_cover_from_Voronoi(args.subsample_nb_points) @@ -42,12 +49,17 @@ if (nerve_complex.read_point_cloud(args.file)): nerve_complex.plot_off() simplex_tree = nerve_complex.create_simplex_tree() nerve_complex.compute_PD() - if (args.verbose): - print('Iterator on graph induced complex simplices') - result_str = 'Graph induced complex is of dimension ' + \ - repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' + if args.verbose: + print("Iterator on graph induced complex simplices") + result_str = ( + "Graph induced complex is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." + ) print(result_str) for filtered_value in simplex_tree.get_filtration(): print(filtered_value[0]) diff --git a/src/cython/example/witness_complex_from_nearest_landmark_table.py b/src/cython/example/witness_complex_from_nearest_landmark_table.py index 5cf954bf..c04a82b2 100755 --- a/src/cython/example/witness_complex_from_nearest_landmark_table.py +++ b/src/cython/example/witness_complex_from_nearest_landmark_table.py @@ -18,11 +18,13 @@ __license__ = "MIT" print("#####################################################################") print("WitnessComplex creation from nearest landmark table") -nearest_landmark_table = [[[0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]], - [[1, 0.0], [2, 0.1], [3, 0.2], [4, 0.3], [0, 0.4]], - [[2, 0.0], [3, 0.1], [4, 0.2], [0, 0.3], [1, 0.4]], - [[3, 0.0], [4, 0.1], [0, 0.2], [1, 0.3], [2, 0.4]], - [[4, 0.0], [0, 0.1], [1, 0.2], [2, 0.3], [3, 0.4]]] +nearest_landmark_table = [ + [[0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]], + [[1, 0.0], [2, 0.1], [3, 0.2], [4, 0.3], [0, 0.4]], + [[2, 0.0], [3, 0.1], [4, 0.2], [0, 0.3], [1, 0.4]], + [[3, 0.0], [4, 0.1], [0, 0.2], [1, 0.3], [2, 0.4]], + [[4, 0.0], [0, 0.1], [1, 0.2], [2, 0.3], [3, 0.4]], +] witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table) simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=0.41) diff --git a/src/cython/test/test_alpha_complex.py b/src/cython/test/test_alpha_complex.py index a573115a..24f8bf53 100755 --- a/src/cython/test/test_alpha_complex.py +++ b/src/cython/test/test_alpha_complex.py @@ -16,9 +16,10 @@ __license__ = "MIT" def test_empty_alpha(): - alpha_complex = AlphaComplex(points=[[0,0]]) + alpha_complex = AlphaComplex(points=[[0, 0]]) assert alpha_complex.__is_defined() == True + def test_infinite_alpha(): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] alpha_complex = AlphaComplex(points=point_list) @@ -29,18 +30,28 @@ def test_infinite_alpha(): assert simplex_tree.num_simplices() == 11 assert 
simplex_tree.num_vertices() == 4 - - assert simplex_tree.get_filtration() == \ - [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0), - ([0, 1], 0.25), ([0, 2], 0.25), ([1, 3], 0.25), - ([2, 3], 0.25), ([1, 2], 0.5), ([0, 1, 2], 0.5), - ([1, 2, 3], 0.5)] - assert simplex_tree.get_star([0]) == \ - [([0], 0.0), ([0, 1], 0.25), ([0, 1, 2], 0.5), - ([0, 2], 0.25)] - assert simplex_tree.get_cofaces([0], 1) == \ - [([0, 1], 0.25), ([0, 2], 0.25)] - + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 0.25), + ([0, 2], 0.25), + ([1, 3], 0.25), + ([2, 3], 0.25), + ([1, 2], 0.5), + ([0, 1, 2], 0.5), + ([1, 2, 3], 0.5), + ] + assert simplex_tree.get_star([0]) == [ + ([0], 0.0), + ([0, 1], 0.25), + ([0, 1, 2], 0.5), + ([0, 2], 0.25), + ] + assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] + assert point_list[0] == alpha_complex.get_point(0) assert point_list[1] == alpha_complex.get_point(1) assert point_list[2] == alpha_complex.get_point(2) @@ -48,6 +59,7 @@ def test_infinite_alpha(): assert alpha_complex.get_point(4) == [] assert alpha_complex.get_point(125) == [] + def test_filtered_alpha(): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] filtered_alpha = AlphaComplex(points=point_list) @@ -64,11 +76,15 @@ def test_filtered_alpha(): assert filtered_alpha.get_point(4) == [] assert filtered_alpha.get_point(125) == [] - assert simplex_tree.get_filtration() == \ - [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0), - ([0, 1], 0.25), ([0, 2], 0.25), ([1, 3], 0.25), - ([2, 3], 0.25)] - assert simplex_tree.get_star([0]) == \ - [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)] - assert simplex_tree.get_cofaces([0], 1) == \ - [([0, 1], 0.25), ([0, 2], 0.25)] + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 0.25), + ([0, 2], 0.25), + ([1, 3], 0.25), + ([2, 3], 0.25), + ] + assert simplex_tree.get_star([0]) == [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)] + assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] diff --git a/src/cython/test/test_bottleneck_distance.py b/src/cython/test/test_bottleneck_distance.py index 409fba9c..f5f019b9 100755 --- a/src/cython/test/test_bottleneck_distance.py +++ b/src/cython/test/test_bottleneck_distance.py @@ -16,8 +16,8 @@ __license__ = "MIT" def test_basic_bottleneck(): - diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]] - diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]] + diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] + diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] - assert(gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569) - assert(gudhi.bottleneck_distance(diag1, diag2) == 0.75) + assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569 + assert gudhi.bottleneck_distance(diag1, diag2) == 0.75 diff --git a/src/cython/test/test_cover_complex.py b/src/cython/test/test_cover_complex.py index 1eb4ed37..8cd12272 100755 --- a/src/cython/test/test_cover_complex.py +++ b/src/cython/test/test_cover_complex.py @@ -20,61 +20,66 @@ def test_empty_constructor(): cover = CoverComplex() assert cover.__is_defined() == True + def test_non_existing_file_read(): # Try to open a non existing file cover = CoverComplex() - assert (cover.read_point_cloud('pouetpouettralala.toubiloubabdou') == False) + assert cover.read_point_cloud("pouetpouettralala.toubiloubabdou") == False + def test_files_creation(): # Create test file - cloud_file = open('cloud', 'w') 
- cloud_file.write('nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0') + cloud_file = open("cloud", "w") + cloud_file.write("nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0") cloud_file.close() - cover_file = open('cover', 'w') - cover_file.write('1\n2\n3') + cover_file = open("cover", "w") + cover_file.write("1\n2\n3") cover_file.close() - graph_file = open('graph', 'w') - graph_file.write('0 1\n0 2\n1 2') + graph_file = open("graph", "w") + graph_file.write("0 1\n0 2\n1 2") graph_file.close() + def test_nerve(): nerve = CoverComplex() - nerve.set_type('Nerve') - assert (nerve.read_point_cloud('cloud') == True) + nerve.set_type("Nerve") + assert nerve.read_point_cloud("cloud") == True nerve.set_color_from_coordinate() - nerve.set_graph_from_file('graph') - nerve.set_cover_from_file('cover') + nerve.set_graph_from_file("graph") + nerve.set_cover_from_file("cover") nerve.find_simplices() stree = nerve.create_simplex_tree() - assert (stree.num_vertices() == 3) - assert ((stree.num_simplices() - stree.num_vertices()) == 0) - assert (stree.dimension() == 0) + assert stree.num_vertices() == 3 + assert (stree.num_simplices() - stree.num_vertices()) == 0 + assert stree.dimension() == 0 + def test_graph_induced_complex(): gic = CoverComplex() - gic.set_type('GIC') - assert (gic.read_point_cloud('cloud') == True) + gic.set_type("GIC") + assert gic.read_point_cloud("cloud") == True gic.set_color_from_coordinate() - gic.set_graph_from_file('graph') - gic.set_cover_from_file('cover') + gic.set_graph_from_file("graph") + gic.set_cover_from_file("cover") gic.find_simplices() stree = gic.create_simplex_tree() - assert (stree.num_vertices() == 3) - assert ((stree.num_simplices() - stree.num_vertices()) == 4) - assert (stree.dimension() == 2) + assert stree.num_vertices() == 3 + assert (stree.num_simplices() - stree.num_vertices()) == 4 + assert stree.dimension() == 2 + def test_voronoi_graph_induced_complex(): gic = CoverComplex() - gic.set_type('GIC') - assert (gic.read_point_cloud('cloud') == True) + gic.set_type("GIC") + assert gic.read_point_cloud("cloud") == True gic.set_color_from_coordinate() - gic.set_graph_from_file('graph') + gic.set_graph_from_file("graph") gic.set_cover_from_Voronoi(2) gic.find_simplices() stree = gic.create_simplex_tree() - assert (stree.num_vertices() == 2) - assert ((stree.num_simplices() - stree.num_vertices()) == 1) - assert (stree.dimension() == 1) + assert stree.num_vertices() == 2 + assert (stree.num_simplices() - stree.num_vertices()) == 1 + assert stree.dimension() == 1 diff --git a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py index 6dc7fdfc..68f54fbe 100755 --- a/src/cython/test/test_cubical_complex.py +++ b/src/cython/test/test_cubical_complex.py @@ -21,65 +21,78 @@ def test_empty_constructor(): assert cub.__is_defined() == False assert cub.__is_persistence_defined() == False + def test_non_existing_perseus_file_constructor(): # Try to open a non existing file - cub = CubicalComplex(perseus_file='pouetpouettralala.toubiloubabdou') + cub = CubicalComplex(perseus_file="pouetpouettralala.toubiloubabdou") assert cub.__is_defined() == False assert cub.__is_persistence_defined() == False + def test_dimension_or_perseus_file_constructor(): # Create test file - test_file = open('CubicalOneSphere.txt', 'w') - test_file.write('2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n') + test_file = open("CubicalOneSphere.txt", "w") + test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n") test_file.close() # CubicalComplex can be constructed from dimensions and # 
top_dimensional_cells OR from a Perseus-style file name. - cub = CubicalComplex(dimensions=[3, 3], - top_dimensional_cells = [1,2,3,4,5,6,7,8,9], - perseus_file='CubicalOneSphere.txt') + cub = CubicalComplex( + dimensions=[3, 3], + top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9], + perseus_file="CubicalOneSphere.txt", + ) assert cub.__is_defined() == False assert cub.__is_persistence_defined() == False - cub = CubicalComplex(top_dimensional_cells = [1,2,3,4,5,6,7,8,9], - perseus_file='CubicalOneSphere.txt') + cub = CubicalComplex( + top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9], + perseus_file="CubicalOneSphere.txt", + ) assert cub.__is_defined() == False assert cub.__is_persistence_defined() == False - cub = CubicalComplex(dimensions=[3, 3], - perseus_file='CubicalOneSphere.txt') + cub = CubicalComplex(dimensions=[3, 3], perseus_file="CubicalOneSphere.txt") assert cub.__is_defined() == False assert cub.__is_persistence_defined() == False + def test_dimension_simple_constructor(): - cub = CubicalComplex(dimensions=[3, 3], - top_dimensional_cells = [1,2,3,4,5,6,7,8,9]) + cub = CubicalComplex( + dimensions=[3, 3], top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9] + ) assert cub.__is_defined() == True assert cub.__is_persistence_defined() == False - assert cub.persistence() == [(0, (1.0, float('inf')))] + assert cub.persistence() == [(0, (1.0, float("inf")))] assert cub.__is_persistence_defined() == True assert cub.betti_numbers() == [1, 0, 0] assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0] + def test_user_case_simple_constructor(): - cub = CubicalComplex(dimensions=[3, 3], - top_dimensional_cells = [float('inf'), 0.,0.,0.,1.,0.,0.,0.,0.]) + cub = CubicalComplex( + dimensions=[3, 3], + top_dimensional_cells=[float("inf"), 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + ) assert cub.__is_defined() == True assert cub.__is_persistence_defined() == False - assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float('inf')))] + assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))] assert cub.__is_persistence_defined() == True - other_cub = CubicalComplex(dimensions=[3, 3], - top_dimensional_cells = [1000., 0.,0.,0.,1.,0.,0.,0.,0.]) - assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float('inf')))] + other_cub = CubicalComplex( + dimensions=[3, 3], + top_dimensional_cells=[1000.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + ) + assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))] + def test_dimension_file_constructor(): # Create test file - test_file = open('CubicalOneSphere.txt', 'w') - test_file.write('2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n') + test_file = open("CubicalOneSphere.txt", "w") + test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n") test_file.close() - cub = CubicalComplex(perseus_file='CubicalOneSphere.txt') + cub = CubicalComplex(perseus_file="CubicalOneSphere.txt") assert cub.__is_defined() == True assert cub.__is_persistence_defined() == False - assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float('inf')))] + assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float("inf")))] assert cub.__is_persistence_defined() == True assert cub.betti_numbers() == [1, 0, 0] assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0] diff --git a/src/cython/test/test_euclidean_witness_complex.py b/src/cython/test/test_euclidean_witness_complex.py index c8510b1e..f5eae5fa 100755 --- a/src/cython/test/test_euclidean_witness_complex.py +++ b/src/cython/test/test_euclidean_witness_complex.py @@ -19,31 +19,60 @@ 
def test_empty_euclidean_witness_complex(): euclidean_witness = gudhi.EuclideanWitnessComplex() assert euclidean_witness.__is_defined() == False + def test_witness_complex(): - point_cloud = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0], - [0.0, 14.0], [2.0, 19.0], [9.0, 17.0]] + point_cloud = [ + [1.0, 1.0], + [7.0, 0.0], + [4.0, 6.0], + [9.0, 6.0], + [0.0, 14.0], + [2.0, 19.0], + [9.0, 17.0], + ] landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]] - euclidean_witness_complex = gudhi.EuclideanWitnessComplex(landmarks=landmarks, witnesses = point_cloud) + euclidean_witness_complex = gudhi.EuclideanWitnessComplex( + landmarks=landmarks, witnesses=point_cloud + ) simplex_tree = euclidean_witness_complex.create_simplex_tree(max_alpha_square=4.1) assert landmarks[0] == euclidean_witness_complex.get_point(0) assert landmarks[1] == euclidean_witness_complex.get_point(1) assert landmarks[2] == euclidean_witness_complex.get_point(2) - assert simplex_tree.get_filtration() == [([0], 0.0), ([1], 0.0), - ([0, 1], 0.0), ([2], 0.0), ([0, 2], 0.0), ([1, 2], 0.0), - ([0, 1, 2], 0.0)] + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([0, 1], 0.0), + ([2], 0.0), + ([0, 2], 0.0), + ([1, 2], 0.0), + ([0, 1, 2], 0.0), + ] + def test_empty_euclidean_strong_witness_complex(): euclidean_strong_witness = gudhi.EuclideanStrongWitnessComplex() assert euclidean_strong_witness.__is_defined() == False + def test_strong_witness_complex(): - point_cloud = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0], - [0.0, 14.0], [2.0, 19.0], [9.0, 17.0]] + point_cloud = [ + [1.0, 1.0], + [7.0, 0.0], + [4.0, 6.0], + [9.0, 6.0], + [0.0, 14.0], + [2.0, 19.0], + [9.0, 17.0], + ] landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]] - euclidean_strong_witness_complex = gudhi.EuclideanStrongWitnessComplex(landmarks=landmarks, witnesses = point_cloud) - simplex_tree = euclidean_strong_witness_complex.create_simplex_tree(max_alpha_square=14.9) + euclidean_strong_witness_complex = gudhi.EuclideanStrongWitnessComplex( + landmarks=landmarks, witnesses=point_cloud + ) + simplex_tree = euclidean_strong_witness_complex.create_simplex_tree( + max_alpha_square=14.9 + ) assert landmarks[0] == euclidean_strong_witness_complex.get_point(0) assert landmarks[1] == euclidean_strong_witness_complex.get_point(1) @@ -51,9 +80,16 @@ def test_strong_witness_complex(): assert simplex_tree.get_filtration() == [([0], 0.0), ([1], 0.0), ([2], 0.0)] - simplex_tree = euclidean_strong_witness_complex.create_simplex_tree(max_alpha_square=100.0) - - assert simplex_tree.get_filtration() == [([0], 0.0), ([1], 0.0), - ([2], 0.0), ([1, 2], 15.0), ([0, 2], 34.0), ([0, 1], 37.0), - ([0, 1, 2], 37.0)] - + simplex_tree = euclidean_strong_witness_complex.create_simplex_tree( + max_alpha_square=100.0 + ) + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([1, 2], 15.0), + ([0, 2], 34.0), + ([0, 1], 37.0), + ([0, 1, 2], 37.0), + ] diff --git a/src/cython/test/test_reader_utils.py b/src/cython/test/test_reader_utils.py index 4b87adf2..4c7b32c2 100755 --- a/src/cython/test/test_reader_utils.py +++ b/src/cython/test/test_reader_utils.py @@ -18,60 +18,109 @@ __license__ = "MIT" def test_non_existing_csv_file(): # Try to open a non existing file - matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file='pouetpouettralala.toubiloubabdou') + matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file="pouetpouettralala.toubiloubabdou" + ) assert matrix == [] + def test_full_square_distance_matrix_csv_file(): 
# Create test file - test_file = open('full_square_distance_matrix.csv', 'w') - test_file.write('0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;') + test_file = open("full_square_distance_matrix.csv", "w") + test_file.write("0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;") test_file.close() - matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file="full_square_distance_matrix.csv") + matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file="full_square_distance_matrix.csv" + ) assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]] + def test_lower_triangular_distance_matrix_csv_file(): # Create test file - test_file = open('lower_triangular_distance_matrix.csv', 'w') - test_file.write('\n1,\n2,3,\n4,5,6,\n7,8,9,10,') + test_file = open("lower_triangular_distance_matrix.csv", "w") + test_file.write("\n1,\n2,3,\n4,5,6,\n7,8,9,10,") test_file.close() - matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file="lower_triangular_distance_matrix.csv", separator=",") + matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file="lower_triangular_distance_matrix.csv", separator="," + ) assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]] + def test_non_existing_persistence_file(): # Try to open a non existing file - persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='pouetpouettralala.toubiloubabdou') + persistence = gudhi.read_persistence_intervals_grouped_by_dimension( + persistence_file="pouetpouettralala.toubiloubabdou" + ) assert persistence == [] - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='pouetpouettralala.toubiloubabdou', only_this_dim=1) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="pouetpouettralala.toubiloubabdou", only_this_dim=1 + ) np.testing.assert_array_equal(persistence, []) + def test_read_persistence_intervals_without_dimension(): # Create test file - test_file = open('persistence_intervals_without_dimension.pers', 'w') - test_file.write('# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf') + test_file = open("persistence_intervals_without_dimension.pers", "w") + test_file.write( + "# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. 
inf" + ) test_file.close() - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers') - np.testing.assert_array_equal(persistence, [(2.7, 3.7), (9.6, 14.), (34.2, 34.974), (3., float('Inf'))]) - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers', only_this_dim=0) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_without_dimension.pers" + ) + np.testing.assert_array_equal( + persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] + ) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=0 + ) np.testing.assert_array_equal(persistence, []) - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers', only_this_dim=1) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=1 + ) np.testing.assert_array_equal(persistence, []) - persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='persistence_intervals_without_dimension.pers') - assert persistence == {-1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float('Inf'))]} + persistence = gudhi.read_persistence_intervals_grouped_by_dimension( + persistence_file="persistence_intervals_without_dimension.pers" + ) + assert persistence == { + -1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] + } + def test_read_persistence_intervals_with_dimension(): # Create test file - test_file = open('persistence_intervals_with_dimension.pers', 'w') - test_file.write('# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. inf') + test_file = open("persistence_intervals_with_dimension.pers", "w") + test_file.write( + "# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. 
inf" + ) test_file.close() - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers') - np.testing.assert_array_equal(persistence, [(2.7, 3.7), (9.6, 14.), (34.2, 34.974), (3., float('Inf'))]) - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=0) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers" + ) + np.testing.assert_array_equal( + persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] + ) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=0 + ) np.testing.assert_array_equal(persistence, [(2.7, 3.7)]) - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=1) - np.testing.assert_array_equal(persistence, [(9.6, 14.), (3., float('Inf'))]) - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=2) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=1 + ) + np.testing.assert_array_equal(persistence, [(9.6, 14.0), (3.0, float("Inf"))]) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=2 + ) np.testing.assert_array_equal(persistence, []) - persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=3) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=3 + ) np.testing.assert_array_equal(persistence, [(34.2, 34.974)]) - persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='persistence_intervals_with_dimension.pers') - assert persistence == {0: [(2.7, 3.7)], 1: [(9.6, 14.0), (3.0, float('Inf'))], 3: [(34.2, 34.974)]} + persistence = gudhi.read_persistence_intervals_grouped_by_dimension( + persistence_file="persistence_intervals_with_dimension.pers" + ) + assert persistence == { + 0: [(2.7, 3.7)], + 1: [(9.6, 14.0), (3.0, float("Inf"))], + 3: [(34.2, 34.974)], + } diff --git a/src/cython/test/test_rips_complex.py b/src/cython/test/test_rips_complex.py index 975b447a..d55ae22f 100755 --- a/src/cython/test/test_rips_complex.py +++ b/src/cython/test/test_rips_complex.py @@ -19,6 +19,7 @@ __license__ = "MIT" def test_empty_rips(): rips_complex = RipsComplex() + def test_rips_from_points(): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] rips_complex = RipsComplex(points=point_list, max_edge_length=42) @@ -31,17 +32,30 @@ def test_rips_from_points(): assert simplex_tree.num_simplices() == 10 assert simplex_tree.num_vertices() == 4 - assert simplex_tree.get_filtration() == \ - [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0), - ([0, 1], 1.0), ([0, 2], 1.0), ([1, 3], 1.0), - ([2, 3], 1.0), ([1, 2], 1.4142135623730951), - ([0, 3], 1.4142135623730951)] - assert simplex_tree.get_star([0]) == \ - [([0], 0.0), ([0, 1], 1.0), ([0, 2], 1.0), - ([0, 3], 1.4142135623730951)] - assert simplex_tree.get_cofaces([0], 1) == \ - [([0, 1], 1.0), ([0, 2], 1.0), - ([0, 3], 1.4142135623730951)] + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 
0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([1, 3], 1.0), + ([2, 3], 1.0), + ([1, 2], 1.4142135623730951), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_star([0]) == [ + ([0], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_cofaces([0], 1) == [ + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + def test_filtered_rips_from_points(): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] @@ -55,10 +69,10 @@ def test_filtered_rips_from_points(): assert simplex_tree.num_simplices() == 8 assert simplex_tree.num_vertices() == 4 + def test_sparse_filtered_rips_from_points(): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0, - sparse=.001) + filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0, sparse=0.001) simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) @@ -68,13 +82,10 @@ def test_sparse_filtered_rips_from_points(): assert simplex_tree.num_simplices() == 8 assert simplex_tree.num_vertices() == 4 + def test_rips_from_distance_matrix(): - distance_matrix = [[0], - [1, 0], - [1, sqrt(2), 0], - [sqrt(2), 1, 1, 0]] - rips_complex = RipsComplex(distance_matrix=distance_matrix, - max_edge_length=42) + distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] + rips_complex = RipsComplex(distance_matrix=distance_matrix, max_edge_length=42) simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) @@ -84,25 +95,34 @@ def test_rips_from_distance_matrix(): assert simplex_tree.num_simplices() == 10 assert simplex_tree.num_vertices() == 4 - assert simplex_tree.get_filtration() == \ - [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0), - ([0, 1], 1.0), ([0, 2], 1.0), ([1, 3], 1.0), - ([2, 3], 1.0), ([1, 2], 1.4142135623730951), - ([0, 3], 1.4142135623730951)] - assert simplex_tree.get_star([0]) == \ - [([0], 0.0), ([0, 1], 1.0), ([0, 2], 1.0), - ([0, 3], 1.4142135623730951)] - assert simplex_tree.get_cofaces([0], 1) == \ - [([0, 1], 1.0), ([0, 2], 1.0), - ([0, 3], 1.4142135623730951)] + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([1, 3], 1.0), + ([2, 3], 1.0), + ([1, 2], 1.4142135623730951), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_star([0]) == [ + ([0], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_cofaces([0], 1) == [ + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + def test_filtered_rips_from_distance_matrix(): - distance_matrix = [[0], - [1, 0], - [1, sqrt(2), 0], - [sqrt(2), 1, 1, 0]] - filtered_rips = RipsComplex(distance_matrix=distance_matrix, - max_edge_length=1.0) + distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] + filtered_rips = RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0) simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py index 5c903d93..8d8971c1 100755 --- a/src/cython/test/test_simplex_tree.py +++ b/src/cython/test/test_simplex_tree.py @@ -39,7 +39,7 @@ def test_insertion(): assert st.find([0]) == True assert st.find([1]) == True assert st.find([2]) == True - assert st.find([3]) == False + assert st.find([3]) == False assert st.find([0, 3]) == False assert st.find([1, 3]) == False assert st.find([2, 3]) == False @@ -55,21 +55,36 @@ def 
test_insertion(): assert st.filtration([1]) == 0.0 # skeleton test - assert st.get_skeleton(2) == \ - [([0, 1, 2], 4.0), ([0, 1], 0.0), ([0, 2], 4.0), - ([0], 0.0), ([1, 2], 4.0), ([1], 0.0), ([2], 4.0)] - assert st.get_skeleton(1) == \ - [([0, 1], 0.0), ([0, 2], 4.0), ([0], 0.0), - ([1, 2], 4.0), ([1], 0.0), ([2], 4.0)] - assert st.get_skeleton(0) == \ - [([0], 0.0), ([1], 0.0), ([2], 4.0)] + assert st.get_skeleton(2) == [ + ([0, 1, 2], 4.0), + ([0, 1], 0.0), + ([0, 2], 4.0), + ([0], 0.0), + ([1, 2], 4.0), + ([1], 0.0), + ([2], 4.0), + ] + assert st.get_skeleton(1) == [ + ([0, 1], 0.0), + ([0, 2], 4.0), + ([0], 0.0), + ([1, 2], 4.0), + ([1], 0.0), + ([2], 4.0), + ] + assert st.get_skeleton(0) == [([0], 0.0), ([1], 0.0), ([2], 4.0)] # remove_maximal_simplex test assert st.get_cofaces([0, 1, 2], 1) == [] st.remove_maximal_simplex([0, 1, 2]) - assert st.get_skeleton(2) == \ - [([0, 1], 0.0), ([0, 2], 4.0), ([0], 0.0), - ([1, 2], 4.0), ([1], 0.0), ([2], 4.0)] + assert st.get_skeleton(2) == [ + ([0, 1], 0.0), + ([0, 2], 4.0), + ([0], 0.0), + ([1, 2], 4.0), + ([1], 0.0), + ([2], 4.0), + ] assert st.find([0, 1, 2]) == False assert st.find([0, 1]) == True assert st.find([0, 2]) == True @@ -78,7 +93,10 @@ def test_insertion(): assert st.find([2]) == True st.initialize_filtration() - assert st.persistence(persistence_dim_max = True) == [(1, (4.0, float('inf'))), (0, (0.0, float('inf')))] + assert st.persistence(persistence_dim_max=True) == [ + (1, (4.0, float("inf"))), + (0, (0.0, float("inf"))), + ] assert st.__is_persistence_defined() == True assert st.betti_numbers() == [1, 1] @@ -88,6 +106,7 @@ def test_insertion(): assert st.persistent_betti_numbers(4.0, 10000.0) == [1, 1] assert st.persistent_betti_numbers(9999.0, 10000.0) == [1, 1] + def test_expansion(): st = SimplexTree() assert st.__is_defined() == True @@ -107,21 +126,56 @@ def test_expansion(): assert st.num_vertices() == 7 assert st.num_simplices() == 17 - assert st.get_filtration() == [([2], 0.1), ([3], 0.1), ([2, 3], 0.1), - ([0], 0.2), ([0, 2], 0.2), ([1], 0.3), ([0, 1], 0.3), ([1, 3], 0.4), - ([1, 2], 0.5), ([5], 0.6), ([6], 0.6), ([5, 6], 0.6), ([4], 0.7), - ([2, 4], 0.7), ([0, 3], 0.8), ([4, 6], 0.9), ([3, 6], 1.0)] + assert st.get_filtration() == [ + ([2], 0.1), + ([3], 0.1), + ([2, 3], 0.1), + ([0], 0.2), + ([0, 2], 0.2), + ([1], 0.3), + ([0, 1], 0.3), + ([1, 3], 0.4), + ([1, 2], 0.5), + ([5], 0.6), + ([6], 0.6), + ([5, 6], 0.6), + ([4], 0.7), + ([2, 4], 0.7), + ([0, 3], 0.8), + ([4, 6], 0.9), + ([3, 6], 1.0), + ] st.expansion(3) assert st.num_vertices() == 7 assert st.num_simplices() == 22 st.initialize_filtration() - assert st.get_filtration() == [([2], 0.1), ([3], 0.1), ([2, 3], 0.1), - ([0], 0.2), ([0, 2], 0.2), ([1], 0.3), ([0, 1], 0.3), ([1, 3], 0.4), - ([1, 2], 0.5), ([0, 1, 2], 0.5), ([1, 2, 3], 0.5), ([5], 0.6), ([6], 0.6), - ([5, 6], 0.6), ([4], 0.7), ([2, 4], 0.7), ([0, 3], 0.8), ([0, 1, 3], 0.8), - ([0, 2, 3], 0.8), ([0, 1, 2, 3], 0.8), ([4, 6], 0.9), ([3, 6], 1.0)] + assert st.get_filtration() == [ + ([2], 0.1), + ([3], 0.1), + ([2, 3], 0.1), + ([0], 0.2), + ([0, 2], 0.2), + ([1], 0.3), + ([0, 1], 0.3), + ([1, 3], 0.4), + ([1, 2], 0.5), + ([0, 1, 2], 0.5), + ([1, 2, 3], 0.5), + ([5], 0.6), + ([6], 0.6), + ([5, 6], 0.6), + ([4], 0.7), + ([2, 4], 0.7), + ([0, 3], 0.8), + ([0, 1, 3], 0.8), + ([0, 2, 3], 0.8), + ([0, 1, 2, 3], 0.8), + ([4, 6], 0.9), + ([3, 6], 1.0), + ] + def test_automatic_dimension(): st = SimplexTree() @@ -129,8 +183,8 @@ def test_automatic_dimension(): assert st.__is_persistence_defined() == False 
# insert test - assert st.insert([0,1,3], filtration=0.5) == True - assert st.insert([0,1,2], filtration=1.) == True + assert st.insert([0, 1, 3], filtration=0.5) == True + assert st.insert([0, 1, 2], filtration=1.0) == True assert st.num_vertices() == 4 assert st.num_simplices() == 11 @@ -150,6 +204,7 @@ def test_automatic_dimension(): assert st.dimension() == 1 assert st.upper_bound_dimension() == 1 + def test_make_filtration_non_decreasing(): st = SimplexTree() assert st.__is_defined() == True @@ -171,25 +226,25 @@ def test_make_filtration_non_decreasing(): # Because of non decreasing property of simplex tree, { 0 } , { 1 } and # { 0, 1 } are going to be set from value 2.0 to 1.0 - st.insert([0, 1, 6, 7], filtration=1.0); + st.insert([0, 1, 6, 7], filtration=1.0) assert st.make_filtration_non_decreasing() == False # Modify specific values to test make_filtration_non_decreasing - st.assign_filtration([0,1,6,7], 0.8); - st.assign_filtration([0,1,6], 0.9); - st.assign_filtration([0,6], 0.6); - st.assign_filtration([3,4,5], 1.2); - st.assign_filtration([3,4], 1.1); - st.assign_filtration([4,5], 1.99); + st.assign_filtration([0, 1, 6, 7], 0.8) + st.assign_filtration([0, 1, 6], 0.9) + st.assign_filtration([0, 6], 0.6) + st.assign_filtration([3, 4, 5], 1.2) + st.assign_filtration([3, 4], 1.1) + st.assign_filtration([4, 5], 1.99) assert st.make_filtration_non_decreasing() == True - assert st.filtration([0,1,6,7]) == 1. - assert st.filtration([0,1,6]) == 1. - assert st.filtration([0,1]) == 1. - assert st.filtration([0]) == 1. - assert st.filtration([1]) == 1. - assert st.filtration([3,4,5]) == 2. - assert st.filtration([3,4]) == 2. - assert st.filtration([4,5]) == 2. + assert st.filtration([0, 1, 6, 7]) == 1.0 + assert st.filtration([0, 1, 6]) == 1.0 + assert st.filtration([0, 1]) == 1.0 + assert st.filtration([0]) == 1.0 + assert st.filtration([1]) == 1.0 + assert st.filtration([3, 4, 5]) == 2.0 + assert st.filtration([3, 4]) == 2.0 + assert st.filtration([4, 5]) == 2.0 diff --git a/src/cython/test/test_subsampling.py b/src/cython/test/test_subsampling.py index f7a97539..c816e203 100755 --- a/src/cython/test/test_subsampling.py +++ b/src/cython/test/test_subsampling.py @@ -28,45 +28,72 @@ def test_write_off_file_for_tests(): file.write("9.0 17.0\n") file.close() + def test_simple_choose_n_farthest_points_with_a_starting_point(): - point_set = [[0,1], [0,0], [1,0], [1,1]] + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] i = 0 for point in point_set: # The iteration starts with the given starting point - sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 1, starting_point = i) + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=1, starting_point=i + ) assert sub_set[0] == point_set[i] i = i + 1 # The iteration finds then the farthest - sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 1) + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=1 + ) assert sub_set[1] == point_set[3] - sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 3) + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=3 + ) assert sub_set[1] == point_set[1] - sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 0) + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=0 + ) assert sub_set[1] == point_set[2] - sub_set = 
gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 2) + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=2 + ) assert sub_set[1] == point_set[0] # Test the limits - assert gudhi.choose_n_farthest_points(points = [], nb_points = 0, starting_point = 0) == [] - assert gudhi.choose_n_farthest_points(points = [], nb_points = 1, starting_point = 0) == [] - assert gudhi.choose_n_farthest_points(points = [], nb_points = 0, starting_point = 1) == [] - assert gudhi.choose_n_farthest_points(points = [], nb_points = 1, starting_point = 1) == [] + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=0) == [] + ) + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=0) == [] + ) + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=1) == [] + ) + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=1) == [] + ) # From off file test - for i in range (0, 7): - assert len(gudhi.choose_n_farthest_points(off_file = 'subsample.off', nb_points = i, starting_point = i)) == i + for i in range(0, 7): + assert ( + len( + gudhi.choose_n_farthest_points( + off_file="subsample.off", nb_points=i, starting_point=i + ) + ) + == i + ) + def test_simple_choose_n_farthest_points_randomed(): - point_set = [[0,1], [0,0], [1,0], [1,1]] + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] # Test the limits - assert gudhi.choose_n_farthest_points(points = [], nb_points = 0) == [] - assert gudhi.choose_n_farthest_points(points = [], nb_points = 1) == [] - assert gudhi.choose_n_farthest_points(points = point_set, nb_points = 0) == [] + assert gudhi.choose_n_farthest_points(points=[], nb_points=0) == [] + assert gudhi.choose_n_farthest_points(points=[], nb_points=1) == [] + assert gudhi.choose_n_farthest_points(points=point_set, nb_points=0) == [] # Go furter than point set on purpose - for iter in range(1,10): - sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = iter) + for iter in range(1, 10): + sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=iter) for sub in sub_set: found = False for point in point_set: @@ -76,19 +103,23 @@ def test_simple_choose_n_farthest_points_randomed(): assert found == True # From off file test - for i in range (0, 7): - assert len(gudhi.choose_n_farthest_points(off_file = 'subsample.off', nb_points = i)) == i + for i in range(0, 7): + assert ( + len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i)) + == i + ) + def test_simple_pick_n_random_points(): - point_set = [[0,1], [0,0], [1,0], [1,1]] + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] # Test the limits - assert gudhi.pick_n_random_points(points = [], nb_points = 0) == [] - assert gudhi.pick_n_random_points(points = [], nb_points = 1) == [] - assert gudhi.pick_n_random_points(points = point_set, nb_points = 0) == [] + assert gudhi.pick_n_random_points(points=[], nb_points=0) == [] + assert gudhi.pick_n_random_points(points=[], nb_points=1) == [] + assert gudhi.pick_n_random_points(points=point_set, nb_points=0) == [] # Go furter than point set on purpose - for iter in range(1,10): - sub_set = gudhi.pick_n_random_points(points = point_set, nb_points = iter) + for iter in range(1, 10): + sub_set = gudhi.pick_n_random_points(points=point_set, nb_points=iter) print(5) for sub in sub_set: found = False @@ -99,23 +130,50 @@ def test_simple_pick_n_random_points(): assert found == True # From off file test - for i in 
range (0, 7): - assert len(gudhi.pick_n_random_points(off_file = 'subsample.off', nb_points = i)) == i + for i in range(0, 7): + assert ( + len(gudhi.pick_n_random_points(off_file="subsample.off", nb_points=i)) == i + ) + def test_simple_sparsify_points(): - point_set = [[0,1], [0,0], [1,0], [1,1]] + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] # Test the limits # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 0.0) == [] # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 10.0) == [] - assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 0.0) == point_set - assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 1.0) == point_set - assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 2.0) == [[0,1], [1,0]] - assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 2.01) == [[0,1]] - - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 0.0)) == 7 - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 30.0)) == 5 - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 40.0)) == 4 - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 90.0)) == 3 - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 100.0)) == 2 - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 325.0)) == 2 - assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 325.01)) == 1 + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=0.0) == point_set + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.0) == point_set + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.0) == [ + [0, 1], + [1, 0], + ] + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.01) == [[0, 1]] + + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0)) + == 7 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=30.0)) + == 5 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.0)) + == 4 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=90.0)) + == 3 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=100.0)) + == 2 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.0)) + == 2 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.01)) + == 1 + ) diff --git a/src/cython/test/test_tangential_complex.py b/src/cython/test/test_tangential_complex.py index aa3820d1..0f828d8e 100755 --- a/src/cython/test/test_tangential_complex.py +++ b/src/cython/test/test_tangential_complex.py @@ -17,7 +17,7 @@ __license__ = "MIT" def test_tangential(): point_list = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]] - tc = TangentialComplex(intrisic_dim = 1, points=point_list) + tc = TangentialComplex(intrisic_dim=1, points=point_list) assert tc.__is_defined() == True assert tc.num_vertices() == 4 assert tc.num_simplices() == 0 @@ -36,11 +36,17 @@ def test_tangential(): assert st.num_simplices() == 6 assert st.num_vertices() == 4 - - assert st.get_filtration() == \ - [([0], 0.0), ([1], 0.0), ([2], 0.0), ([0, 2], 0.0), ([3], 0.0), ([1, 3], 0.0)] + + assert st.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([0, 2], 0.0), + 
([3], 0.0), + ([1, 3], 0.0), + ] assert st.get_cofaces([0], 1) == [([0, 2], 0.0)] - + assert point_list[0] == tc.get_point(0) assert point_list[1] == tc.get_point(1) assert point_list[2] == tc.get_point(2) diff --git a/src/cython/test/test_witness_complex.py b/src/cython/test/test_witness_complex.py index 70511107..36ced635 100755 --- a/src/cython/test/test_witness_complex.py +++ b/src/cython/test/test_witness_complex.py @@ -19,32 +19,44 @@ def test_empty_witness_complex(): witness = WitnessComplex() assert witness.__is_defined() == False + def test_witness_complex(): - nearest_landmark_table = [[[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], - [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], - [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], - [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], - [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]]] + nearest_landmark_table = [ + [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], + [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], + [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], + [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], + [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]], + ] witness_complex = WitnessComplex(nearest_landmark_table=nearest_landmark_table) simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1) assert simplex_tree.num_vertices() == 5 assert simplex_tree.num_simplices() == 31 - simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1, limit_dimension=2) + simplex_tree = witness_complex.create_simplex_tree( + max_alpha_square=4.1, limit_dimension=2 + ) assert simplex_tree.num_vertices() == 5 assert simplex_tree.num_simplices() == 25 -def test_strong_witness_complex(): - nearest_landmark_table = [[[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], - [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], - [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], - [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], - [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]]] - strong_witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table) +def test_strong_witness_complex(): + nearest_landmark_table = [ + [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], + [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], + [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], + [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], + [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]], + ] + + strong_witness_complex = StrongWitnessComplex( + nearest_landmark_table=nearest_landmark_table + ) simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1) assert simplex_tree.num_vertices() == 5 assert simplex_tree.num_simplices() == 31 - simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1, limit_dimension=2) + simplex_tree = strong_witness_complex.create_simplex_tree( + max_alpha_square=4.1, limit_dimension=2 + ) assert simplex_tree.num_vertices() == 5 assert simplex_tree.num_simplices() == 25 -- cgit v1.2.3 From df294a362cfb6d57a881650cb0f0914812afc3aa Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 27 Jun 2019 09:51:51 +0200 Subject: Malformed table --- src/cython/doc/witness_complex_sum.inc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cython/doc/witness_complex_sum.inc b/src/cython/doc/witness_complex_sum.inc index 8c89b2dd..2be8b220 100644 --- a/src/cython/doc/witness_complex_sum.inc +++ b/src/cython/doc/witness_complex_sum.inc @@ -6,7 +6,7 @@ | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. 
| | | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | + | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | | | | | | | | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 for Euclidean versions only | +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ -- cgit v1.2.3 From e6676dce1b5faa2c61707968e1e0588e5c47edbf Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 27 Jun 2019 17:47:58 +0200 Subject: code review: test if first!=last before dereferencing first --- src/Simplex_tree/include/gudhi/Simplex_tree.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 80d8dfb9..519703e6 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -821,7 +821,7 @@ class Simplex_tree { } } // Next iteration to avoid consecutive equal values - while ((vertex_one == *first) && (first < last)) { + while ((first < last) && (vertex_one == *first)) { ++first; } // End of insertion -- cgit v1.2.3 From 6aee9ea232820f3fefd3cfd8d194834c4ed9fd22 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 27 Jun 2019 18:45:50 +0200 Subject: Code review : call std::unique right after std::sort. Repetition has been removed --- src/Simplex_tree/include/gudhi/Simplex_tree.h | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 519703e6..9d6e50c6 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -775,27 +775,18 @@ class Simplex_tree { std::vector copy; copy.clear(); copy.insert(copy.end(), first, last); - std::sort(std::begin(copy), std::end(copy)); + std::sort(copy.begin(), copy.end()); + auto last_unique = std::unique(copy.begin(), copy.end()); + copy.erase(last_unique, copy.end()); GUDHI_CHECK_code( for (Vertex_handle v : copy) GUDHI_CHECK(v != null_vertex(), "cannot use the dummy null_vertex() as a real vertex"); ) - return insert_simplex_and_subfaces_sorted(copy, filtration); + return rec_insert_simplex_and_subfaces_sorted(root(), copy.begin(), copy.end(), filtration, 0); } private: - /// Same as insert_simplex_and_subfaces but assumes that the range of vertices is sorted - template> - std::pair insert_simplex_and_subfaces_sorted(const ForwardVertexRange& Nsimplex, - Filtration_value filt = 0) { - auto first = std::begin(Nsimplex); - auto last = std::end(Nsimplex); - if (first == last) - return { null_simplex(), true }; // FIXME: false would make more sense to me. - GUDHI_CHECK(std::is_sorted(first, last), "simplex vertices listed in unsorted order"); - return rec_insert_simplex_and_subfaces_sorted(root(), first, last, filt, 0); - } // To insert {1,2,3,4}, we insert {2,3,4} twice, once at the root, and once below 1. 
template std::pair rec_insert_simplex_and_subfaces_sorted(Siblings* sib, -- cgit v1.2.3 From 323ff3c2ac18e48e930361351d581927d30043af Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Thu, 27 Jun 2019 21:48:10 +0200 Subject: Roll back modification. This shall be done in master. --- .travis.yml | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index aa0068f4..b8a080ea 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,32 +9,32 @@ matrix: include: # A. Mac OSX - os: osx - osx_image: xcode10.2 + osx_image: xcode9.4 compiler: clang env: # 1. Only examples and associated tests - CMAKE_EXAMPLE='ON' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode10.2 + osx_image: xcode9.4 compiler: clang env: # 2. Only unitary tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='ON' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode10.2 + osx_image: xcode9.4 compiler: clang env: # 3. Only utilities and associated tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='ON' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode10.2 + osx_image: xcode9.4 compiler: clang env: # 4. Only doxygen documentation - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='doxygen' # Issue with sphinx-build with sphinx 2.0.1 # - os: osx - # osx_image: xcode10.2 + # osx_image: xcode9.4 # compiler: clang # env: # # 5. Only Python, associated tests and sphinx documentation @@ -43,7 +43,6 @@ matrix: cache: directories: - $HOME/.cache/pip - - $HOME/Library/Caches/Homebrew addons: homebrew: @@ -59,9 +58,6 @@ addons: - cgal - python3 -before_cache: - - brew cleanup - # When installing through libcgal-dev apt, CMake Error at CGAL Exports.cmake The imported target "CGAL::CGAL Qt5" references the file install: - python3 -m pip install --upgrade pip setuptools wheel -- cgit v1.2.3 From dc6fac3462f7e3490dcbf092e65321ac19bca834 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 27 Jun 2019 21:59:17 +0200 Subject: Fix travis issue with a newer version. Brew update issue --- .travis.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index b8a080ea..78696e39 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,36 +9,36 @@ matrix: include: # A. Mac OSX - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 1. Only examples and associated tests - CMAKE_EXAMPLE='ON' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 2. Only unitary tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='ON' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 3. Only utilities and associated tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='ON' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - os: osx - osx_image: xcode9.4 + osx_image: xcode10.2 compiler: clang env: # 4. Only doxygen documentation - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='doxygen' # Issue with sphinx-build with sphinx 2.0.1 - # - os: osx - # osx_image: xcode9.4 - # compiler: clang - # env: - # # 5. 
Only Python, associated tests and sphinx documentation - # - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='test sphinx' + - os: osx + osx_image: xcode10.2 + compiler: clang + env: + # 5. Only Python, associated tests and sphinx documentation + - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='test sphinx' cache: directories: -- cgit v1.2.3 From 6f7fe1555f2a6032c8d1f43753a07441a3f4143b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 27 Jun 2019 23:14:44 +0200 Subject: DEbug traces for travis --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 78696e39..42f95e27 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,7 +38,7 @@ matrix: compiler: clang env: # 5. Only Python, associated tests and sphinx documentation - - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='test sphinx' + - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='' cache: directories: @@ -69,6 +69,7 @@ script: - cd build - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=${CMAKE_EXAMPLE} -DWITH_GUDHI_TEST=${CMAKE_TEST} -DWITH_GUDHI_UTILITIES=${CMAKE_UTILITIES} -DWITH_GUDHI_PYTHON=${CMAKE_PYTHON} -DUSER_VERSION_DIR=version -DPython_ADDITIONAL_VERSIONS=3 .. - make all ${MAKE_TARGET} + - ctest -R sphinx_py_test -V - cd .. notifications: -- cgit v1.2.3 From 695655d5cb347d809733a5fa1c0458fb98fd3272 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 27 Jun 2019 23:51:01 +0200 Subject: Rollback debug traces for travis --- .travis.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 42f95e27..c07ac421 100644 --- a/.travis.yml +++ b/.travis.yml @@ -33,12 +33,12 @@ matrix: # 4. Only doxygen documentation - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='doxygen' # Issue with sphinx-build with sphinx 2.0.1 - - os: osx - osx_image: xcode10.2 - compiler: clang - env: - # 5. Only Python, associated tests and sphinx documentation - - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='' + # - os: osx + # osx_image: xcode10.2 + # compiler: clang + # env: + # # 5. Only Python, associated tests and sphinx documentation + # - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='test sphinx' cache: directories: @@ -69,7 +69,6 @@ script: - cd build - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=${CMAKE_EXAMPLE} -DWITH_GUDHI_TEST=${CMAKE_TEST} -DWITH_GUDHI_UTILITIES=${CMAKE_UTILITIES} -DWITH_GUDHI_PYTHON=${CMAKE_PYTHON} -DUSER_VERSION_DIR=version -DPython_ADDITIONAL_VERSIONS=3 .. - make all ${MAKE_TARGET} - - ctest -R sphinx_py_test -V - cd .. notifications: -- cgit v1.2.3 From e9e875a4ec84b7ca28b350f8cd0b9024246b00e2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 28 Jun 2019 11:19:04 +0200 Subject: Try to cache homebrew --- .travis.yml | 36 +++++++++++++++--------------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/.travis.yml b/.travis.yml index c07ac421..bf268057 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,47 +5,37 @@ sudo: required git: depth: 3 +os: osx +osx_image: xcode10.2 +compiler: clang + matrix: include: - # A. Mac OSX - - os: osx - osx_image: xcode10.2 - compiler: clang - env: + - env: # 1. 
Only examples and associated tests - CMAKE_EXAMPLE='ON' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - - os: osx - osx_image: xcode10.2 - compiler: clang - env: + - env: # 2. Only unitary tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='ON' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - - os: osx - osx_image: xcode10.2 - compiler: clang - env: + - env: # 3. Only utilities and associated tests - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='ON' CMAKE_PYTHON='OFF' MAKE_TARGET='test' - - os: osx - osx_image: xcode10.2 - compiler: clang - env: + - env: # 4. Only doxygen documentation - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='OFF' MAKE_TARGET='doxygen' # Issue with sphinx-build with sphinx 2.0.1 - # - os: osx - # osx_image: xcode10.2 - # compiler: clang - # env: + # - env: # # 5. Only Python, associated tests and sphinx documentation # - CMAKE_EXAMPLE='OFF' CMAKE_TEST='OFF' CMAKE_UTILITIES='OFF' CMAKE_PYTHON='ON' MAKE_TARGET='test sphinx' cache: directories: - $HOME/.cache/pip + - $HOME/Library/Caches/Homebrew addons: homebrew: + update: true packages: - cmake - graphviz @@ -58,6 +48,10 @@ addons: - cgal - python3 +before_cache: + - rm -f $HOME/.cache/pip/log/debug.log + - brew cleanup + # When installing through libcgal-dev apt, CMake Error at CGAL Exports.cmake The imported target "CGAL::CGAL Qt5" references the file install: - python3 -m pip install --upgrade pip setuptools wheel -- cgit v1.2.3 From 4b5f5c47e681a92f85d390cc5763f39a033ba354 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 28 Jun 2019 16:11:49 +0200 Subject: We can now roll back rec_insert_simplex_and_subfaces_sorted and dimension computation --- src/Simplex_tree/include/gudhi/Simplex_tree.h | 28 +++++++++------------------ 1 file changed, 9 insertions(+), 19 deletions(-) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 9d6e50c6..f1697955 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -782,18 +782,19 @@ class Simplex_tree { for (Vertex_handle v : copy) GUDHI_CHECK(v != null_vertex(), "cannot use the dummy null_vertex() as a real vertex"); ) + // Update dimension if needed. We could wait to see if the insertion succeeds, but I doubt there is much to gain. + dimension_ = (std::max)(dimension_, static_cast(std::distance(copy.begin(), copy.end())) - 1); - return rec_insert_simplex_and_subfaces_sorted(root(), copy.begin(), copy.end(), filtration, 0); + return rec_insert_simplex_and_subfaces_sorted(root(), copy.begin(), copy.end(), filtration); } private: // To insert {1,2,3,4}, we insert {2,3,4} twice, once at the root, and once below 1. 
template std::pair rec_insert_simplex_and_subfaces_sorted(Siblings* sib, - ForwardVertexIterator first, - ForwardVertexIterator last, - Filtration_value filt, - int dimension) { + ForwardVertexIterator first, + ForwardVertexIterator last, + Filtration_value filt) { // An alternative strategy would be: // - try to find the complete simplex, if found (and low filtration) exit // - insert all the vertices at once in sib @@ -811,24 +812,13 @@ class Simplex_tree { insertion_result.first = null_simplex(); } } - // Next iteration to avoid consecutive equal values - while ((first < last) && (vertex_one == *first)) { - ++first; - } - // End of insertion - if (first == last) { - dimension_ = (std::max)(dimension_, dimension); - return insertion_result; - } + if (++first == last) return insertion_result; if (!has_children(simplex_one)) // TODO: have special code here, we know we are building the whole subtree from scratch. simplex_one->second.assign_children(new Siblings(sib, vertex_one)); - ++dimension; - auto res = rec_insert_simplex_and_subfaces_sorted(simplex_one->second.children(), first, last, filt, dimension); + auto res = rec_insert_simplex_and_subfaces_sorted(simplex_one->second.children(), first, last, filt); // No need to continue if the full simplex was already there with a low enough filtration value. - if (res.first != null_simplex()) { - rec_insert_simplex_and_subfaces_sorted(sib, first, last, filt, dimension); - } + if (res.first != null_simplex()) rec_insert_simplex_and_subfaces_sorted(sib, first, last, filt); return res; } -- cgit v1.2.3 From 642c4d7dcfca0d28bfed72448bc03502228af6da Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 28 Jun 2019 17:12:26 +0200 Subject: Code review: Keep the comment that returning true is strange --- src/Simplex_tree/include/gudhi/Simplex_tree.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index f1697955..f7bb720c 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -765,7 +765,7 @@ class Simplex_tree { auto last = std::end(Nsimplex); if (first == last) - return { null_simplex(), true }; // ----->> + return { null_simplex(), true }; // FIXME: false would make more sense to me. 
// Copy before sorting // Thread local is not available on XCode version < V.8 - It will slow down computation -- cgit v1.2.3 From 9cfde740f445aadd90f3ad8493dd79cf9cb15aba Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jul 2019 11:30:54 +0200 Subject: Code review: it is better to keep GPL for modules depending on CGAL --- src/cython/cython/alpha_complex.pyx | 2 +- src/cython/cython/bottleneck_distance.pyx | 2 +- src/cython/cython/euclidean_strong_witness_complex.pyx | 2 +- src/cython/cython/euclidean_witness_complex.pyx | 2 +- src/cython/cython/nerve_gic.pyx | 2 +- src/cython/cython/subsampling.pyx | 2 +- src/cython/cython/tangential_complex.pyx | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx index 48c2a046..249d51d0 100644 --- a/src/cython/cython/alpha_complex.pyx +++ b/src/cython/cython/alpha_complex.pyx @@ -17,7 +17,7 @@ import os __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": diff --git a/src/cython/cython/bottleneck_distance.pyx b/src/cython/cython/bottleneck_distance.pyx index 455a8112..4b378cbc 100644 --- a/src/cython/cython/bottleneck_distance.pyx +++ b/src/cython/cython/bottleneck_distance.pyx @@ -15,7 +15,7 @@ import os __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) diff --git a/src/cython/cython/euclidean_strong_witness_complex.pyx b/src/cython/cython/euclidean_strong_witness_complex.pyx index e670e9af..26bd8375 100644 --- a/src/cython/cython/euclidean_strong_witness_complex.pyx +++ b/src/cython/cython/euclidean_strong_witness_complex.pyx @@ -14,7 +14,7 @@ from libcpp.utility cimport pair __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi": cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface": diff --git a/src/cython/cython/euclidean_witness_complex.pyx b/src/cython/cython/euclidean_witness_complex.pyx index 192f0221..e687c6f3 100644 --- a/src/cython/cython/euclidean_witness_complex.pyx +++ b/src/cython/cython/euclidean_witness_complex.pyx @@ -14,7 +14,7 @@ from libcpp.utility cimport pair __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi": cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface": diff --git a/src/cython/cython/nerve_gic.pyx b/src/cython/cython/nerve_gic.pyx index c81125b7..3c8f1200 100644 --- a/src/cython/cython/nerve_gic.pyx +++ b/src/cython/cython/nerve_gic.pyx @@ -17,7 +17,7 @@ import os __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Nerve_gic_interface.h" namespace "Gudhi": cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface": diff --git 
a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx index 9b80e5e9..1135c1fb 100644 --- a/src/cython/cython/subsampling.pyx +++ b/src/cython/cython/subsampling.pyx @@ -16,7 +16,7 @@ import os __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling": vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points) diff --git a/src/cython/cython/tangential_complex.pyx b/src/cython/cython/tangential_complex.pyx index 1e20de30..00a84810 100644 --- a/src/cython/cython/tangential_complex.pyx +++ b/src/cython/cython/tangential_complex.pyx @@ -17,7 +17,7 @@ import os __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" +__license__ = "GPL v3" cdef extern from "Tangential_complex_interface.h" namespace "Gudhi": cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface": -- cgit v1.2.3 From c6f2f9aa404e8f8be0c4fc26b4b6f2a507745376 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jul 2019 11:34:47 +0200 Subject: Code review: Contraction only depends on CGAL_queue/Modifiable_priority_queue.h which is LGPL, not GPL --- src/common/doc/main_page.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index fe25b1e7..ea2474be 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -261,7 +261,7 @@ Author: David Salinas
Introduced in: GUDHI 1.1.0
- Copyright: MIT [(GPL v3)](../../licensing/)
+ Copyright: MIT [(LGPL v3)](../../licensing/)
Requires: \ref cgal ≥ 4.11.0 -- cgit v1.2.3 From f637d35677e6b300cd0e30f8e272e1a6173b8895 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jul 2019 14:34:11 +0200 Subject: Code review: As CHOMP is no more delivered, a clear error message shall be displayed when not installed. --- src/GudhUI/model/Model.h | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index b17a5c8f..48ba9622 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -307,9 +307,15 @@ class Model { private: void run_chomp() { save_complex_in_file_for_chomp(); + std::cout << "POUET" << std::endl; std::cout << "Call CHOMP library\n"; - int returnValue = system("utils/homsimpl chomp.sim"); - std::cout << "CHOMP returns" << returnValue << std::endl; + int returnValue = system("which homsimpl"); + if (returnValue != 0) { + std::cout << "CHOMP must be installed and available in PATH for this functionnality." << std::endl; + return; + } + returnValue = system("homsimpl chomp.sim"); + std::cout << "CHOMP returns " << returnValue << std::endl; } void save_complex_in_file_for_chomp() { -- cgit v1.2.3 From 0a4ece477416422f9d007bbd7eb4c1322d723fcb Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jul 2019 15:32:43 +0200 Subject: Add modification header --- src/GudhUI/model/Model.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index 48ba9622..e820a46e 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -6,6 +6,8 @@ * * Modification(s): * - YYYY/MM Author: Description of the modification + * - 2019/07 Vincent Rouvreau: homsimpl (from CHOMP) is no more delivered. + * Error message when not previously installed. */ #ifndef MODEL_MODEL_H_ @@ -307,7 +309,6 @@ class Model { private: void run_chomp() { save_complex_in_file_for_chomp(); - std::cout << "POUET" << std::endl; std::cout << "Call CHOMP library\n"; int returnValue = system("which homsimpl"); if (returnValue != 0) { -- cgit v1.2.3 From 4d8dc4477942d196a304e71db4918ac71862e597 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jul 2019 16:39:34 +0200 Subject: Code review: no need to check 'where homsimpl' --- src/GudhUI/model/Model.h | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index e820a46e..dd9bdaab 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -310,13 +310,11 @@ class Model { void run_chomp() { save_complex_in_file_for_chomp(); std::cout << "Call CHOMP library\n"; - int returnValue = system("which homsimpl"); + int returnValue = system("homsimpl chomp.sim"); if (returnValue != 0) { - std::cout << "CHOMP must be installed and available in PATH for this functionnality." << std::endl; - return; + std::cout << "homsimpl (from CHOMP) failed. Please check it is installed or available in the PATH." 
+ << std::endl; } - returnValue = system("homsimpl chomp.sim"); - std::cout << "CHOMP returns " << returnValue << std::endl; } void save_complex_in_file_for_chomp() { -- cgit v1.2.3 From de268e32ceb8fc46a36e3f221ef0e7392a587544 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jul 2019 16:28:08 +0200 Subject: Available/missing python modules are now managed by the __init__.py --- src/cython/CMakeLists.txt | 58 +++++++++++++++++------------------------------ src/cython/__init__.py.in | 40 ++++++++++++++++++++++++++++++++ src/cython/setup.py.in | 10 ++++---- 3 files changed, 66 insertions(+), 42 deletions(-) create mode 100644 src/cython/__init__.py.in diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index 509a122e..b69b2c7d 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -33,15 +33,23 @@ endfunction( add_gudhi_debug_info ) if(PYTHONINTERP_FOUND) if(CYTHON_FOUND) - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}off_reader;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}simplex_tree;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}rips_complex;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}cubical_complex;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}periodic_cubical_complex;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}reader_utils;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}witness_complex;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}strong_witness_complex;") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'off_reader', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'simplex_tree', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'rips_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'cubical_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'periodic_cubical_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'persistence_graphical_tools', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'reader_utils', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'witness_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'strong_witness_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'persistence_graphical_tools' ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'bottleneck_distance', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'subsampling', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'tangential_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'alpha_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'euclidean_witness_complex', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'euclidean_strong_witness_complex', ") add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}") add_gudhi_debug_info("Cython version ${CYTHON_VERSION}") @@ -57,11 +65,6 @@ if(PYTHONINTERP_FOUND) if(SCIPY_FOUND) add_gudhi_debug_info("Scipy version ${SCIPY_VERSION}") endif() - if(MATPLOTLIB_FOUND AND NUMPY_FOUND AND SCIPY_FOUND) - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;") - else() - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;") - endif() message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_VERSION} - Sphinx is ${SPHINX_PATH}") set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS 
"${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ") @@ -93,43 +96,20 @@ if(PYTHONINTERP_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}bottleneck_distance;") set(GUDHI_CYTHON_NERVE_GIC "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/nerve_gic.pyx'") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}nerve_gic;") - else() - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}bottleneck_distance;") - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}nerve_gic;") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}subsampling;") set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}tangential_complex;") - else() - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}subsampling;") - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}tangential_complex;") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}alpha_complex;") - else() - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}alpha_complex;") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_witness_complex;") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_strong_witness_complex;") - else() - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}euclidean_witness_complex;") - set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}euclidean_strong_witness_complex;") endif () - add_gudhi_debug_info("Installed modules are: ${GUDHI_CYTHON_MODULES}") - if(GUDHI_CYTHON_MISSING_MODULES) - add_gudhi_debug_info("Missing modules are: ${GUDHI_CYTHON_MISSING_MODULES}") - endif() - if(CGAL_FOUND) can_cgal_use_cxx11_thread_local() if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT) @@ -213,6 +193,10 @@ if(PYTHONINTERP_FOUND) # Generate gudhi.pyx - Gudhi cython file configure_file(gudhi.pyx.in "${CMAKE_CURRENT_BINARY_DIR}/gudhi.pyx" @ONLY) + # Generate gudhi/__init__.py + file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + configure_file(__init__.py.in "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY) + add_custom_command( OUTPUT gudhi.so WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} diff --git a/src/cython/__init__.py.in b/src/cython/__init__.py.in new file mode 100644 index 00000000..60ad7865 --- /dev/null +++ b/src/cython/__init__.py.in @@ -0,0 +1,40 @@ +from importlib import import_module + +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "https://gudhi.inria.fr/licensing/" +__version__ = "@GUDHI_VERSION@" +# This variable is used by doctest to find files +__root_source_dir__ = "@CMAKE_SOURCE_DIR@" +__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@ + +from sys import exc_info +from importlib import import_module + +__all__ = [@GUDHI_CYTHON_MODULES@] + +__available_modules__ = '' +__missing_modules__ = '' + +# try to import * from gudhi.__module_name__ +for __module_name__ in __all__: + try: + __module__ = import_module('gudhi.' + __module_name__) + try: + __to_import__ = __module__.__all__ + except AttributeError: + __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')] + globals().update({name: __module__.__dict__[name] for name in __to_import__}) + __available_modules__ += __module_name__ + ";" + except: + __missing_modules__ += __module_name__ + ";" diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in index 70c85852..454be9af 100644 --- a/src/cython/setup.py.in +++ b/src/cython/setup.py.in @@ -14,11 +14,11 @@ from numpy import get_include as numpy_get_include __author__ = "GUDHI Editorial Board" __copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" +__license__ = "MIT" -gudhi = Extension( - "gudhi", - sources = ['@CMAKE_CURRENT_BINARY_DIR@/gudhi.pyx',], +simplextree = Extension( + "gudhi.simplextree", + sources = ['@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx',], language = 'c++', extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@], extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@], @@ -34,7 +34,7 @@ setup( author_email='gudhi-contact@lists.gforge.inria.fr', version='@GUDHI_VERSION@', url='http://gudhi.gforge.inria.fr/', - ext_modules = cythonize(gudhi), + ext_modules = cythonize(simplextree), install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9',], ) -- cgit v1.2.3 From 0a4a42e38dffaea03083e39e5eafed2e1f32ff05 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jul 2019 17:14:53 +0200 Subject: move cython in a gudhi directory - like the python package one --- src/cython/CMakeLists.txt | 2 +- src/cython/__init__.py.in | 40 -- src/cython/cython/alpha_complex.pyx | 109 ----- src/cython/cython/bottleneck_distance.pyx | 49 -- src/cython/cython/cubical_complex.pyx | 188 ------- .../cython/euclidean_strong_witness_complex.pyx | 85 ---- src/cython/cython/euclidean_witness_complex.pyx | 85 ---- src/cython/cython/nerve_gic.pyx | 407 ---------------- src/cython/cython/off_reader.pyx | 38 -- src/cython/cython/periodic_cubical_complex.pyx | 190 -------- src/cython/cython/persistence_graphical_tools.py | 420 ---------------- src/cython/cython/reader_utils.pyx | 87 ---- src/cython/cython/rips_complex.pyx | 98 ---- src/cython/cython/simplex_tree.pyx | 542 --------------------- src/cython/cython/strong_witness_complex.pyx | 71 --- src/cython/cython/subsampling.pyx | 130 ----- src/cython/cython/tangential_complex.pyx | 168 ------- src/cython/cython/witness_complex.pyx | 71 --- src/cython/gudhi/__init__.py.in | 40 ++ src/cython/gudhi/alpha_complex.pyx | 115 +++++ src/cython/gudhi/bottleneck_distance.pyx | 49 ++ src/cython/gudhi/cubical_complex.pyx | 188 +++++++ .../gudhi/euclidean_strong_witness_complex.pyx | 85 ++++ src/cython/gudhi/euclidean_witness_complex.pyx | 85 ++++ src/cython/gudhi/nerve_gic.pyx | 407 ++++++++++++++++ 
src/cython/gudhi/off_reader.pyx | 38 ++ src/cython/gudhi/periodic_cubical_complex.pyx | 190 ++++++++ src/cython/gudhi/persistence_graphical_tools.py | 420 ++++++++++++++++ src/cython/gudhi/reader_utils.pyx | 87 ++++ src/cython/gudhi/rips_complex.pyx | 102 ++++ src/cython/gudhi/simplex_tree.pxd | 46 ++ src/cython/gudhi/simplex_tree.pyx | 518 ++++++++++++++++++++ src/cython/gudhi/strong_witness_complex.pyx | 78 +++ src/cython/gudhi/subsampling.pyx | 130 +++++ src/cython/gudhi/tangential_complex.pyx | 177 +++++++ src/cython/gudhi/witness_complex.pyx | 78 +++ 36 files changed, 2834 insertions(+), 2779 deletions(-) delete mode 100644 src/cython/__init__.py.in delete mode 100644 src/cython/cython/alpha_complex.pyx delete mode 100644 src/cython/cython/bottleneck_distance.pyx delete mode 100644 src/cython/cython/cubical_complex.pyx delete mode 100644 src/cython/cython/euclidean_strong_witness_complex.pyx delete mode 100644 src/cython/cython/euclidean_witness_complex.pyx delete mode 100644 src/cython/cython/nerve_gic.pyx delete mode 100644 src/cython/cython/off_reader.pyx delete mode 100644 src/cython/cython/periodic_cubical_complex.pyx delete mode 100644 src/cython/cython/persistence_graphical_tools.py delete mode 100644 src/cython/cython/reader_utils.pyx delete mode 100644 src/cython/cython/rips_complex.pyx delete mode 100644 src/cython/cython/simplex_tree.pyx delete mode 100644 src/cython/cython/strong_witness_complex.pyx delete mode 100644 src/cython/cython/subsampling.pyx delete mode 100644 src/cython/cython/tangential_complex.pyx delete mode 100644 src/cython/cython/witness_complex.pyx create mode 100644 src/cython/gudhi/__init__.py.in create mode 100644 src/cython/gudhi/alpha_complex.pyx create mode 100644 src/cython/gudhi/bottleneck_distance.pyx create mode 100644 src/cython/gudhi/cubical_complex.pyx create mode 100644 src/cython/gudhi/euclidean_strong_witness_complex.pyx create mode 100644 src/cython/gudhi/euclidean_witness_complex.pyx create mode 100644 src/cython/gudhi/nerve_gic.pyx create mode 100644 src/cython/gudhi/off_reader.pyx create mode 100644 src/cython/gudhi/periodic_cubical_complex.pyx create mode 100644 src/cython/gudhi/persistence_graphical_tools.py create mode 100644 src/cython/gudhi/reader_utils.pyx create mode 100644 src/cython/gudhi/rips_complex.pyx create mode 100644 src/cython/gudhi/simplex_tree.pxd create mode 100644 src/cython/gudhi/simplex_tree.pyx create mode 100644 src/cython/gudhi/strong_witness_complex.pyx create mode 100644 src/cython/gudhi/subsampling.pyx create mode 100644 src/cython/gudhi/tangential_complex.pyx create mode 100644 src/cython/gudhi/witness_complex.pyx diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index b69b2c7d..cd99f70b 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -195,7 +195,7 @@ if(PYTHONINTERP_FOUND) # Generate gudhi/__init__.py file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi") - configure_file(__init__.py.in "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY) + configure_file("gudhi/__init__.py.in" "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY) add_custom_command( OUTPUT gudhi.so diff --git a/src/cython/__init__.py.in b/src/cython/__init__.py.in deleted file mode 100644 index 60ad7865..00000000 --- a/src/cython/__init__.py.in +++ /dev/null @@ -1,40 +0,0 @@ -from importlib import import_module - -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "https://gudhi.inria.fr/licensing/" -__version__ = "@GUDHI_VERSION@" -# This variable is used by doctest to find files -__root_source_dir__ = "@CMAKE_SOURCE_DIR@" -__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@ - -from sys import exc_info -from importlib import import_module - -__all__ = [@GUDHI_CYTHON_MODULES@] - -__available_modules__ = '' -__missing_modules__ = '' - -# try to import * from gudhi.__module_name__ -for __module_name__ in __all__: - try: - __module__ = import_module('gudhi.' + __module_name__) - try: - __to_import__ = __module__.__all__ - except AttributeError: - __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')] - globals().update({name: __module__.__dict__[name] for name in __to_import__}) - __available_modules__ += __module_name__ + ";" - except: - __missing_modules__ += __module_name__ + ";" diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx deleted file mode 100644 index 249d51d0..00000000 --- a/src/cython/cython/alpha_complex.pyx +++ /dev/null @@ -1,109 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": - cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": - Alpha_complex_interface(vector[vector[double]] points) - # bool from_file is a workaround for cython to find the correct signature - Alpha_complex_interface(string off_file, bool from_file) - vector[double] get_point(int vertex) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - -# AlphaComplex python interface -cdef class AlphaComplex: - """AlphaComplex is a simplicial complex constructed from the finite cells - of a Delaunay Triangulation. - - The filtration value of each simplex is computed as the square of the - circumradius of the simplex if the circumsphere is empty (the simplex is - then said to be Gabriel), and as the minimum of the filtration values of - the codimension 1 cofaces that make it not Gabriel otherwise. - - All simplices that have a filtration value strictly greater than a given - alpha squared value are not inserted into the complex. - - .. note:: - - When Alpha_complex is constructed with an infinite value of alpha, the - complex is a Delaunay complex. - - """ - - cdef Alpha_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, points=None, off_file=''): - """AlphaComplex constructor. - - :param points: A list of points in d-Dimension. - :type points: list of list of double - - Or - - :param off_file: An OFF file style name. 
- :type off_file: string - """ - - # The real cython constructor - def __cinit__(self, points=None, off_file=''): - if off_file is not '': - if os.path.isfile(off_file): - self.thisptr = new Alpha_complex_interface(str.encode(off_file), True) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty Alpha construction - points=[] - self.thisptr = new Alpha_complex_interface(points) - - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if AlphaComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int - :rtype: list of float - :returns: the point. - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - - def create_simplex_tree(self, max_alpha_square=float('inf')): - """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree diff --git a/src/cython/cython/bottleneck_distance.pyx b/src/cython/cython/bottleneck_distance.pyx deleted file mode 100644 index 4b378cbc..00000000 --- a/src/cython/cython/bottleneck_distance.pyx +++ /dev/null @@ -1,49 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": - double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) - double bottleneck(vector[pair[double, double]], vector[pair[double, double]]) - -def bottleneck_distance(diagram_1, diagram_2, e=None): - """This function returns the point corresponding to a given vertex. - - :param diagram_1: The first diagram. - :type diagram_1: vector[pair[double, double]] - :param diagram_2: The second diagram. - :type diagram_2: vector[pair[double, double]] - :param e: If `e` is 0, this uses an expensive algorithm to compute the - exact distance. - If `e` is not 0, it asks for an additive `e`-approximation, and - currently also allows a small multiplicative error (the last 2 or 3 - bits of the mantissa may be wrong). This version of the algorithm takes - advantage of the limited precision of `double` and is usually a lot - faster to compute, whatever the value of `e`. - - Thus, by default, `e` is the smallest positive double. - :type e: float - :rtype: float - :returns: the bottleneck distance. 
- """ - if e is None: - # Default value is the smallest double value (not 0, 0 is for exact version) - return bottleneck(diagram_1, diagram_2) - else: - # Can be 0 for exact version - return bottleneck(diagram_1, diagram_2, e) diff --git a/src/cython/cython/cubical_complex.pyx b/src/cython/cython/cubical_complex.pyx deleted file mode 100644 index 0dc133d1..00000000 --- a/src/cython/cython/cubical_complex.pyx +++ /dev/null @@ -1,188 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": - cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>": - Bitmap_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells) - Bitmap_cubical_complex_base_interface(string perseus_file) - int num_simplices() - int dimension() - -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>": - Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - -# CubicalComplex python interface -cdef class CubicalComplex: - """The CubicalComplex is an example of a structured complex useful in - computational mathematics (specially rigorous numerics) and image - analysis. - """ - cdef Bitmap_cubical_complex_base_interface * thisptr - - cdef Cubical_complex_persistence_interface * pcohptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, dimensions=None, top_dimensional_cells=None, - perseus_file=''): - """CubicalComplex constructor from dimensions and - top_dimensional_cells or from a Perseus-style file name. - - :param dimensions: A list of number of top dimensional cells. - :type dimensions: list of int - :param top_dimensional_cells: A list of cells filtration values. - :type top_dimensional_cells: list of double - - Or - - :param perseus_file: A Perseus-style file name. 
- :type perseus_file: string - """ - - # The real cython constructor - def __cinit__(self, dimensions=None, top_dimensional_cells=None, - perseus_file=''): - if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file is ''): - self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells) - elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file is not ''): - if os.path.isfile(perseus_file): - self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file)) - else: - print("file " + perseus_file + " not found.") - else: - print("CubicalComplex can be constructed from dimensions and " - "top_dimensional_cells or from a Perseus-style file name.") - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - if self.pcohptr != NULL: - del self.pcohptr - - def __is_defined(self): - """Returns true if CubicalComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def __is_persistence_defined(self): - """Returns true if Persistence pointer is not NULL. - """ - return self.pcohptr != NULL - - def num_simplices(self): - """This function returns the number of all cubes in the complex. - - :returns: int -- the number of all cubes in the complex. - """ - return self.thisptr.num_simplices() - - def dimension(self): - """This function returns the dimension of the complex. - - :returns: int -- the complex dimension. - """ - return self.thisptr.dimension() - - def persistence(self, homology_coeff_field=11, min_persistence=0): - """This function returns the persistence of the complex. - - :param homology_coeff_field: The homology coefficient field. Must be a - prime number - :type homology_coeff_field: int. - :param min_persistence: The minimum persistence value to take into - account (strictly greater than min_persistence). Default value is - 0.0. - Sets min_persistence to -1.0 to see all values. - :type min_persistence: float. - :returns: list of pairs(dimension, pair(birth, death)) -- the - persistence of the complex. - """ - if self.pcohptr != NULL: - del self.pcohptr - if self.thisptr != NULL: - self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True) - cdef vector[pair[int, pair[double, double]]] persistence_result - if self.pcohptr != NULL: - persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) - return persistence_result - - def betti_numbers(self): - """This function returns the Betti numbers of the complex. - - :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). - - :note: betti_numbers function requires persistence function to be - launched first. - - :note: betti_numbers function always returns [1, 0, 0, ...] as infinity - filtration cubes are not removed from the complex. - """ - cdef vector[int] bn_result - if self.pcohptr != NULL: - bn_result = self.pcohptr.betti_numbers() - return bn_result - - def persistent_betti_numbers(self, from_value, to_value): - """This function returns the persistent Betti numbers of the complex. - - :param from_value: The persistence birth limit to be added in the - numbers (persistent birth <= from_value). - :type from_value: float. - :param to_value: The persistence death limit to be added in the - numbers (persistent death > to_value). - :type to_value: float. - - :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., - Bn]). - - :note: persistent_betti_numbers function requires persistence - function to be launched first. 
- """ - cdef vector[int] pbn_result - if self.pcohptr != NULL: - pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) - return pbn_result - - def persistence_intervals_in_dimension(self, dimension): - """This function returns the persistence intervals of the complex in a - specific dimension. - - :param dimension: The specific dimension. - :type dimension: int. - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - - :note: intervals_in_dim function requires persistence function to be - launched first. - """ - cdef vector[pair[double,double]] intervals_result - if self.pcohptr != NULL: - intervals_result = self.pcohptr.intervals_in_dimension(dimension) - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") - return np_array(intervals_result) diff --git a/src/cython/cython/euclidean_strong_witness_complex.pyx b/src/cython/cython/euclidean_strong_witness_complex.pyx deleted file mode 100644 index 26bd8375..00000000 --- a/src/cython/cython/euclidean_strong_witness_complex.pyx +++ /dev/null @@ -1,85 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface": - Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - vector[double] get_point(unsigned vertex) - -# EuclideanStrongWitnessComplex python interface -cdef class EuclideanStrongWitnessComplex: - """Constructs strong witness complex for given sets of witnesses and - landmarks in Euclidean space. - """ - - cdef Euclidean_strong_witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, landmarks=None, witnesses=None): - """WitnessComplex constructor. - - :param landmarks: A list of landmarks (in the point cloud). - :type landmarks: list of list of double - - :param witnesses: The point cloud. - :type witnesses: list of list of double - """ - - # The real cython constructor - def __cinit__(self, landmarks=None, witnesses=None): - if landmarks is not None and witnesses is not None: - self.thisptr = new Euclidean_strong_witness_complex_interface(landmarks, witnesses) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if WitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): - """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity. 
- :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int. - :returns: The point. - :rtype: list of float - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - diff --git a/src/cython/cython/euclidean_witness_complex.pyx b/src/cython/cython/euclidean_witness_complex.pyx deleted file mode 100644 index e687c6f3..00000000 --- a/src/cython/cython/euclidean_witness_complex.pyx +++ /dev/null @@ -1,85 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface": - Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - vector[double] get_point(unsigned vertex) - -# EuclideanWitnessComplex python interface -cdef class EuclideanWitnessComplex: - """Constructs (weak) witness complex for given sets of witnesses and - landmarks in Euclidean space. - """ - - cdef Euclidean_witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, landmarks=None, witnesses=None): - """WitnessComplex constructor. - - :param landmarks: A list of landmarks (in the point cloud). - :type landmarks: list of list of double - - :param witnesses: The point cloud. - :type witnesses: list of list of double - """ - - # The real cython constructor - def __cinit__(self, landmarks=None, witnesses=None): - if landmarks is not None and witnesses is not None: - self.thisptr = new Euclidean_witness_complex_interface(landmarks, witnesses) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if WitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): - """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. 
- :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int. - :returns: The point. - :rtype: list of float - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - diff --git a/src/cython/cython/nerve_gic.pyx b/src/cython/cython/nerve_gic.pyx deleted file mode 100644 index 3c8f1200..00000000 --- a/src/cython/cython/nerve_gic.pyx +++ /dev/null @@ -1,407 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" - -cdef extern from "Nerve_gic_interface.h" namespace "Gudhi": - cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface": - Nerve_gic_interface() - double compute_confidence_level_from_distance(double distance) - double compute_distance_from_confidence_level(double alpha) - void compute_distribution(int N) - double compute_p_value() - vector[pair[double, double]] compute_PD() - void find_simplices() - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) - bool read_point_cloud(string off_file_name) - double set_automatic_resolution() - void set_color_from_coordinate(int k) - void set_color_from_file(string color_file_name) - void set_color_from_range(vector[double] color) - void set_cover_from_file(string cover_file_name) - void set_cover_from_function() - void set_cover_from_Euclidean_Voronoi(int m) - void set_function_from_coordinate(int k) - void set_function_from_file(string func_file_name) - void set_function_from_range(vector[double] function) - void set_gain(double g) - double set_graph_from_automatic_euclidean_rips(int N) - void set_graph_from_file(string graph_file_name) - void set_graph_from_OFF() - void set_graph_from_euclidean_rips(double threshold) - void set_mask(int nodemask) - void set_resolution_with_interval_length(double resolution) - void set_resolution_with_interval_number(int resolution) - void set_subsampling(double constant, double power) - void set_type(string type) - void set_verbose(bool verbose) - vector[int] subpopulation(int c) - void write_info() - void plot_DOT() - void plot_OFF() - void set_point_cloud_from_range(vector[vector[double]] cloud) - void set_distances_from_range(vector[vector[double]] distance_matrix) - -# CoverComplex python interface -cdef class CoverComplex: - """Cover complex data structure. - - The data structure is a simplicial complex, representing a Graph Induced - simplicial Complex (GIC) or a Nerve, and whose simplices are computed with - a cover C of a point cloud P, which often comes from the preimages of - intervals covering the image of a function f defined on P. 
These intervals - are parameterized by their resolution (either their length or their number) - and their gain (percentage of overlap). To compute a GIC, one also needs a - graph G built on top of P, whose cliques with vertices belonging to - different elements of C correspond to the simplices of the GIC. - """ - - cdef Nerve_gic_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self): - """CoverComplex constructor. - """ - - # The real cython constructor - def __cinit__(self): - self.thisptr = new Nerve_gic_interface() - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if CoverComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def set_point_cloud_from_range(self, cloud): - """ Reads and stores the input point cloud from a vector stored in memory. - - :param cloud: Input vector containing the point cloud. - :type cloud: vector[vector[double]] - """ - return self.thisptr.set_point_cloud_from_range(cloud) - - def set_distances_from_range(self, distance_matrix): - """ Reads and stores the input distance matrix from a vector stored in memory. - - :param distance_matrix: Input vector containing the distance matrix. - :type distance_matrix: vector[vector[double]] - """ - return self.thisptr.set_distances_from_range(distance_matrix) - - def compute_confidence_level_from_distance(self, distance): - """Computes the confidence level of a specific bottleneck distance - threshold. - - :param distance: Bottleneck distance. - :type distance: double - :rtype: double - :returns: Confidence level. - """ - return self.thisptr.compute_confidence_level_from_distance(distance) - - def compute_distance_from_confidence_level(self, alpha): - """Computes the bottleneck distance threshold corresponding to a - specific confidence level. - - :param alpha: Confidence level. - :type alpha: double - :rtype: double - :returns: Bottleneck distance. - """ - return self.thisptr.compute_distance_from_confidence_level(alpha) - - def compute_distribution(self, N=100): - """Computes bootstrapped distances distribution. - - :param N: Loop number (default value is 100). - :type alpha: int - """ - self.thisptr.compute_distribution(N) - - def compute_p_value(self): - """Computes the p-value, i.e. the opposite of the confidence level of - the largest bottleneck distance preserving the points in the - persistence diagram of the output simplicial complex. - - :rtype: double - :returns: p-value. - """ - return self.thisptr.compute_p_value() - - def compute_PD(self): - """Computes the extended persistence diagram of the complex. - """ - return self.thisptr.compute_PD() - - def create_simplex_tree(self): - """ - :returns: A simplex tree created from the Cover complex. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - self.thisptr.create_simplex_tree(simplex_tree.thisptr) - return simplex_tree - - def find_simplices(self): - """Computes the simplices of the simplicial complex. - """ - self.thisptr.find_simplices() - - def read_point_cloud(self, off_file): - """Reads and stores the input point cloud from .(n)OFF file. - - :param off_file: Name of the input .OFF or .nOFF file. - :type off_file: string - :rtype: bool - :returns: Read file status. 
- """ - if os.path.isfile(off_file): - return self.thisptr.read_point_cloud(str.encode(off_file)) - else: - print("file " + off_file + " not found.") - return False - - def set_automatic_resolution(self): - """Computes the optimal length of intervals (i.e. the smallest interval - length avoiding discretization artifacts—see :cite:`Carriere17c`) for a - functional cover. - - :rtype: double - :returns: reso interval length used to compute the cover. - """ - return self.thisptr.set_automatic_resolution() - - def set_color_from_coordinate(self, k=0): - """Computes the function used to color the nodes of the simplicial - complex from the k-th coordinate. - - :param k: Coordinate to use (start at 0). Default value is 0. - :type k: int - """ - return self.thisptr.set_color_from_coordinate(k) - - def set_color_from_file(self, color_file_name): - """Computes the function used to color the nodes of the simplicial - complex from a file containing the function values. - - :param color_file_name: Name of the input color file. - :type color_file_name: string - """ - if os.path.isfile(color_file_name): - self.thisptr.set_color_from_file(str.encode(color_file_name)) - else: - print("file " + color_file_name + " not found.") - - def set_color_from_range(self, color): - """Computes the function used to color the nodes of the simplicial - complex from a vector stored in memory. - - :param color: Input vector of values. - :type color: vector[double] - """ - self.thisptr.set_color_from_range(color) - - def set_cover_from_file(self, cover_file_name): - """Creates the cover C from a file containing the cover elements of - each point (the order has to be the same as in the input file!). - - :param cover_file_name: Name of the input cover file. - :type cover_file_name: string - """ - if os.path.isfile(cover_file_name): - self.thisptr.set_cover_from_file(str.encode(cover_file_name)) - else: - print("file " + cover_file_name + " not found.") - - def set_cover_from_function(self): - """Creates a cover C from the preimages of the function f. - """ - self.thisptr.set_cover_from_function() - - def set_cover_from_Voronoi(self, m=100): - """Creates the cover C from the Voronoï cells of a subsampling of the - point cloud. - - :param m: Number of points in the subsample. Default value is 100. - :type m: int - """ - self.thisptr.set_cover_from_Euclidean_Voronoi(m) - - def set_function_from_coordinate(self, k): - """Creates the function f from the k-th coordinate of the point cloud. - - :param k: Coordinate to use (start at 0). - :type k: int - """ - self.thisptr.set_function_from_coordinate(k) - - def set_function_from_file(self, func_file_name): - """Creates the function f from a file containing the function values. - - :param func_file_name: Name of the input function file. - :type func_file_name: string - """ - if os.path.isfile(func_file_name): - self.thisptr.set_function_from_file(str.encode(func_file_name)) - else: - print("file " + func_file_name + " not found.") - - def set_function_from_range(self, function): - """Creates the function f from a vector stored in memory. - - :param function: Input vector of values. - :type function: vector[double] - """ - self.thisptr.set_function_from_range(function) - - def set_gain(self, g = 0.3): - """Sets a gain from a value stored in memory. - - :param g: Gain (default value is 0.3). 
- :type g: double - """ - self.thisptr.set_gain(g) - - def set_graph_from_automatic_rips(self, N=100): - """Creates a graph G from a Rips complex whose threshold value is - automatically tuned with subsampling—see. - - :param N: Number of subsampling iteration (the default reasonable value - is 100, but there is no guarantee on how to choose it). - :type N: int - :rtype: double - :returns: Delta threshold used for computing the Rips complex. - """ - return self.thisptr.set_graph_from_automatic_euclidean_rips(N) - - def set_graph_from_file(self, graph_file_name): - """Creates a graph G from a file containing the edges. - - :param graph_file_name: Name of the input graph file. The graph file - contains one edge per line, each edge being represented by the IDs of - its two nodes. - :type graph_file_name: string - """ - if os.path.isfile(graph_file_name): - self.thisptr.set_graph_from_file(str.encode(graph_file_name)) - else: - print("file " + graph_file_name + " not found.") - - def set_graph_from_OFF(self): - """Creates a graph G from the triangulation given by the input OFF - file. - """ - self.thisptr.set_graph_from_OFF() - - def set_graph_from_rips(self, threshold): - """Creates a graph G from a Rips complex. - - :param threshold: Threshold value for the Rips complex. - :type threshold: double - """ - self.thisptr.set_graph_from_euclidean_rips(threshold) - - def set_mask(self, nodemask): - """Sets the mask, which is a threshold integer such that nodes in the - complex that contain a number of data points which is less than or - equal to this threshold are not displayed. - - :param nodemask: Threshold. - :type nodemask: int - """ - self.thisptr.set_mask(nodemask) - - def set_resolution_with_interval_length(self, resolution): - """Sets a length of intervals from a value stored in memory. - - :param resolution: Length of intervals. - :type resolution: double - """ - self.thisptr.set_resolution_with_interval_length(resolution) - - def set_resolution_with_interval_number(self, resolution): - """Sets a number of intervals from a value stored in memory. - - :param resolution: Number of intervals. - :type resolution: int - """ - self.thisptr.set_resolution_with_interval_number(resolution) - - def set_subsampling(self, constant, power): - """Sets the constants used to subsample the data set. These constants - are explained in :cite:`Carriere17c`. - - :param constant: Constant. - :type constant: double - :param power: Power. - :type resolution: double - """ - self.thisptr.set_subsampling(constant, power) - - def set_type(self, type): - """Specifies whether the type of the output simplicial complex. - - :param type: either "GIC" or "Nerve". - :type type: string - """ - self.thisptr.set_type(str.encode(type)) - - def set_verbose(self, verbose): - """Specifies whether the program should display information or not. - - :param verbose: true = display info, false = do not display info. - :type verbose: boolean - """ - self.thisptr.set_verbose(verbose) - - def subpopulation(self, c): - """Returns the data subset corresponding to a specific node of the - created complex. - - :param c: ID of the node. - :type c: int - :rtype: vector[int] - :returns: Vector of IDs of data points. - """ - return self.thisptr.subpopulation(c) - - def write_info(self): - """Creates a .txt file called SC.txt describing the 1-skeleton, which can - then be plotted with e.g. KeplerMapper. 
- """ - return self.thisptr.write_info() - - def plot_dot(self): - """Creates a .dot file called SC.dot for neato (part of the graphviz - package) once the simplicial complex is computed to get a visualization of - its 1-skeleton in a .pdf file. - """ - return self.thisptr.plot_DOT() - - def plot_off(self): - """Creates a .off file called SC.off for 3D visualization, which contains - the 2-skeleton of the GIC. This function assumes that the cover has been - computed with Voronoi. If data points are in 1D or 2D, the remaining - coordinates of the points embedded in 3D are set to 0. - """ - return self.thisptr.plot_OFF() diff --git a/src/cython/cython/off_reader.pyx b/src/cython/cython/off_reader.pyx deleted file mode 100644 index 9efd97ff..00000000 --- a/src/cython/cython/off_reader.pyx +++ /dev/null @@ -1,38 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.string cimport string -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Off_reader_interface.h" namespace "Gudhi": - vector[vector[double]] read_points_from_OFF_file(string off_file) - -def read_off(off_file=''): - """Read points from OFF file. - - :param off_file: An OFF file style name. - :type off_file: string - - :returns: The point set. - :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - return read_points_from_OFF_file(str.encode(off_file)) - else: - print("file " + off_file + " not found.") - return [] - diff --git a/src/cython/cython/periodic_cubical_complex.pyx b/src/cython/cython/periodic_cubical_complex.pyx deleted file mode 100644 index 724fadd4..00000000 --- a/src/cython/cython/periodic_cubical_complex.pyx +++ /dev/null @@ -1,190 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": - cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface>": - Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells, vector[bool] periodic_dimensions) - Periodic_cubical_complex_base_interface(string perseus_file) - int num_simplices() - int dimension() - -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>>": - Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - -# PeriodicCubicalComplex python interface -cdef class PeriodicCubicalComplex: - """The PeriodicCubicalComplex is an example of a structured complex useful - in computational mathematics (specially rigorous numerics) and image - analysis. - """ - cdef Periodic_cubical_complex_base_interface * thisptr - - cdef Periodic_cubical_complex_persistence_interface * pcohptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, dimensions=None, top_dimensional_cells=None, - periodic_dimensions=None, perseus_file=''): - """PeriodicCubicalComplex constructor from dimensions and - top_dimensional_cells or from a Perseus-style file name. - - :param dimensions: A list of number of top dimensional cells. - :type dimensions: list of int - :param top_dimensional_cells: A list of cells filtration values. - :type top_dimensional_cells: list of double - :param periodic_dimensions: A list of top dimensional cells periodicity value. - :type periodic_dimensions: list of boolean - - Or - - :param perseus_file: A Perseus-style file name. - :type perseus_file: string - """ - - # The real cython constructor - def __cinit__(self, dimensions=None, top_dimensional_cells=None, - periodic_dimensions=None, perseus_file=''): - if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file is ''): - self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions) - elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file is not ''): - if os.path.isfile(perseus_file): - self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file)) - else: - print("file " + perseus_file + " not found.") - else: - print("CubicalComplex can be constructed from dimensions and " - "top_dimensional_cells or from a Perseus-style file name.") - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - if self.pcohptr != NULL: - del self.pcohptr - - def __is_defined(self): - """Returns true if PeriodicCubicalComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def __is_persistence_defined(self): - """Returns true if Persistence pointer is not NULL. 
- """ - return self.pcohptr != NULL - - def num_simplices(self): - """This function returns the number of all cubes in the complex. - - :returns: int -- the number of all cubes in the complex. - """ - return self.thisptr.num_simplices() - - def dimension(self): - """This function returns the dimension of the complex. - - :returns: int -- the complex dimension. - """ - return self.thisptr.dimension() - - def persistence(self, homology_coeff_field=11, min_persistence=0): - """This function returns the persistence of the complex. - - :param homology_coeff_field: The homology coefficient field. Must be a - prime number - :type homology_coeff_field: int. - :param min_persistence: The minimum persistence value to take into - account (strictly greater than min_persistence). Default value is - 0.0. - Sets min_persistence to -1.0 to see all values. - :type min_persistence: float. - :returns: list of pairs(dimension, pair(birth, death)) -- the - persistence of the complex. - """ - if self.pcohptr != NULL: - del self.pcohptr - if self.thisptr != NULL: - self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True) - cdef vector[pair[int, pair[double, double]]] persistence_result - if self.pcohptr != NULL: - persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) - return persistence_result - - def betti_numbers(self): - """This function returns the Betti numbers of the complex. - - :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). - - :note: betti_numbers function requires persistence function to be - launched first. - - :note: betti_numbers function always returns [1, 0, 0, ...] as infinity - filtration cubes are not removed from the complex. - """ - cdef vector[int] bn_result - if self.pcohptr != NULL: - bn_result = self.pcohptr.betti_numbers() - return bn_result - - def persistent_betti_numbers(self, from_value, to_value): - """This function returns the persistent Betti numbers of the complex. - - :param from_value: The persistence birth limit to be added in the - numbers (persistent birth <= from_value). - :type from_value: float. - :param to_value: The persistence death limit to be added in the - numbers (persistent death > to_value). - :type to_value: float. - - :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., - Bn]). - - :note: persistent_betti_numbers function requires persistence - function to be launched first. - """ - cdef vector[int] pbn_result - if self.pcohptr != NULL: - pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) - return pbn_result - - def persistence_intervals_in_dimension(self, dimension): - """This function returns the persistence intervals of the complex in a - specific dimension. - - :param dimension: The specific dimension. - :type dimension: int. - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - - :note: intervals_in_dim function requires persistence function to be - launched first. 
- """ - cdef vector[pair[double,double]] intervals_result - if self.pcohptr != NULL: - intervals_result = self.pcohptr.intervals_in_dimension(dimension) - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") - return np_array(intervals_result) diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py deleted file mode 100644 index 34803222..00000000 --- a/src/cython/cython/persistence_graphical_tools.py +++ /dev/null @@ -1,420 +0,0 @@ -from os import path -from math import isfinite -import numpy as np - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau, Bertrand Michel - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau, Bertrand Michel" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def __min_birth_max_death(persistence, band=0.0): - """This function returns (min_birth, max_death) from the persistence. - - :param persistence: The persistence to plot. - :type persistence: list of tuples(dimension, tuple(birth, death)). - :param band: band - :type band: float. - :returns: (float, float) -- (min_birth, max_death). - """ - # Look for minimum birth date and maximum death date for plot optimisation - max_death = 0 - min_birth = persistence[0][1][0] - for interval in reversed(persistence): - if float(interval[1][1]) != float("inf"): - if float(interval[1][1]) > max_death: - max_death = float(interval[1][1]) - if float(interval[1][0]) > max_death: - max_death = float(interval[1][0]) - if float(interval[1][0]) < min_birth: - min_birth = float(interval[1][0]) - if band > 0.0: - max_death += band - return (min_birth, max_death) - - -""" -Only 13 colors for the palette -""" -palette = [ - "#ff0000", - "#00ff00", - "#0000ff", - "#00ffff", - "#ff00ff", - "#ffff00", - "#000000", - "#880000", - "#008800", - "#000088", - "#888800", - "#880088", - "#008888", -] - - -def plot_persistence_barcode( - persistence=[], - persistence_file="", - alpha=0.6, - max_intervals=1000, - max_barcodes=1000, - inf_delta=0.1, - legend=False, -): - """This function plots the persistence bar code from persistence values list - or from a :doc:`persistence file `. - - :param persistence: Persistence intervals values list grouped by dimension. - :type persistence: list of tuples(dimension, tuple(birth, death)). - :param persistence_file: A :doc:`persistence file ` style name - (reset persistence if both are set). - :type persistence_file: string - :param alpha: barcode transparency value (0.0 transparent through 1.0 - opaque - default is 0.6). - :type alpha: float. - :param max_intervals: maximal number of intervals to display. - Selected intervals are those with the longest life time. Set it - to 0 to see all. Default value is 1000. - :type max_intervals: int. - :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x - inf_delta)` above :code:`max_death` value. A reasonable value is - between 0.05 and 0.5 - default is 0.1. - :type inf_delta: float. - :param legend: Display the dimension color legend (default is False). - :type legend: boolean. - :returns: A matplotlib object containing horizontal bar plot of persistence - (launch `show()` method on it to display it). 
- """ - try: - import matplotlib.pyplot as plt - import matplotlib.patches as mpatches - - if persistence_file is not "": - if path.isfile(persistence_file): - # Reset persistence - persistence = [] - diag = read_persistence_intervals_grouped_by_dimension( - persistence_file=persistence_file - ) - for key in diag.keys(): - for persistence_interval in diag[key]: - persistence.append((key, persistence_interval)) - else: - print("file " + persistence_file + " not found.") - return None - - if max_barcodes is not 1000: - print("Deprecated parameter. It has been replaced by max_intervals") - max_intervals = max_barcodes - - if max_intervals > 0 and max_intervals < len(persistence): - # Sort by life time, then takes only the max_intervals elements - persistence = sorted( - persistence, - key=lambda life_time: life_time[1][1] - life_time[1][0], - reverse=True, - )[:max_intervals] - - persistence = sorted(persistence, key=lambda birth: birth[1][0]) - - (min_birth, max_death) = __min_birth_max_death(persistence) - ind = 0 - delta = (max_death - min_birth) * inf_delta - # Replace infinity values with max_death + delta for bar code to be more - # readable - infinity = max_death + delta - axis_start = min_birth - delta - # Draw horizontal bars in loop - for interval in reversed(persistence): - if float(interval[1][1]) != float("inf"): - # Finite death case - plt.barh( - ind, - (interval[1][1] - interval[1][0]), - height=0.8, - left=interval[1][0], - alpha=alpha, - color=palette[interval[0]], - linewidth=0, - ) - else: - # Infinite death case for diagram to be nicer - plt.barh( - ind, - (infinity - interval[1][0]), - height=0.8, - left=interval[1][0], - alpha=alpha, - color=palette[interval[0]], - linewidth=0, - ) - ind = ind + 1 - - if legend: - dimensions = list(set(item[0] for item in persistence)) - plt.legend( - handles=[ - mpatches.Patch(color=palette[dim], label=str(dim)) - for dim in dimensions - ], - loc="lower right", - ) - plt.title("Persistence barcode") - # Ends plot on infinity value and starts a little bit before min_birth - plt.axis([axis_start, infinity, 0, ind]) - return plt - - except ImportError: - print("This function is not available, you may be missing matplotlib.") - - -def plot_persistence_diagram( - persistence=[], - persistence_file="", - alpha=0.6, - band=0.0, - max_intervals=1000, - max_plots=1000, - inf_delta=0.1, - legend=False, -): - """This function plots the persistence diagram from persistence values - list or from a :doc:`persistence file `. - - :param persistence: Persistence intervals values list grouped by dimension. - :type persistence: list of tuples(dimension, tuple(birth, death)). - :param persistence_file: A :doc:`persistence file ` style name - (reset persistence if both are set). - :type persistence_file: string - :param alpha: plot transparency value (0.0 transparent through 1.0 - opaque - default is 0.6). - :type alpha: float. - :param band: band (not displayed if :math:`\leq` 0. - default is 0.) - :type band: float. - :param max_intervals: maximal number of intervals to display. - Selected intervals are those with the longest life time. Set it - to 0 to see all. Default value is 1000. - :type max_intervals: int. - :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x - inf_delta)` above :code:`max_death` value. A reasonable value is - between 0.05 and 0.5 - default is 0.1. - :type inf_delta: float. - :param legend: Display the dimension color legend (default is False). - :type legend: boolean. 
- :returns: A matplotlib object containing diagram plot of persistence - (launch `show()` method on it to display it). - """ - try: - import matplotlib.pyplot as plt - import matplotlib.patches as mpatches - - if persistence_file is not "": - if path.isfile(persistence_file): - # Reset persistence - persistence = [] - diag = read_persistence_intervals_grouped_by_dimension( - persistence_file=persistence_file - ) - for key in diag.keys(): - for persistence_interval in diag[key]: - persistence.append((key, persistence_interval)) - else: - print("file " + persistence_file + " not found.") - return None - - if max_plots is not 1000: - print("Deprecated parameter. It has been replaced by max_intervals") - max_intervals = max_plots - - if max_intervals > 0 and max_intervals < len(persistence): - # Sort by life time, then takes only the max_intervals elements - persistence = sorted( - persistence, - key=lambda life_time: life_time[1][1] - life_time[1][0], - reverse=True, - )[:max_intervals] - - (min_birth, max_death) = __min_birth_max_death(persistence, band) - delta = (max_death - min_birth) * inf_delta - # Replace infinity values with max_death + delta for diagram to be more - # readable - infinity = max_death + delta - axis_start = min_birth - delta - - # line display of equation : birth = death - x = np.linspace(axis_start, infinity, 1000) - # infinity line and text - plt.plot(x, x, color="k", linewidth=1.0) - plt.plot(x, [infinity] * len(x), linewidth=1.0, color="k", alpha=alpha) - plt.text(axis_start, infinity, r"$\infty$", color="k", alpha=alpha) - # bootstrap band - if band > 0.0: - plt.fill_between(x, x, x + band, alpha=alpha, facecolor="red") - - # Draw points in loop - for interval in reversed(persistence): - if float(interval[1][1]) != float("inf"): - # Finite death case - plt.scatter( - interval[1][0], - interval[1][1], - alpha=alpha, - color=palette[interval[0]], - ) - else: - # Infinite death case for diagram to be nicer - plt.scatter( - interval[1][0], infinity, alpha=alpha, color=palette[interval[0]] - ) - - if legend: - dimensions = list(set(item[0] for item in persistence)) - plt.legend( - handles=[ - mpatches.Patch(color=palette[dim], label=str(dim)) - for dim in dimensions - ] - ) - - plt.title("Persistence diagram") - plt.xlabel("Birth") - plt.ylabel("Death") - # Ends plot on infinity value and starts a little bit before min_birth - plt.axis([axis_start, infinity, axis_start, infinity + delta]) - return plt - - except ImportError: - print("This function is not available, you may be missing matplotlib.") - - -def plot_persistence_density( - persistence=[], - persistence_file="", - nbins=300, - bw_method=None, - max_intervals=1000, - dimension=None, - cmap=None, - legend=False, -): - """This function plots the persistence density from persistence - values list or from a :doc:`persistence file `. Be - aware that this function does not distinguish the dimension, it is - up to you to select the required one. This function also does not handle - degenerate data set (scipy correlation matrix inversion can fail). - - :param persistence: Persistence intervals values list grouped by dimension. - :type persistence: list of tuples(dimension, tuple(birth, death)). - :param persistence_file: A :doc:`persistence file ` - style name (reset persistence if both are set). - :type persistence_file: string - :param nbins: Evaluate a gaussian kde on a regular grid of nbins x - nbins over data extents (default is 300) - :type nbins: int. 
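Similarly, a hedged sketch of plot_persistence_diagram described above, here with a small confidence band; the interval values are illustrative only.

import gudhi

diag = [(0, (0.0, float('inf'))), (0, (0.1, 0.4)), (1, (0.3, 0.9))]
plt = gudhi.plot_persistence_diagram(diag, band=0.05, legend=True)
plt.show()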
- :param bw_method: The method used to calculate the estimator - bandwidth. This can be 'scott', 'silverman', a scalar constant - or a callable. If a scalar, this will be used directly as - kde.factor. If a callable, it should take a gaussian_kde - instance as only parameter and return a scalar. If None - (default), 'scott' is used. See - `scipy.stats.gaussian_kde documentation - `_ - for more details. - :type bw_method: str, scalar or callable, optional. - :param max_intervals: maximal number of points used in the density - estimation. - Selected intervals are those with the longest life time. Set it - to 0 to see all. Default value is 1000. - :type max_intervals: int. - :param dimension: the dimension to be selected in the intervals - (default is None to mix all dimensions). - :type dimension: int. - :param cmap: A matplotlib colormap (default is - matplotlib.pyplot.cm.hot_r). - :type cmap: cf. matplotlib colormap. - :param legend: Display the color bar values (default is False). - :type legend: boolean. - :returns: A matplotlib object containing diagram plot of persistence - (launch `show()` method on it to display it). - """ - try: - import matplotlib.pyplot as plt - from scipy.stats import kde - - if persistence_file is not "": - if dimension is None: - # All dimension case - dimension = -1 - if path.isfile(persistence_file): - persistence_dim = read_persistence_intervals_in_dimension( - persistence_file=persistence_file, only_this_dim=dimension - ) - print(persistence_dim) - else: - print("file " + persistence_file + " not found.") - return None - - if len(persistence) > 0: - persistence_dim = np.array( - [ - (dim_interval[1][0], dim_interval[1][1]) - for dim_interval in persistence - if (dim_interval[0] == dimension) or (dimension is None) - ] - ) - - persistence_dim = persistence_dim[np.isfinite(persistence_dim[:, 1])] - if max_intervals > 0 and max_intervals < len(persistence_dim): - # Sort by life time, then takes only the max_intervals elements - persistence_dim = np.array( - sorted( - persistence_dim, - key=lambda life_time: life_time[1] - life_time[0], - reverse=True, - )[:max_intervals] - ) - - # Set as numpy array birth and death (remove undefined values - inf and NaN) - birth = persistence_dim[:, 0] - death = persistence_dim[:, 1] - - # line display of equation : birth = death - x = np.linspace(death.min(), birth.max(), 1000) - plt.plot(x, x, color="k", linewidth=1.0) - - # Evaluate a gaussian kde on a regular grid of nbins x nbins over data extents - k = kde.gaussian_kde([birth, death], bw_method=bw_method) - xi, yi = np.mgrid[ - birth.min() : birth.max() : nbins * 1j, - death.min() : death.max() : nbins * 1j, - ] - zi = k(np.vstack([xi.flatten(), yi.flatten()])) - - # default cmap value cannot be done at argument definition level as matplotlib is not yet defined. - if cmap is None: - cmap = plt.cm.hot_r - # Make the plot - plt.pcolormesh(xi, yi, zi.reshape(xi.shape), cmap=cmap) - - if legend: - plt.colorbar() - - plt.title("Persistence density") - plt.xlabel("Birth") - plt.ylabel("Death") - return plt - - except ImportError: - print( - "This function is not available, you may be missing matplotlib and/or scipy." 
- ) diff --git a/src/cython/cython/reader_utils.pyx b/src/cython/cython/reader_utils.pyx deleted file mode 100644 index 147fae71..00000000 --- a/src/cython/cython/reader_utils.pyx +++ /dev/null @@ -1,87 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.string cimport string -from libcpp.map cimport map -from libcpp.pair cimport pair - -from os import path -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2017 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2017 Inria" -__license__ = "MIT" - -cdef extern from "Reader_utils_interface.h" namespace "Gudhi": - vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator) - map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename) - vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim) - -def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'): - """Read lower triangular matrix from a CSV style file. - - :param csv_file: A CSV file style name. - :type csv_file: string - :param separator: The value separator in the CSV file. Default value is ';' - :type separator: char - - :returns: The lower triangular matrix. - :rtype: vector[vector[double]] - """ - if csv_file is not '': - if path.isfile(csv_file): - return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0])) - print("file " + csv_file + " not set or not found.") - return [] - -def read_persistence_intervals_grouped_by_dimension(persistence_file=''): - """Reads a file containing persistence intervals. - Each line might contain 2, 3 or 4 values: [[field] dimension] birth death - The return value is an `map[dim, vector[pair[birth, death]]]` - where `dim` is an `int`, `birth` a `double`, and `death` a `double`. - Note: the function does not check that birth <= death. - - :param persistence_file: A persistence file style name. - :type persistence_file: string - - :returns: The persistence pairs grouped by dimension. - :rtype: map[int, vector[pair[double, double]]] - """ - if persistence_file is not '': - if path.isfile(persistence_file): - return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file)) - print("file " + persistence_file + " not set or not found.") - return [] - -def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1): - """Reads a file containing persistence intervals. - Each line of persistence_file might contain 2, 3 or 4 values: - [[field] dimension] birth death - Note: the function does not check that birth <= death. - - :param persistence_file: A persistence file style name. - :type persistence_file: string - :param only_this_dim: The specific dimension. Default value is -1. - If `only_this_dim` = -1, dimension is ignored and all lines are returned. - If `only_this_dim` is >= 0, only the lines where dimension = - `only_this_dim` (or where dimension is not specified) are returned. - :type only_this_dim: int. - - :returns: The persistence intervals. 
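A small sketch of the reader utilities declared above; 'persistence.pers' and 'matrix.csv' are hypothetical file names, and each line of the persistence file is expected to follow the "[[field] dimension] birth death" layout described in the docstrings.

import gudhi

# Hypothetical input files, for illustration only.
diag_by_dim = gudhi.read_persistence_intervals_grouped_by_dimension(
    persistence_file='persistence.pers')
intervals_dim1 = gudhi.read_persistence_intervals_in_dimension(
    persistence_file='persistence.pers', only_this_dim=1)
matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
    csv_file='matrix.csv', separator=';')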
- :rtype: numpy array of dimension 2 - """ - if persistence_file is not '': - if path.isfile(persistence_file): - return np_array(read_pers_intervals_in_dimension(str.encode( - persistence_file), only_this_dim)) - print("file " + persistence_file + " not set or not found.") - return [] diff --git a/src/cython/cython/rips_complex.pyx b/src/cython/cython/rips_complex.pyx deleted file mode 100644 index b9a2331f..00000000 --- a/src/cython/cython/rips_complex.pyx +++ /dev/null @@ -1,98 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Rips_complex_interface.h" namespace "Gudhi": - cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface": - Rips_complex_interface() - void init_points(vector[vector[double]] values, double threshold) - void init_matrix(vector[vector[double]] values, double threshold) - void init_points_sparse(vector[vector[double]] values, double threshold, double sparse) - void init_matrix_sparse(vector[vector[double]] values, double threshold, double sparse) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max) - -# RipsComplex python interface -cdef class RipsComplex: - """The data structure is a one skeleton graph, or Rips graph, containing - edges when the edge length is less or equal to a given threshold. Edge - length is computed from a user given point cloud with a given distance - function, or a distance matrix. - """ - - cdef Rips_complex_interface thisref - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, points=None, distance_matrix=None, - max_edge_length=float('inf'), sparse=None): - """RipsComplex constructor. - - :param max_edge_length: Rips value. - :type max_edge_length: float - - :param points: A list of points in d-Dimension. - :type points: list of list of double - - Or - - :param distance_matrix: A distance matrix (full square or lower - triangular). - :type points: list of list of double - - And in both cases - - :param sparse: If this is not None, it switches to building a sparse - Rips and represents the approximation parameter epsilon. - :type sparse: float - """ - - # The real cython constructor - def __cinit__(self, points=None, distance_matrix=None, - max_edge_length=float('inf'), sparse=None): - if sparse is not None: - if distance_matrix is not None: - self.thisref.init_matrix_sparse(distance_matrix, - max_edge_length, - sparse) - else: - if points is None: - # Empty Rips construction - points=[] - self.thisref.init_points_sparse(points, max_edge_length, sparse) - else: - if distance_matrix is not None: - self.thisref.init_matrix(distance_matrix, max_edge_length) - else: - if points is None: - # Empty Rips construction - points=[] - self.thisref.init_points(points, max_edge_length) - - - def create_simplex_tree(self, max_dimension=1): - """ - :param max_dimension: graph expansion for rips until this given maximal - dimension. 
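A minimal sketch of the RipsComplex interface documented above, built from an explicit point cloud; the coordinates and threshold are arbitrary illustration values.

import gudhi

points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
rips = gudhi.RipsComplex(points=points, max_edge_length=1.5)
# Expand the Rips graph up to dimension 2 (triangles).
st = rips.create_simplex_tree(max_dimension=2)
print(st.num_simplices())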
- :type max_dimension: int - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - self.thisref.create_simplex_tree(simplex_tree.thisptr, max_dimension) - return simplex_tree diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx deleted file mode 100644 index 8e791c17..00000000 --- a/src/cython/cython/simplex_tree.pyx +++ /dev/null @@ -1,542 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp cimport bool -from libcpp.string cimport string - -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": - cdef cppclass Simplex_tree_options_full_featured: - pass - - cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface": - Simplex_tree() - double simplex_filtration(vector[int] simplex) - void assign_simplex_filtration(vector[int] simplex, double filtration) - void initialize_filtration() - int num_vertices() - int num_simplices() - void set_dimension(int dimension) - int dimension() - int upper_bound_dimension() - bool find_simplex(vector[int] simplex) - bool insert_simplex_and_subfaces(vector[int] simplex, - double filtration) - vector[pair[vector[int], double]] get_filtration() - vector[pair[vector[int], double]] get_skeleton(int dimension) - vector[pair[vector[int], double]] get_star(vector[int] simplex) - vector[pair[vector[int], double]] get_cofaces(vector[int] simplex, - int dimension) - void expansion(int max_dim) - void remove_maximal_simplex(vector[int] simplex) - bool prune_above_filtration(double filtration) - bool make_filtration_non_decreasing() - -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": - Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - void write_output_diagram(string diagram_file_name) - vector[pair[vector[int], vector[int]]] persistence_pairs() - -# SimplexTree python interface -cdef class SimplexTree: - """The simplex tree is an efficient and flexible data structure for - representing general (filtered) simplicial complexes. The data structure - is described in Jean-Daniel Boissonnat and Clément Maria. The Simplex - Tree: An Efficient Data Structure for General Simplicial Complexes. - Algorithmica, pages 1–22, 2014. - - This class is a filtered, with keys, and non contiguous vertices version - of the simplex tree. - """ - cdef Simplex_tree_interface_full_featured * thisptr - - cdef Simplex_tree_persistence_interface * pcohptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self): - """SimplexTree constructor. 
- """ - - # The real cython constructor - def __cinit__(self): - self.thisptr = new Simplex_tree_interface_full_featured() - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - if self.pcohptr != NULL: - del self.pcohptr - - def __is_defined(self): - """Returns true if SimplexTree pointer is not NULL. - """ - return self.thisptr != NULL - - def __is_persistence_defined(self): - """Returns true if Persistence pointer is not NULL. - """ - return self.pcohptr != NULL - - def filtration(self, simplex): - """This function returns the filtration value for a given N-simplex in - this simplicial complex, or +infinity if it is not in the complex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :returns: The simplicial complex filtration value. - :rtype: float - """ - return self.thisptr.simplex_filtration(simplex) - - def assign_filtration(self, simplex, filtration): - """This function assigns the simplicial complex filtration value for a - given N-simplex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :param filtration: The simplicial complex filtration value. - :type filtration: float - """ - self.thisptr.assign_simplex_filtration(simplex, filtration) - - def initialize_filtration(self): - """This function initializes and sorts the simplicial complex - filtration vector. - - .. note:: - - This function must be launched before - :func:`persistence()`, - :func:`betti_numbers()`, - :func:`persistent_betti_numbers()`, - or :func:`get_filtration()` - after :func:`inserting` or - :func:`removing` - simplices. - """ - self.thisptr.initialize_filtration() - - def num_vertices(self): - """This function returns the number of vertices of the simplicial - complex. - - :returns: The simplicial complex number of vertices. - :rtype: int - """ - return self.thisptr.num_vertices() - - def num_simplices(self): - """This function returns the number of simplices of the simplicial - complex. - - :returns: the simplicial complex number of simplices. - :rtype: int - """ - return self.thisptr.num_simplices() - - def dimension(self): - """This function returns the dimension of the simplicial complex. - - :returns: the simplicial complex dimension. - :rtype: int - - .. note:: - - This function is not constant time because it can recompute - dimension if required (can be triggered by - :func:`remove_maximal_simplex()` - or - :func:`prune_above_filtration()` - methods). - """ - return self.thisptr.dimension() - - def upper_bound_dimension(self): - """This function returns a valid dimension upper bound of the - simplicial complex. - - :returns: an upper bound on the dimension of the simplicial complex. - :rtype: int - """ - return self.thisptr.upper_bound_dimension() - - def set_dimension(self, dimension): - """This function sets the dimension of the simplicial complex. - - :param dimension: The new dimension value. - :type dimension: int. - - .. note:: - - This function must be used with caution because it disables - dimension recomputation when required - (this recomputation can be triggered by - :func:`remove_maximal_simplex()` - or - :func:`prune_above_filtration()` - ). - """ - self.thisptr.set_dimension(dimension) - - def find(self, simplex): - """This function returns if the N-simplex was found in the simplicial - complex or not. - - :param simplex: The N-simplex to find, represented by a list of vertex. - :type simplex: list of int. - :returns: true if the simplex was found, false otherwise. 
- :rtype: bool - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - return self.thisptr.find_simplex(csimplex) - - def insert(self, simplex, filtration=0.0): - """This function inserts the given N-simplex and its subfaces with the - given filtration value (default value is '0.0'). If some of those - simplices are already present with a higher filtration value, their - filtration value is lowered. - - :param simplex: The N-simplex to insert, represented by a list of - vertex. - :type simplex: list of int. - :param filtration: The filtration value of the simplex. - :type filtration: float. - :returns: true if the simplex was not yet in the complex, false - otherwise (whatever its original filtration value). - :rtype: bool - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - return self.thisptr.insert_simplex_and_subfaces(csimplex, - filtration) - - def get_filtration(self): - """This function returns a list of all simplices with their given - filtration values. - - :returns: The simplices sorted by increasing filtration values. - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[pair[vector[int], double]] filtration \ - = self.thisptr.get_filtration() - ct = [] - for filtered_complex in filtration: - v = [] - for vertex in filtered_complex.first: - v.append(vertex) - ct.append((v, filtered_complex.second)) - return ct - - def get_skeleton(self, dimension): - """This function returns the (simplices of the) skeleton of a maximum - given dimension. - - :param dimension: The skeleton dimension value. - :type dimension: int. - :returns: The (simplices of the) skeleton of a maximum dimension. - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[pair[vector[int], double]] skeleton \ - = self.thisptr.get_skeleton(dimension) - ct = [] - for filtered_simplex in skeleton: - v = [] - for vertex in filtered_simplex.first: - v.append(vertex) - ct.append((v, filtered_simplex.second)) - return ct - - def get_star(self, simplex): - """This function returns the star of a given N-simplex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :returns: The (simplices of the) star of a simplex. - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - cdef vector[pair[vector[int], double]] star \ - = self.thisptr.get_star(csimplex) - ct = [] - for filtered_simplex in star: - v = [] - for vertex in filtered_simplex.first: - v.append(vertex) - ct.append((v, filtered_simplex.second)) - return ct - - def get_cofaces(self, simplex, codimension): - """This function returns the cofaces of a given N-simplex with a - given codimension. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :param codimension: The codimension. If codimension = 0, all cofaces - are returned (equivalent of get_star function) - :type codimension: int. 
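Putting the SimplexTree accessors documented above together, a short hedged sketch; the simplices and filtration values are arbitrary.

import gudhi

st = gudhi.SimplexTree()
st.insert([0, 1, 2], filtration=0.5)   # inserts the triangle and all of its faces
st.insert([3], filtration=0.0)
print(st.find([0, 1]))                 # True: the edge is a face of the triangle
print(st.filtration([0, 1, 2]))        # 0.5
st.initialize_filtration()             # sort the filtration after insertions, as required above
for simplex, filt in st.get_filtration():
    print(simplex, filt)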
- :returns: The (simplices of the) cofaces of a simplex - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - cdef vector[pair[vector[int], double]] cofaces \ - = self.thisptr.get_cofaces(csimplex, codimension) - ct = [] - for filtered_simplex in cofaces: - v = [] - for vertex in filtered_simplex.first: - v.append(vertex) - ct.append((v, filtered_simplex.second)) - return ct - - def remove_maximal_simplex(self, simplex): - """This function removes a given maximal N-simplex from the simplicial - complex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - - .. note:: - - Be aware that removing is shifting data in a flat_map - (:func:`initialize_filtration()` to be done). - - .. note:: - - The dimension of the simplicial complex may be lower after calling - remove_maximal_simplex than it was before. However, - :func:`upper_bound_dimension()` - method will return the old value, which - remains a valid upper bound. If you care, you can call - :func:`dimension()` - to recompute the exact dimension. - """ - self.thisptr.remove_maximal_simplex(simplex) - - def prune_above_filtration(self, filtration): - """Prune above filtration value given as parameter. - - :param filtration: Maximum threshold value. - :type filtration: float. - :returns: The filtration modification information. - :rtype: bool - - - .. note:: - - Some simplex tree functions require the filtration to be valid. - prune_above_filtration function is not launching - :func:`initialize_filtration()` - but returns the filtration modification - information. If the complex has changed , please call - :func:`initialize_filtration()` - to recompute it. - - .. note:: - - Note that the dimension of the simplicial complex may be lower - after calling - :func:`prune_above_filtration()` - than it was before. However, - :func:`upper_bound_dimension()` - will return the old value, which remains a - valid upper bound. If you care, you can call - :func:`dimension()` - method to recompute the exact dimension. - """ - return self.thisptr.prune_above_filtration(filtration) - - def expansion(self, max_dim): - """Expands the Simplex_tree containing only its one skeleton - until dimension max_dim. - - The expanded simplicial complex until dimension :math:`d` - attached to a graph :math:`G` is the maximal simplicial complex of - dimension at most :math:`d` admitting the graph :math:`G` as - :math:`1`-skeleton. - The filtration value assigned to a simplex is the maximal filtration - value of one of its edges. - - The Simplex_tree must contain no simplex of dimension bigger than - 1 when calling the method. - - :param max_dim: The maximal dimension. - :type max_dim: int. - """ - self.thisptr.expansion(max_dim) - - def make_filtration_non_decreasing(self): - """This function ensures that each simplex has a higher filtration - value than its faces by increasing the filtration values. - - :returns: True if any filtration value was modified, - False if the filtration was already non-decreasing. - :rtype: bool - - - .. note:: - - Some simplex tree functions require the filtration to be valid. - make_filtration_non_decreasing function is not launching - :func:`initialize_filtration()` - but returns the filtration modification - information. If the complex has changed , please call - :func:`initialize_filtration()` - to recompute it. 
- """ - return self.thisptr.make_filtration_non_decreasing() - - def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False): - """This function returns the persistence of the simplicial complex. - - :param homology_coeff_field: The homology coefficient field. Must be a - prime number. Default value is 11. - :type homology_coeff_field: int. - :param min_persistence: The minimum persistence value to take into - account (strictly greater than min_persistence). Default value is - 0.0. - Sets min_persistence to -1.0 to see all values. - :type min_persistence: float. - :param persistence_dim_max: If true, the persistent homology for the - maximal dimension in the complex is computed. If false, it is - ignored. Default is false. - :type persistence_dim_max: bool - :returns: The persistence of the simplicial complex. - :rtype: list of pairs(dimension, pair(birth, death)) - """ - if self.pcohptr != NULL: - del self.pcohptr - self.pcohptr = new Simplex_tree_persistence_interface(self.thisptr, persistence_dim_max) - cdef vector[pair[int, pair[double, double]]] persistence_result - if self.pcohptr != NULL: - persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) - return persistence_result - - def betti_numbers(self): - """This function returns the Betti numbers of the simplicial complex. - - :returns: The Betti numbers ([B0, B1, ..., Bn]). - :rtype: list of int - - :note: betti_numbers function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[int] bn_result - if self.pcohptr != NULL: - bn_result = self.pcohptr.betti_numbers() - else: - print("betti_numbers function requires persistence function" - " to be launched first.") - return bn_result - - def persistent_betti_numbers(self, from_value, to_value): - """This function returns the persistent Betti numbers of the - simplicial complex. - - :param from_value: The persistence birth limit to be added in the - numbers (persistent birth <= from_value). - :type from_value: float. - :param to_value: The persistence death limit to be added in the - numbers (persistent death > to_value). - :type to_value: float. - - :returns: The persistent Betti numbers ([B0, B1, ..., Bn]). - :rtype: list of int - - :note: persistent_betti_numbers function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[int] pbn_result - if self.pcohptr != NULL: - pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) - else: - print("persistent_betti_numbers function requires persistence function" - " to be launched first.") - return pbn_result - - def persistence_intervals_in_dimension(self, dimension): - """This function returns the persistence intervals of the simplicial - complex in a specific dimension. - - :param dimension: The specific dimension. - :type dimension: int. - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - - :note: intervals_in_dim function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[pair[double,double]] intervals_result - if self.pcohptr != NULL: - intervals_result = self.pcohptr.intervals_in_dimension(dimension) - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") - return np_array(intervals_result) - - def persistence_pairs(self): - """This function returns a list of persistence birth and death simplices pairs. - - :returns: A list of persistence simplices intervals. 
- :rtype: list of pair of list of int - - :note: persistence_pairs function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result - if self.pcohptr != NULL: - persistence_pairs_result = self.pcohptr.persistence_pairs() - else: - print("persistence_pairs function requires persistence function" - " to be launched first.") - return persistence_pairs_result - - def write_persistence_diagram(self, persistence_file=''): - """This function writes the persistence intervals of the simplicial - complex in a user given file name. - - :param persistence_file: The specific dimension. - :type persistence_file: string. - - :note: intervals_in_dim function requires - :func:`persistence()` - function to be launched first. - """ - if self.pcohptr != NULL: - if persistence_file != '': - self.pcohptr.write_output_diagram(str.encode(persistence_file)) - else: - print("persistence_file must be specified") - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") diff --git a/src/cython/cython/strong_witness_complex.pyx b/src/cython/cython/strong_witness_complex.pyx deleted file mode 100644 index 8c155815..00000000 --- a/src/cython/cython/strong_witness_complex.pyx +++ /dev/null @@ -1,71 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface": - Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - -# StrongWitnessComplex python interface -cdef class StrongWitnessComplex: - """Constructs (strong) witness complex for a given table of nearest - landmarks with respect to witnesses. - """ - - cdef Strong_witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, nearest_landmark_table=None): - """StrongWitnessComplex constructor. - - :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. - `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to - witness w, and d is the (squared) distance between l and w. - :type nearest_landmark_table: list of list of pair of int and float - """ - - # The real cython constructor - def __cinit__(self, nearest_landmark_table=None): - if nearest_landmark_table is not None: - self.thisptr = new Strong_witness_complex_interface(nearest_landmark_table) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if StrongWitnessComplex pointer is not NULL. 
- """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): - """ - :param max_alpha_square: The maximum relaxation parameter. - Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree diff --git a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx deleted file mode 100644 index 1135c1fb..00000000 --- a/src/cython/cython/subsampling.pyx +++ /dev/null @@ -1,130 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.string cimport string -from libcpp cimport bool -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling": - vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points) - vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point) - vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points) - vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point) - vector[vector[double]] subsampling_n_random_points(vector[vector[double]] points, unsigned nb_points) - vector[vector[double]] subsampling_n_random_points_from_file(string off_file, unsigned nb_points) - vector[vector[double]] subsampling_sparsify_points(vector[vector[double]] points, double min_squared_dist) - vector[vector[double]] subsampling_sparsify_points_from_file(string off_file, double min_squared_dist) - -def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_point = ''): - """Subsample by a greedy strategy of iteratively adding the farthest point - from the current chosen point set to the subsampling. - The iteration starts with the landmark `starting point`. - - :param points: The input point set. - :type points: vector[vector[double]]. - - Or - - :param off_file: An OFF file style name. - :type off_file: string - - :param nb_points: Number of points of the subsample. - :type nb_points: unsigned. - :param starting_point: The iteration starts with the landmark `starting \ - point`,which is the index of the poit to start with. If not set, this \ - index is choosen randomly. - :type starting_point: unsigned. - :returns: The subsample point set. 
- :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - if starting_point is '': - return subsampling_n_farthest_points_from_file(str.encode(off_file), - nb_points) - else: - return subsampling_n_farthest_points_from_file(str.encode(off_file), - nb_points, - starting_point) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty points - points=[] - if starting_point is '': - return subsampling_n_farthest_points(points, nb_points) - else: - return subsampling_n_farthest_points(points, nb_points, - starting_point) - -def pick_n_random_points(points=None, off_file='', nb_points=0): - """Subsample a point set by picking random vertices. - - :param points: The input point set. - :type points: vector[vector[double]]. - - Or - - :param off_file: An OFF file style name. - :type off_file: string - - :param nb_points: Number of points of the subsample. - :type nb_points: unsigned. - :returns: The subsample point set. - :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - return subsampling_n_random_points_from_file(str.encode(off_file), - nb_points) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty points - points=[] - return subsampling_n_random_points(points, nb_points) - -def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0): - """Outputs a subset of the input points so that the squared distance - between any two points is greater than or equal to min_squared_dist. - - :param points: The input point set. - :type points: vector[vector[double]]. - - Or - - :param off_file: An OFF file style name. - :type off_file: string - - :param min_squared_dist: Minimum squared distance separating the output \ - points. - :type min_squared_dist: float. - :returns: The subsample point set. - :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - return subsampling_sparsify_points_from_file(str.encode(off_file), - min_squared_dist) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty points - points=[] - return subsampling_sparsify_points(points, min_squared_dist) diff --git a/src/cython/cython/tangential_complex.pyx b/src/cython/cython/tangential_complex.pyx deleted file mode 100644 index 00a84810..00000000 --- a/src/cython/cython/tangential_complex.pyx +++ /dev/null @@ -1,168 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Tangential_complex_interface.h" namespace "Gudhi": - cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface": - Tangential_complex_interface(int intrisic_dim, vector[vector[double]] points) - # bool from_file is a workaround for cython to find the correct signature - Tangential_complex_interface(int intrisic_dim, string off_file, bool from_file) - void compute_tangential_complex() except + - vector[double] get_point(unsigned vertex) - unsigned number_of_vertices() - unsigned number_of_simplices() - unsigned number_of_inconsistent_simplices() - unsigned number_of_inconsistent_stars() - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) - void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) - void set_max_squared_edge_length(double max_squared_edge_length) - -# TangentialComplex python interface -cdef class TangentialComplex: - """The class Tangential_complex represents a tangential complex. After the - computation of the complex, an optional post-processing called perturbation - can be run to attempt to remove inconsistencies. - """ - - cdef Tangential_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, intrisic_dim, points=None, off_file=''): - """TangentialComplex constructor. - - :param intrisic_dim: Intrinsic dimension of the manifold. - :type intrisic_dim: integer - - :param points: A list of points in d-Dimension. - :type points: list of list of double - - Or - - :param off_file: An OFF file style name. - :type off_file: string - """ - - # The real cython constructor - def __cinit__(self, intrisic_dim, points=None, off_file=''): - if off_file is not '': - if os.path.isfile(off_file): - self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty tangential construction - points=[] - self.thisptr = new Tangential_complex_interface(intrisic_dim, points) - - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if TangentialComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def compute_tangential_complex(self): - """This function computes the tangential complex. - - Raises: - ValueError: In debug mode, if the computed star dimension is too - low. Try to set a bigger maximal edge length value with - :func:`~gudhi.Tangential_complex.set_max_squared_edge_length` - if this happens. - """ - self.thisptr.compute_tangential_complex() - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int. - :returns: The point. - :rtype: list of float - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - - def num_vertices(self): - """ - :returns: The number of vertices. - :rtype: unsigned - """ - return self.thisptr.number_of_vertices() - - def num_simplices(self): - """ - :returns: Total number of simplices in stars (including duplicates that appear in several stars). 
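A hedged sketch of the TangentialComplex interface documented above, for points sampled near a 1-dimensional manifold; the coordinates are illustrative.

import gudhi

points = [[0.0, 0.0], [1.0, 0.0], [2.0, 0.1], [3.0, 0.0]]
tc = gudhi.TangentialComplex(1, points=points)   # intrinsic dimension 1
tc.compute_tangential_complex()
print(tc.num_vertices(), tc.num_simplices())
print(tc.get_point(0))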
- :rtype: unsigned - """ - return self.thisptr.number_of_simplices() - - def num_inconsistent_simplices(self): - """ - :returns: The number of inconsistent simplices. - :rtype: unsigned - """ - return self.thisptr.number_of_inconsistent_simplices() - - def num_inconsistent_stars(self): - """ - :returns: The number of stars containing at least one inconsistent simplex. - :rtype: unsigned - """ - return self.thisptr.number_of_inconsistent_stars() - - def create_simplex_tree(self): - """Exports the complex into a simplex tree. - - :returns: A simplex tree created from the complex. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - self.thisptr.create_simplex_tree(simplex_tree.thisptr) - return simplex_tree - - def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0): - """Attempts to fix inconsistencies by perturbing the point positions. - - :param max_perturb: Maximum length of the translations used by the - perturbation. - :type max_perturb: double - :param time_limit: Time limit in seconds. If -1, no time limit is set. - :type time_limit: double - """ - self.thisptr.fix_inconsistencies_using_perturbation(max_perturb, - time_limit) - - def set_max_squared_edge_length(self, max_squared_edge_length): - """Sets the maximal possible squared edge length for the edges in the - triangulations. - - :param max_squared_edge_length: Maximal possible squared edge length. - :type max_squared_edge_length: double - - If the maximal edge length value is too low - :func:`~gudhi.Tangential_complex.compute_tangential_complex` - will throw an exception in debug mode. - """ - self.thisptr.set_max_squared_edge_length(max_squared_edge_length) diff --git a/src/cython/cython/witness_complex.pyx b/src/cython/cython/witness_complex.pyx deleted file mode 100644 index 91046f57..00000000 --- a/src/cython/cython/witness_complex.pyx +++ /dev/null @@ -1,71 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface": - Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - -# WitnessComplex python interface -cdef class WitnessComplex: - """Constructs (weak) witness complex for a given table of nearest landmarks - with respect to witnesses. - """ - - cdef Witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, nearest_landmark_table=None): - """WitnessComplex constructor. - - :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. - `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to - witness w, and d is the (squared) distance between l and w. 
- :type nearest_landmark_table: list of list of pair of int and float - """ - - # The real cython constructor - def __cinit__(self, nearest_landmark_table=None): - if nearest_landmark_table is not None: - self.thisptr = new Witness_complex_interface(nearest_landmark_table) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if WitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): - """ - :param max_alpha_square: The maximum relaxation parameter. - Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - simplex_tree = SimplexTree() - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree diff --git a/src/cython/gudhi/__init__.py.in b/src/cython/gudhi/__init__.py.in new file mode 100644 index 00000000..60ad7865 --- /dev/null +++ b/src/cython/gudhi/__init__.py.in @@ -0,0 +1,40 @@ +from importlib import import_module + +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "https://gudhi.inria.fr/licensing/" +__version__ = "@GUDHI_VERSION@" +# This variable is used by doctest to find files +__root_source_dir__ = "@CMAKE_SOURCE_DIR@" +__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@ + +from sys import exc_info +from importlib import import_module + +__all__ = [@GUDHI_CYTHON_MODULES@] + +__available_modules__ = '' +__missing_modules__ = '' + +# try to import * from gudhi.__module_name__ +for __module_name__ in __all__: + try: + __module__ = import_module('gudhi.' + __module_name__) + try: + __to_import__ = __module__.__all__ + except AttributeError: + __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')] + globals().update({name: __module__.__dict__[name] for name in __to_import__}) + __available_modules__ += __module_name__ + ";" + except: + __missing_modules__ += __module_name__ + ";" diff --git a/src/cython/gudhi/alpha_complex.pyx b/src/cython/gudhi/alpha_complex.pyx new file mode 100644 index 00000000..85131780 --- /dev/null +++ b/src/cython/gudhi/alpha_complex.pyx @@ -0,0 +1,115 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +from libc.stdint cimport intptr_t +import os + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
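A sketch of the WitnessComplex interface documented above; the nearest-landmark table is a made-up toy in which each witness lists (landmark id, squared distance) pairs by increasing distance, as the constructor docstring specifies.

import gudhi

nlt = [[(0, 0.0), (1, 0.1), (2, 0.9)],
       [(1, 0.0), (2, 0.2), (0, 0.8)],
       [(2, 0.0), (0, 0.3), (1, 0.7)]]
wc = gudhi.WitnessComplex(nearest_landmark_table=nlt)
st = wc.create_simplex_tree(max_alpha_square=0.5)
print(st.num_simplices())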
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": + cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": + Alpha_complex_interface(vector[vector[double]] points) + # bool from_file is a workaround for cython to find the correct signature + Alpha_complex_interface(string off_file, bool from_file) + vector[double] get_point(int vertex) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + +# AlphaComplex python interface +cdef class AlphaComplex: + """AlphaComplex is a simplicial complex constructed from the finite cells + of a Delaunay Triangulation. + + The filtration value of each simplex is computed as the square of the + circumradius of the simplex if the circumsphere is empty (the simplex is + then said to be Gabriel), and as the minimum of the filtration values of + the codimension 1 cofaces that make it not Gabriel otherwise. + + All simplices that have a filtration value strictly greater than a given + alpha squared value are not inserted into the complex. + + .. note:: + + When Alpha_complex is constructed with an infinite value of alpha, the + complex is a Delaunay complex. + + """ + + cdef Alpha_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, points=None, off_file=''): + """AlphaComplex constructor. + + :param points: A list of points in d-Dimension. + :type points: list of list of double + + Or + + :param off_file: An OFF file style name. + :type off_file: string + """ + + # The real cython constructor + def __cinit__(self, points=None, off_file=''): + if off_file is not '': + if os.path.isfile(off_file): + self.thisptr = new Alpha_complex_interface(str.encode(off_file), True) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty Alpha construction + points=[] + self.thisptr = new Alpha_complex_interface(points) + + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if AlphaComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int + :rtype: list of float + :returns: the point. + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + + def create_simplex_tree(self, max_alpha_square=float('inf')): + """ + :param max_alpha_square: The maximum alpha square threshold the + simplices shall not exceed. Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. 
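A minimal sketch of the AlphaComplex interface documented above; the point coordinates and the alpha-square threshold are arbitrary illustration values.

import gudhi

points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
ac = gudhi.AlphaComplex(points=points)
st = ac.create_simplex_tree(max_alpha_square=2.0)
print(ac.get_point(0))
print(st.num_simplices())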
+ :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + return stree diff --git a/src/cython/gudhi/bottleneck_distance.pyx b/src/cython/gudhi/bottleneck_distance.pyx new file mode 100644 index 00000000..4b378cbc --- /dev/null +++ b/src/cython/gudhi/bottleneck_distance.pyx @@ -0,0 +1,49 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": + double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) + double bottleneck(vector[pair[double, double]], vector[pair[double, double]]) + +def bottleneck_distance(diagram_1, diagram_2, e=None): + """This function returns the point corresponding to a given vertex. + + :param diagram_1: The first diagram. + :type diagram_1: vector[pair[double, double]] + :param diagram_2: The second diagram. + :type diagram_2: vector[pair[double, double]] + :param e: If `e` is 0, this uses an expensive algorithm to compute the + exact distance. + If `e` is not 0, it asks for an additive `e`-approximation, and + currently also allows a small multiplicative error (the last 2 or 3 + bits of the mantissa may be wrong). This version of the algorithm takes + advantage of the limited precision of `double` and is usually a lot + faster to compute, whatever the value of `e`. + + Thus, by default, `e` is the smallest positive double. + :type e: float + :rtype: float + :returns: the bottleneck distance. + """ + if e is None: + # Default value is the smallest double value (not 0, 0 is for exact version) + return bottleneck(diagram_1, diagram_2) + else: + # Can be 0 for exact version + return bottleneck(diagram_1, diagram_2, e) diff --git a/src/cython/gudhi/cubical_complex.pyx b/src/cython/gudhi/cubical_complex.pyx new file mode 100644 index 00000000..0dc133d1 --- /dev/null +++ b/src/cython/gudhi/cubical_complex.pyx @@ -0,0 +1,188 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +import os + +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
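A short sketch of bottleneck_distance as documented above; the two toy diagrams are lists of (birth, death) pairs chosen only for illustration.

import gudhi

diag1 = [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974)]
diag2 = [(2.8, 4.45), (9.5, 14.1)]
print(gudhi.bottleneck_distance(diag1, diag2))        # default e: smallest positive double
print(gudhi.bottleneck_distance(diag1, diag2, 0.1))   # additive 0.1-approximation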
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": + cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>": + Bitmap_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells) + Bitmap_cubical_complex_base_interface(string perseus_file) + int num_simplices() + int dimension() + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>": + Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + +# CubicalComplex python interface +cdef class CubicalComplex: + """The CubicalComplex is an example of a structured complex useful in + computational mathematics (specially rigorous numerics) and image + analysis. + """ + cdef Bitmap_cubical_complex_base_interface * thisptr + + cdef Cubical_complex_persistence_interface * pcohptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, dimensions=None, top_dimensional_cells=None, + perseus_file=''): + """CubicalComplex constructor from dimensions and + top_dimensional_cells or from a Perseus-style file name. + + :param dimensions: A list of number of top dimensional cells. + :type dimensions: list of int + :param top_dimensional_cells: A list of cells filtration values. + :type top_dimensional_cells: list of double + + Or + + :param perseus_file: A Perseus-style file name. + :type perseus_file: string + """ + + # The real cython constructor + def __cinit__(self, dimensions=None, top_dimensional_cells=None, + perseus_file=''): + if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file is ''): + self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells) + elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file is not ''): + if os.path.isfile(perseus_file): + self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file)) + else: + print("file " + perseus_file + " not found.") + else: + print("CubicalComplex can be constructed from dimensions and " + "top_dimensional_cells or from a Perseus-style file name.") + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + if self.pcohptr != NULL: + del self.pcohptr + + def __is_defined(self): + """Returns true if CubicalComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def __is_persistence_defined(self): + """Returns true if Persistence pointer is not NULL. + """ + return self.pcohptr != NULL + + def num_simplices(self): + """This function returns the number of all cubes in the complex. + + :returns: int -- the number of all cubes in the complex. + """ + return self.thisptr.num_simplices() + + def dimension(self): + """This function returns the dimension of the complex. + + :returns: int -- the complex dimension. 
+ """ + return self.thisptr.dimension() + + def persistence(self, homology_coeff_field=11, min_persistence=0): + """This function returns the persistence of the complex. + + :param homology_coeff_field: The homology coefficient field. Must be a + prime number + :type homology_coeff_field: int. + :param min_persistence: The minimum persistence value to take into + account (strictly greater than min_persistence). Default value is + 0.0. + Sets min_persistence to -1.0 to see all values. + :type min_persistence: float. + :returns: list of pairs(dimension, pair(birth, death)) -- the + persistence of the complex. + """ + if self.pcohptr != NULL: + del self.pcohptr + if self.thisptr != NULL: + self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True) + cdef vector[pair[int, pair[double, double]]] persistence_result + if self.pcohptr != NULL: + persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) + return persistence_result + + def betti_numbers(self): + """This function returns the Betti numbers of the complex. + + :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). + + :note: betti_numbers function requires persistence function to be + launched first. + + :note: betti_numbers function always returns [1, 0, 0, ...] as infinity + filtration cubes are not removed from the complex. + """ + cdef vector[int] bn_result + if self.pcohptr != NULL: + bn_result = self.pcohptr.betti_numbers() + return bn_result + + def persistent_betti_numbers(self, from_value, to_value): + """This function returns the persistent Betti numbers of the complex. + + :param from_value: The persistence birth limit to be added in the + numbers (persistent birth <= from_value). + :type from_value: float. + :param to_value: The persistence death limit to be added in the + numbers (persistent death > to_value). + :type to_value: float. + + :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., + Bn]). + + :note: persistent_betti_numbers function requires persistence + function to be launched first. + """ + cdef vector[int] pbn_result + if self.pcohptr != NULL: + pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) + return pbn_result + + def persistence_intervals_in_dimension(self, dimension): + """This function returns the persistence intervals of the complex in a + specific dimension. + + :param dimension: The specific dimension. + :type dimension: int. + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + + :note: intervals_in_dim function requires persistence function to be + launched first. + """ + cdef vector[pair[double,double]] intervals_result + if self.pcohptr != NULL: + intervals_result = self.pcohptr.intervals_in_dimension(dimension) + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") + return np_array(intervals_result) diff --git a/src/cython/gudhi/euclidean_strong_witness_complex.pyx b/src/cython/gudhi/euclidean_strong_witness_complex.pyx new file mode 100644 index 00000000..26bd8375 --- /dev/null +++ b/src/cython/gudhi/euclidean_strong_witness_complex.pyx @@ -0,0 +1,85 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface": + Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + vector[double] get_point(unsigned vertex) + +# EuclideanStrongWitnessComplex python interface +cdef class EuclideanStrongWitnessComplex: + """Constructs strong witness complex for given sets of witnesses and + landmarks in Euclidean space. + """ + + cdef Euclidean_strong_witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, landmarks=None, witnesses=None): + """WitnessComplex constructor. + + :param landmarks: A list of landmarks (in the point cloud). + :type landmarks: list of list of double + + :param witnesses: The point cloud. + :type witnesses: list of list of double + """ + + # The real cython constructor + def __cinit__(self, landmarks=None, witnesses=None): + if landmarks is not None and witnesses is not None: + self.thisptr = new Euclidean_strong_witness_complex_interface(landmarks, witnesses) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if WitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): + """ + :param max_alpha_square: The maximum alpha square threshold the + simplices shall not exceed. Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + simplex_tree = SimplexTree() + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) + return simplex_tree + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int. + :returns: The point. + :rtype: list of float + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + diff --git a/src/cython/gudhi/euclidean_witness_complex.pyx b/src/cython/gudhi/euclidean_witness_complex.pyx new file mode 100644 index 00000000..e687c6f3 --- /dev/null +++ b/src/cython/gudhi/euclidean_witness_complex.pyx @@ -0,0 +1,85 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface": + Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + vector[double] get_point(unsigned vertex) + +# EuclideanWitnessComplex python interface +cdef class EuclideanWitnessComplex: + """Constructs (weak) witness complex for given sets of witnesses and + landmarks in Euclidean space. + """ + + cdef Euclidean_witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, landmarks=None, witnesses=None): + """WitnessComplex constructor. + + :param landmarks: A list of landmarks (in the point cloud). + :type landmarks: list of list of double + + :param witnesses: The point cloud. + :type witnesses: list of list of double + """ + + # The real cython constructor + def __cinit__(self, landmarks=None, witnesses=None): + if landmarks is not None and witnesses is not None: + self.thisptr = new Euclidean_witness_complex_interface(landmarks, witnesses) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if WitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): + """ + :param max_alpha_square: The maximum alpha square threshold the + simplices shall not exceed. Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + simplex_tree = SimplexTree() + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) + return simplex_tree + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int. + :returns: The point. + :rtype: list of float + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + diff --git a/src/cython/gudhi/nerve_gic.pyx b/src/cython/gudhi/nerve_gic.pyx new file mode 100644 index 00000000..3c8f1200 --- /dev/null +++ b/src/cython/gudhi/nerve_gic.pyx @@ -0,0 +1,407 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "GPL v3" + +cdef extern from "Nerve_gic_interface.h" namespace "Gudhi": + cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface": + Nerve_gic_interface() + double compute_confidence_level_from_distance(double distance) + double compute_distance_from_confidence_level(double alpha) + void compute_distribution(int N) + double compute_p_value() + vector[pair[double, double]] compute_PD() + void find_simplices() + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) + bool read_point_cloud(string off_file_name) + double set_automatic_resolution() + void set_color_from_coordinate(int k) + void set_color_from_file(string color_file_name) + void set_color_from_range(vector[double] color) + void set_cover_from_file(string cover_file_name) + void set_cover_from_function() + void set_cover_from_Euclidean_Voronoi(int m) + void set_function_from_coordinate(int k) + void set_function_from_file(string func_file_name) + void set_function_from_range(vector[double] function) + void set_gain(double g) + double set_graph_from_automatic_euclidean_rips(int N) + void set_graph_from_file(string graph_file_name) + void set_graph_from_OFF() + void set_graph_from_euclidean_rips(double threshold) + void set_mask(int nodemask) + void set_resolution_with_interval_length(double resolution) + void set_resolution_with_interval_number(int resolution) + void set_subsampling(double constant, double power) + void set_type(string type) + void set_verbose(bool verbose) + vector[int] subpopulation(int c) + void write_info() + void plot_DOT() + void plot_OFF() + void set_point_cloud_from_range(vector[vector[double]] cloud) + void set_distances_from_range(vector[vector[double]] distance_matrix) + +# CoverComplex python interface +cdef class CoverComplex: + """Cover complex data structure. + + The data structure is a simplicial complex, representing a Graph Induced + simplicial Complex (GIC) or a Nerve, and whose simplices are computed with + a cover C of a point cloud P, which often comes from the preimages of + intervals covering the image of a function f defined on P. These intervals + are parameterized by their resolution (either their length or their number) + and their gain (percentage of overlap). To compute a GIC, one also needs a + graph G built on top of P, whose cliques with vertices belonging to + different elements of C correspond to the simplices of the GIC. + """ + + cdef Nerve_gic_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self): + """CoverComplex constructor. + """ + + # The real cython constructor + def __cinit__(self): + self.thisptr = new Nerve_gic_interface() + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if CoverComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def set_point_cloud_from_range(self, cloud): + """ Reads and stores the input point cloud from a vector stored in memory. + + :param cloud: Input vector containing the point cloud. + :type cloud: vector[vector[double]] + """ + return self.thisptr.set_point_cloud_from_range(cloud) + + def set_distances_from_range(self, distance_matrix): + """ Reads and stores the input distance matrix from a vector stored in memory. 
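+
+ A minimal sketch (the 3x3 distance matrix below is purely illustrative)::
+
+     cc = CoverComplex()
+     cc.set_distances_from_range([[0., 1., 2.],
+                                  [1., 0., 3.],
+                                  [2., 3., 0.]])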
+ + :param distance_matrix: Input vector containing the distance matrix. + :type distance_matrix: vector[vector[double]] + """ + return self.thisptr.set_distances_from_range(distance_matrix) + + def compute_confidence_level_from_distance(self, distance): + """Computes the confidence level of a specific bottleneck distance + threshold. + + :param distance: Bottleneck distance. + :type distance: double + :rtype: double + :returns: Confidence level. + """ + return self.thisptr.compute_confidence_level_from_distance(distance) + + def compute_distance_from_confidence_level(self, alpha): + """Computes the bottleneck distance threshold corresponding to a + specific confidence level. + + :param alpha: Confidence level. + :type alpha: double + :rtype: double + :returns: Bottleneck distance. + """ + return self.thisptr.compute_distance_from_confidence_level(alpha) + + def compute_distribution(self, N=100): + """Computes bootstrapped distances distribution. + + :param N: Loop number (default value is 100). + :type alpha: int + """ + self.thisptr.compute_distribution(N) + + def compute_p_value(self): + """Computes the p-value, i.e. the opposite of the confidence level of + the largest bottleneck distance preserving the points in the + persistence diagram of the output simplicial complex. + + :rtype: double + :returns: p-value. + """ + return self.thisptr.compute_p_value() + + def compute_PD(self): + """Computes the extended persistence diagram of the complex. + """ + return self.thisptr.compute_PD() + + def create_simplex_tree(self): + """ + :returns: A simplex tree created from the Cover complex. + :rtype: SimplexTree + """ + simplex_tree = SimplexTree() + self.thisptr.create_simplex_tree(simplex_tree.thisptr) + return simplex_tree + + def find_simplices(self): + """Computes the simplices of the simplicial complex. + """ + self.thisptr.find_simplices() + + def read_point_cloud(self, off_file): + """Reads and stores the input point cloud from .(n)OFF file. + + :param off_file: Name of the input .OFF or .nOFF file. + :type off_file: string + :rtype: bool + :returns: Read file status. + """ + if os.path.isfile(off_file): + return self.thisptr.read_point_cloud(str.encode(off_file)) + else: + print("file " + off_file + " not found.") + return False + + def set_automatic_resolution(self): + """Computes the optimal length of intervals (i.e. the smallest interval + length avoiding discretization artifacts—see :cite:`Carriere17c`) for a + functional cover. + + :rtype: double + :returns: reso interval length used to compute the cover. + """ + return self.thisptr.set_automatic_resolution() + + def set_color_from_coordinate(self, k=0): + """Computes the function used to color the nodes of the simplicial + complex from the k-th coordinate. + + :param k: Coordinate to use (start at 0). Default value is 0. + :type k: int + """ + return self.thisptr.set_color_from_coordinate(k) + + def set_color_from_file(self, color_file_name): + """Computes the function used to color the nodes of the simplicial + complex from a file containing the function values. + + :param color_file_name: Name of the input color file. + :type color_file_name: string + """ + if os.path.isfile(color_file_name): + self.thisptr.set_color_from_file(str.encode(color_file_name)) + else: + print("file " + color_file_name + " not found.") + + def set_color_from_range(self, color): + """Computes the function used to color the nodes of the simplicial + complex from a vector stored in memory. + + :param color: Input vector of values. 
+ :type color: vector[double] + """ + self.thisptr.set_color_from_range(color) + + def set_cover_from_file(self, cover_file_name): + """Creates the cover C from a file containing the cover elements of + each point (the order has to be the same as in the input file!). + + :param cover_file_name: Name of the input cover file. + :type cover_file_name: string + """ + if os.path.isfile(cover_file_name): + self.thisptr.set_cover_from_file(str.encode(cover_file_name)) + else: + print("file " + cover_file_name + " not found.") + + def set_cover_from_function(self): + """Creates a cover C from the preimages of the function f. + """ + self.thisptr.set_cover_from_function() + + def set_cover_from_Voronoi(self, m=100): + """Creates the cover C from the Voronoï cells of a subsampling of the + point cloud. + + :param m: Number of points in the subsample. Default value is 100. + :type m: int + """ + self.thisptr.set_cover_from_Euclidean_Voronoi(m) + + def set_function_from_coordinate(self, k): + """Creates the function f from the k-th coordinate of the point cloud. + + :param k: Coordinate to use (start at 0). + :type k: int + """ + self.thisptr.set_function_from_coordinate(k) + + def set_function_from_file(self, func_file_name): + """Creates the function f from a file containing the function values. + + :param func_file_name: Name of the input function file. + :type func_file_name: string + """ + if os.path.isfile(func_file_name): + self.thisptr.set_function_from_file(str.encode(func_file_name)) + else: + print("file " + func_file_name + " not found.") + + def set_function_from_range(self, function): + """Creates the function f from a vector stored in memory. + + :param function: Input vector of values. + :type function: vector[double] + """ + self.thisptr.set_function_from_range(function) + + def set_gain(self, g = 0.3): + """Sets a gain from a value stored in memory. + + :param g: Gain (default value is 0.3). + :type g: double + """ + self.thisptr.set_gain(g) + + def set_graph_from_automatic_rips(self, N=100): + """Creates a graph G from a Rips complex whose threshold value is + automatically tuned with subsampling—see. + + :param N: Number of subsampling iteration (the default reasonable value + is 100, but there is no guarantee on how to choose it). + :type N: int + :rtype: double + :returns: Delta threshold used for computing the Rips complex. + """ + return self.thisptr.set_graph_from_automatic_euclidean_rips(N) + + def set_graph_from_file(self, graph_file_name): + """Creates a graph G from a file containing the edges. + + :param graph_file_name: Name of the input graph file. The graph file + contains one edge per line, each edge being represented by the IDs of + its two nodes. + :type graph_file_name: string + """ + if os.path.isfile(graph_file_name): + self.thisptr.set_graph_from_file(str.encode(graph_file_name)) + else: + print("file " + graph_file_name + " not found.") + + def set_graph_from_OFF(self): + """Creates a graph G from the triangulation given by the input OFF + file. + """ + self.thisptr.set_graph_from_OFF() + + def set_graph_from_rips(self, threshold): + """Creates a graph G from a Rips complex. + + :param threshold: Threshold value for the Rips complex. + :type threshold: double + """ + self.thisptr.set_graph_from_euclidean_rips(threshold) + + def set_mask(self, nodemask): + """Sets the mask, which is a threshold integer such that nodes in the + complex that contain a number of data points which is less than or + equal to this threshold are not displayed. 
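+
+ A hedged example (``cc`` stands for a hypothetical, already computed CoverComplex)::
+
+     cc.set_mask(2)  # nodes containing 2 or fewer data points are hidden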
+ + :param nodemask: Threshold. + :type nodemask: int + """ + self.thisptr.set_mask(nodemask) + + def set_resolution_with_interval_length(self, resolution): + """Sets a length of intervals from a value stored in memory. + + :param resolution: Length of intervals. + :type resolution: double + """ + self.thisptr.set_resolution_with_interval_length(resolution) + + def set_resolution_with_interval_number(self, resolution): + """Sets a number of intervals from a value stored in memory. + + :param resolution: Number of intervals. + :type resolution: int + """ + self.thisptr.set_resolution_with_interval_number(resolution) + + def set_subsampling(self, constant, power): + """Sets the constants used to subsample the data set. These constants + are explained in :cite:`Carriere17c`. + + :param constant: Constant. + :type constant: double + :param power: Power. + :type resolution: double + """ + self.thisptr.set_subsampling(constant, power) + + def set_type(self, type): + """Specifies whether the type of the output simplicial complex. + + :param type: either "GIC" or "Nerve". + :type type: string + """ + self.thisptr.set_type(str.encode(type)) + + def set_verbose(self, verbose): + """Specifies whether the program should display information or not. + + :param verbose: true = display info, false = do not display info. + :type verbose: boolean + """ + self.thisptr.set_verbose(verbose) + + def subpopulation(self, c): + """Returns the data subset corresponding to a specific node of the + created complex. + + :param c: ID of the node. + :type c: int + :rtype: vector[int] + :returns: Vector of IDs of data points. + """ + return self.thisptr.subpopulation(c) + + def write_info(self): + """Creates a .txt file called SC.txt describing the 1-skeleton, which can + then be plotted with e.g. KeplerMapper. + """ + return self.thisptr.write_info() + + def plot_dot(self): + """Creates a .dot file called SC.dot for neato (part of the graphviz + package) once the simplicial complex is computed to get a visualization of + its 1-skeleton in a .pdf file. + """ + return self.thisptr.plot_DOT() + + def plot_off(self): + """Creates a .off file called SC.off for 3D visualization, which contains + the 2-skeleton of the GIC. This function assumes that the cover has been + computed with Voronoi. If data points are in 1D or 2D, the remaining + coordinates of the points embedded in 3D are set to 0. + """ + return self.thisptr.plot_OFF() diff --git a/src/cython/gudhi/off_reader.pyx b/src/cython/gudhi/off_reader.pyx new file mode 100644 index 00000000..9efd97ff --- /dev/null +++ b/src/cython/gudhi/off_reader.pyx @@ -0,0 +1,38 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.string cimport string +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Off_reader_interface.h" namespace "Gudhi": + vector[vector[double]] read_points_from_OFF_file(string off_file) + +def read_off(off_file=''): + """Read points from OFF file. + + :param off_file: An OFF file style name. + :type off_file: string + + :returns: The point set. 
+ :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + return read_points_from_OFF_file(str.encode(off_file)) + else: + print("file " + off_file + " not found.") + return [] + diff --git a/src/cython/gudhi/periodic_cubical_complex.pyx b/src/cython/gudhi/periodic_cubical_complex.pyx new file mode 100644 index 00000000..724fadd4 --- /dev/null +++ b/src/cython/gudhi/periodic_cubical_complex.pyx @@ -0,0 +1,190 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +import os + +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": + cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface>": + Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells, vector[bool] periodic_dimensions) + Periodic_cubical_complex_base_interface(string perseus_file) + int num_simplices() + int dimension() + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>>": + Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + +# PeriodicCubicalComplex python interface +cdef class PeriodicCubicalComplex: + """The PeriodicCubicalComplex is an example of a structured complex useful + in computational mathematics (specially rigorous numerics) and image + analysis. + """ + cdef Periodic_cubical_complex_base_interface * thisptr + + cdef Periodic_cubical_complex_persistence_interface * pcohptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, dimensions=None, top_dimensional_cells=None, + periodic_dimensions=None, perseus_file=''): + """PeriodicCubicalComplex constructor from dimensions and + top_dimensional_cells or from a Perseus-style file name. + + :param dimensions: A list of number of top dimensional cells. + :type dimensions: list of int + :param top_dimensional_cells: A list of cells filtration values. + :type top_dimensional_cells: list of double + :param periodic_dimensions: A list of top dimensional cells periodicity value. + :type periodic_dimensions: list of boolean + + Or + + :param perseus_file: A Perseus-style file name. 
+ :type perseus_file: string + """ + + # The real cython constructor + def __cinit__(self, dimensions=None, top_dimensional_cells=None, + periodic_dimensions=None, perseus_file=''): + if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file is ''): + self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions) + elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file is not ''): + if os.path.isfile(perseus_file): + self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file)) + else: + print("file " + perseus_file + " not found.") + else: + print("CubicalComplex can be constructed from dimensions and " + "top_dimensional_cells or from a Perseus-style file name.") + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + if self.pcohptr != NULL: + del self.pcohptr + + def __is_defined(self): + """Returns true if PeriodicCubicalComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def __is_persistence_defined(self): + """Returns true if Persistence pointer is not NULL. + """ + return self.pcohptr != NULL + + def num_simplices(self): + """This function returns the number of all cubes in the complex. + + :returns: int -- the number of all cubes in the complex. + """ + return self.thisptr.num_simplices() + + def dimension(self): + """This function returns the dimension of the complex. + + :returns: int -- the complex dimension. + """ + return self.thisptr.dimension() + + def persistence(self, homology_coeff_field=11, min_persistence=0): + """This function returns the persistence of the complex. + + :param homology_coeff_field: The homology coefficient field. Must be a + prime number + :type homology_coeff_field: int. + :param min_persistence: The minimum persistence value to take into + account (strictly greater than min_persistence). Default value is + 0.0. + Sets min_persistence to -1.0 to see all values. + :type min_persistence: float. + :returns: list of pairs(dimension, pair(birth, death)) -- the + persistence of the complex. + """ + if self.pcohptr != NULL: + del self.pcohptr + if self.thisptr != NULL: + self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True) + cdef vector[pair[int, pair[double, double]]] persistence_result + if self.pcohptr != NULL: + persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) + return persistence_result + + def betti_numbers(self): + """This function returns the Betti numbers of the complex. + + :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). + + :note: betti_numbers function requires persistence function to be + launched first. + + :note: betti_numbers function always returns [1, 0, 0, ...] as infinity + filtration cubes are not removed from the complex. + """ + cdef vector[int] bn_result + if self.pcohptr != NULL: + bn_result = self.pcohptr.betti_numbers() + return bn_result + + def persistent_betti_numbers(self, from_value, to_value): + """This function returns the persistent Betti numbers of the complex. + + :param from_value: The persistence birth limit to be added in the + numbers (persistent birth <= from_value). + :type from_value: float. + :param to_value: The persistence death limit to be added in the + numbers (persistent death > to_value). + :type to_value: float. 
+ + :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., + Bn]). + + :note: persistent_betti_numbers function requires persistence + function to be launched first. + """ + cdef vector[int] pbn_result + if self.pcohptr != NULL: + pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) + return pbn_result + + def persistence_intervals_in_dimension(self, dimension): + """This function returns the persistence intervals of the complex in a + specific dimension. + + :param dimension: The specific dimension. + :type dimension: int. + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + + :note: intervals_in_dim function requires persistence function to be + launched first. + """ + cdef vector[pair[double,double]] intervals_result + if self.pcohptr != NULL: + intervals_result = self.pcohptr.intervals_in_dimension(dimension) + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") + return np_array(intervals_result) diff --git a/src/cython/gudhi/persistence_graphical_tools.py b/src/cython/gudhi/persistence_graphical_tools.py new file mode 100644 index 00000000..34803222 --- /dev/null +++ b/src/cython/gudhi/persistence_graphical_tools.py @@ -0,0 +1,420 @@ +from os import path +from math import isfinite +import numpy as np + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau, Bertrand Michel + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau, Bertrand Michel" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def __min_birth_max_death(persistence, band=0.0): + """This function returns (min_birth, max_death) from the persistence. + + :param persistence: The persistence to plot. + :type persistence: list of tuples(dimension, tuple(birth, death)). + :param band: band + :type band: float. + :returns: (float, float) -- (min_birth, max_death). + """ + # Look for minimum birth date and maximum death date for plot optimisation + max_death = 0 + min_birth = persistence[0][1][0] + for interval in reversed(persistence): + if float(interval[1][1]) != float("inf"): + if float(interval[1][1]) > max_death: + max_death = float(interval[1][1]) + if float(interval[1][0]) > max_death: + max_death = float(interval[1][0]) + if float(interval[1][0]) < min_birth: + min_birth = float(interval[1][0]) + if band > 0.0: + max_death += band + return (min_birth, max_death) + + +""" +Only 13 colors for the palette +""" +palette = [ + "#ff0000", + "#00ff00", + "#0000ff", + "#00ffff", + "#ff00ff", + "#ffff00", + "#000000", + "#880000", + "#008800", + "#000088", + "#888800", + "#880088", + "#008888", +] + + +def plot_persistence_barcode( + persistence=[], + persistence_file="", + alpha=0.6, + max_intervals=1000, + max_barcodes=1000, + inf_delta=0.1, + legend=False, +): + """This function plots the persistence bar code from persistence values list + or from a :doc:`persistence file `. + + :param persistence: Persistence intervals values list grouped by dimension. + :type persistence: list of tuples(dimension, tuple(birth, death)). + :param persistence_file: A :doc:`persistence file ` style name + (reset persistence if both are set). + :type persistence_file: string + :param alpha: barcode transparency value (0.0 transparent through 1.0 + opaque - default is 0.6). 
+ :type alpha: float. + :param max_intervals: maximal number of intervals to display. + Selected intervals are those with the longest life time. Set it + to 0 to see all. Default value is 1000. + :type max_intervals: int. + :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x + inf_delta)` above :code:`max_death` value. A reasonable value is + between 0.05 and 0.5 - default is 0.1. + :type inf_delta: float. + :param legend: Display the dimension color legend (default is False). + :type legend: boolean. + :returns: A matplotlib object containing horizontal bar plot of persistence + (launch `show()` method on it to display it). + """ + try: + import matplotlib.pyplot as plt + import matplotlib.patches as mpatches + + if persistence_file is not "": + if path.isfile(persistence_file): + # Reset persistence + persistence = [] + diag = read_persistence_intervals_grouped_by_dimension( + persistence_file=persistence_file + ) + for key in diag.keys(): + for persistence_interval in diag[key]: + persistence.append((key, persistence_interval)) + else: + print("file " + persistence_file + " not found.") + return None + + if max_barcodes is not 1000: + print("Deprecated parameter. It has been replaced by max_intervals") + max_intervals = max_barcodes + + if max_intervals > 0 and max_intervals < len(persistence): + # Sort by life time, then takes only the max_intervals elements + persistence = sorted( + persistence, + key=lambda life_time: life_time[1][1] - life_time[1][0], + reverse=True, + )[:max_intervals] + + persistence = sorted(persistence, key=lambda birth: birth[1][0]) + + (min_birth, max_death) = __min_birth_max_death(persistence) + ind = 0 + delta = (max_death - min_birth) * inf_delta + # Replace infinity values with max_death + delta for bar code to be more + # readable + infinity = max_death + delta + axis_start = min_birth - delta + # Draw horizontal bars in loop + for interval in reversed(persistence): + if float(interval[1][1]) != float("inf"): + # Finite death case + plt.barh( + ind, + (interval[1][1] - interval[1][0]), + height=0.8, + left=interval[1][0], + alpha=alpha, + color=palette[interval[0]], + linewidth=0, + ) + else: + # Infinite death case for diagram to be nicer + plt.barh( + ind, + (infinity - interval[1][0]), + height=0.8, + left=interval[1][0], + alpha=alpha, + color=palette[interval[0]], + linewidth=0, + ) + ind = ind + 1 + + if legend: + dimensions = list(set(item[0] for item in persistence)) + plt.legend( + handles=[ + mpatches.Patch(color=palette[dim], label=str(dim)) + for dim in dimensions + ], + loc="lower right", + ) + plt.title("Persistence barcode") + # Ends plot on infinity value and starts a little bit before min_birth + plt.axis([axis_start, infinity, 0, ind]) + return plt + + except ImportError: + print("This function is not available, you may be missing matplotlib.") + + +def plot_persistence_diagram( + persistence=[], + persistence_file="", + alpha=0.6, + band=0.0, + max_intervals=1000, + max_plots=1000, + inf_delta=0.1, + legend=False, +): + """This function plots the persistence diagram from persistence values + list or from a :doc:`persistence file `. + + :param persistence: Persistence intervals values list grouped by dimension. + :type persistence: list of tuples(dimension, tuple(birth, death)). + :param persistence_file: A :doc:`persistence file ` style name + (reset persistence if both are set). + :type persistence_file: string + :param alpha: plot transparency value (0.0 transparent through 1.0 + opaque - default is 0.6). 
+ :type alpha: float. + :param band: band (not displayed if :math:`\leq` 0. - default is 0.) + :type band: float. + :param max_intervals: maximal number of intervals to display. + Selected intervals are those with the longest life time. Set it + to 0 to see all. Default value is 1000. + :type max_intervals: int. + :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x + inf_delta)` above :code:`max_death` value. A reasonable value is + between 0.05 and 0.5 - default is 0.1. + :type inf_delta: float. + :param legend: Display the dimension color legend (default is False). + :type legend: boolean. + :returns: A matplotlib object containing diagram plot of persistence + (launch `show()` method on it to display it). + """ + try: + import matplotlib.pyplot as plt + import matplotlib.patches as mpatches + + if persistence_file is not "": + if path.isfile(persistence_file): + # Reset persistence + persistence = [] + diag = read_persistence_intervals_grouped_by_dimension( + persistence_file=persistence_file + ) + for key in diag.keys(): + for persistence_interval in diag[key]: + persistence.append((key, persistence_interval)) + else: + print("file " + persistence_file + " not found.") + return None + + if max_plots is not 1000: + print("Deprecated parameter. It has been replaced by max_intervals") + max_intervals = max_plots + + if max_intervals > 0 and max_intervals < len(persistence): + # Sort by life time, then takes only the max_intervals elements + persistence = sorted( + persistence, + key=lambda life_time: life_time[1][1] - life_time[1][0], + reverse=True, + )[:max_intervals] + + (min_birth, max_death) = __min_birth_max_death(persistence, band) + delta = (max_death - min_birth) * inf_delta + # Replace infinity values with max_death + delta for diagram to be more + # readable + infinity = max_death + delta + axis_start = min_birth - delta + + # line display of equation : birth = death + x = np.linspace(axis_start, infinity, 1000) + # infinity line and text + plt.plot(x, x, color="k", linewidth=1.0) + plt.plot(x, [infinity] * len(x), linewidth=1.0, color="k", alpha=alpha) + plt.text(axis_start, infinity, r"$\infty$", color="k", alpha=alpha) + # bootstrap band + if band > 0.0: + plt.fill_between(x, x, x + band, alpha=alpha, facecolor="red") + + # Draw points in loop + for interval in reversed(persistence): + if float(interval[1][1]) != float("inf"): + # Finite death case + plt.scatter( + interval[1][0], + interval[1][1], + alpha=alpha, + color=palette[interval[0]], + ) + else: + # Infinite death case for diagram to be nicer + plt.scatter( + interval[1][0], infinity, alpha=alpha, color=palette[interval[0]] + ) + + if legend: + dimensions = list(set(item[0] for item in persistence)) + plt.legend( + handles=[ + mpatches.Patch(color=palette[dim], label=str(dim)) + for dim in dimensions + ] + ) + + plt.title("Persistence diagram") + plt.xlabel("Birth") + plt.ylabel("Death") + # Ends plot on infinity value and starts a little bit before min_birth + plt.axis([axis_start, infinity, axis_start, infinity + delta]) + return plt + + except ImportError: + print("This function is not available, you may be missing matplotlib.") + + +def plot_persistence_density( + persistence=[], + persistence_file="", + nbins=300, + bw_method=None, + max_intervals=1000, + dimension=None, + cmap=None, + legend=False, +): + """This function plots the persistence density from persistence + values list or from a :doc:`persistence file `. 
Be + aware that this function does not distinguish the dimension, it is + up to you to select the required one. This function also does not handle + degenerate data set (scipy correlation matrix inversion can fail). + + :param persistence: Persistence intervals values list grouped by dimension. + :type persistence: list of tuples(dimension, tuple(birth, death)). + :param persistence_file: A :doc:`persistence file ` + style name (reset persistence if both are set). + :type persistence_file: string + :param nbins: Evaluate a gaussian kde on a regular grid of nbins x + nbins over data extents (default is 300) + :type nbins: int. + :param bw_method: The method used to calculate the estimator + bandwidth. This can be 'scott', 'silverman', a scalar constant + or a callable. If a scalar, this will be used directly as + kde.factor. If a callable, it should take a gaussian_kde + instance as only parameter and return a scalar. If None + (default), 'scott' is used. See + `scipy.stats.gaussian_kde documentation + `_ + for more details. + :type bw_method: str, scalar or callable, optional. + :param max_intervals: maximal number of points used in the density + estimation. + Selected intervals are those with the longest life time. Set it + to 0 to see all. Default value is 1000. + :type max_intervals: int. + :param dimension: the dimension to be selected in the intervals + (default is None to mix all dimensions). + :type dimension: int. + :param cmap: A matplotlib colormap (default is + matplotlib.pyplot.cm.hot_r). + :type cmap: cf. matplotlib colormap. + :param legend: Display the color bar values (default is False). + :type legend: boolean. + :returns: A matplotlib object containing diagram plot of persistence + (launch `show()` method on it to display it). + """ + try: + import matplotlib.pyplot as plt + from scipy.stats import kde + + if persistence_file is not "": + if dimension is None: + # All dimension case + dimension = -1 + if path.isfile(persistence_file): + persistence_dim = read_persistence_intervals_in_dimension( + persistence_file=persistence_file, only_this_dim=dimension + ) + print(persistence_dim) + else: + print("file " + persistence_file + " not found.") + return None + + if len(persistence) > 0: + persistence_dim = np.array( + [ + (dim_interval[1][0], dim_interval[1][1]) + for dim_interval in persistence + if (dim_interval[0] == dimension) or (dimension is None) + ] + ) + + persistence_dim = persistence_dim[np.isfinite(persistence_dim[:, 1])] + if max_intervals > 0 and max_intervals < len(persistence_dim): + # Sort by life time, then takes only the max_intervals elements + persistence_dim = np.array( + sorted( + persistence_dim, + key=lambda life_time: life_time[1] - life_time[0], + reverse=True, + )[:max_intervals] + ) + + # Set as numpy array birth and death (remove undefined values - inf and NaN) + birth = persistence_dim[:, 0] + death = persistence_dim[:, 1] + + # line display of equation : birth = death + x = np.linspace(death.min(), birth.max(), 1000) + plt.plot(x, x, color="k", linewidth=1.0) + + # Evaluate a gaussian kde on a regular grid of nbins x nbins over data extents + k = kde.gaussian_kde([birth, death], bw_method=bw_method) + xi, yi = np.mgrid[ + birth.min() : birth.max() : nbins * 1j, + death.min() : death.max() : nbins * 1j, + ] + zi = k(np.vstack([xi.flatten(), yi.flatten()])) + + # default cmap value cannot be done at argument definition level as matplotlib is not yet defined. 
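+ # Hypothetical usage sketch (the diagram below is illustrative only):
+ #   diag = [(0, (0., 1.)), (1, (0.2, 0.7)), (1, (0.3, 0.6)), (1, (0.4, 0.9))]
+ #   plot_persistence_density(persistence=diag, dimension=1).show()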
+ if cmap is None: + cmap = plt.cm.hot_r + # Make the plot + plt.pcolormesh(xi, yi, zi.reshape(xi.shape), cmap=cmap) + + if legend: + plt.colorbar() + + plt.title("Persistence density") + plt.xlabel("Birth") + plt.ylabel("Death") + return plt + + except ImportError: + print( + "This function is not available, you may be missing matplotlib and/or scipy." + ) diff --git a/src/cython/gudhi/reader_utils.pyx b/src/cython/gudhi/reader_utils.pyx new file mode 100644 index 00000000..147fae71 --- /dev/null +++ b/src/cython/gudhi/reader_utils.pyx @@ -0,0 +1,87 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.string cimport string +from libcpp.map cimport map +from libcpp.pair cimport pair + +from os import path +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2017 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2017 Inria" +__license__ = "MIT" + +cdef extern from "Reader_utils_interface.h" namespace "Gudhi": + vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator) + map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename) + vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim) + +def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'): + """Read lower triangular matrix from a CSV style file. + + :param csv_file: A CSV file style name. + :type csv_file: string + :param separator: The value separator in the CSV file. Default value is ';' + :type separator: char + + :returns: The lower triangular matrix. + :rtype: vector[vector[double]] + """ + if csv_file is not '': + if path.isfile(csv_file): + return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0])) + print("file " + csv_file + " not set or not found.") + return [] + +def read_persistence_intervals_grouped_by_dimension(persistence_file=''): + """Reads a file containing persistence intervals. + Each line might contain 2, 3 or 4 values: [[field] dimension] birth death + The return value is an `map[dim, vector[pair[birth, death]]]` + where `dim` is an `int`, `birth` a `double`, and `death` a `double`. + Note: the function does not check that birth <= death. + + :param persistence_file: A persistence file style name. + :type persistence_file: string + + :returns: The persistence pairs grouped by dimension. + :rtype: map[int, vector[pair[double, double]]] + """ + if persistence_file is not '': + if path.isfile(persistence_file): + return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file)) + print("file " + persistence_file + " not set or not found.") + return [] + +def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1): + """Reads a file containing persistence intervals. + Each line of persistence_file might contain 2, 3 or 4 values: + [[field] dimension] birth death + Note: the function does not check that birth <= death. + + :param persistence_file: A persistence file style name. + :type persistence_file: string + :param only_this_dim: The specific dimension. Default value is -1. + If `only_this_dim` = -1, dimension is ignored and all lines are returned. 
+ If `only_this_dim` is >= 0, only the lines where dimension = + `only_this_dim` (or where dimension is not specified) are returned. + :type only_this_dim: int. + + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + """ + if persistence_file is not '': + if path.isfile(persistence_file): + return np_array(read_pers_intervals_in_dimension(str.encode( + persistence_file), only_this_dim)) + print("file " + persistence_file + " not set or not found.") + return [] diff --git a/src/cython/gudhi/rips_complex.pyx b/src/cython/gudhi/rips_complex.pyx new file mode 100644 index 00000000..1a6c8571 --- /dev/null +++ b/src/cython/gudhi/rips_complex.pyx @@ -0,0 +1,102 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Rips_complex_interface.h" namespace "Gudhi": + cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface": + Rips_complex_interface() + void init_points(vector[vector[double]] values, double threshold) + void init_matrix(vector[vector[double]] values, double threshold) + void init_points_sparse(vector[vector[double]] values, double threshold, double sparse) + void init_matrix_sparse(vector[vector[double]] values, double threshold, double sparse) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max) + +# RipsComplex python interface +cdef class RipsComplex: + """The data structure is a one skeleton graph, or Rips graph, containing + edges when the edge length is less or equal to a given threshold. Edge + length is computed from a user given point cloud with a given distance + function, or a distance matrix. + """ + + cdef Rips_complex_interface thisref + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, points=None, distance_matrix=None, + max_edge_length=float('inf'), sparse=None): + """RipsComplex constructor. + + :param max_edge_length: Rips value. + :type max_edge_length: float + + :param points: A list of points in d-Dimension. + :type points: list of list of double + + Or + + :param distance_matrix: A distance matrix (full square or lower + triangular). + :type points: list of list of double + + And in both cases + + :param sparse: If this is not None, it switches to building a sparse + Rips and represents the approximation parameter epsilon. 
+ :type sparse: float + """ + + # The real cython constructor + def __cinit__(self, points=None, distance_matrix=None, + max_edge_length=float('inf'), sparse=None): + if sparse is not None: + if distance_matrix is not None: + self.thisref.init_matrix_sparse(distance_matrix, + max_edge_length, + sparse) + else: + if points is None: + # Empty Rips construction + points=[] + self.thisref.init_points_sparse(points, max_edge_length, sparse) + else: + if distance_matrix is not None: + self.thisref.init_matrix(distance_matrix, max_edge_length) + else: + if points is None: + # Empty Rips construction + points=[] + self.thisref.init_points(points, max_edge_length) + + + def create_simplex_tree(self, max_dimension=1): + """ + :param max_dimension: graph expansion for rips until this given maximal + dimension. + :type max_dimension: int + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_ptr=stree.thisptr + self.thisref.create_simplex_tree(stree_ptr, max_dimension) + return stree diff --git a/src/cython/gudhi/simplex_tree.pxd b/src/cython/gudhi/simplex_tree.pxd new file mode 100644 index 00000000..25051295 --- /dev/null +++ b/src/cython/gudhi/simplex_tree.pxd @@ -0,0 +1,46 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp cimport bool +from libcpp.string cimport string + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": + cdef cppclass Simplex_tree_options_full_featured: + pass + + cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface": + Simplex_tree() + double simplex_filtration(vector[int] simplex) + void assign_simplex_filtration(vector[int] simplex, double filtration) + void initialize_filtration() + int num_vertices() + int num_simplices() + void set_dimension(int dimension) + int dimension() + int upper_bound_dimension() + bool find_simplex(vector[int] simplex) + bool insert_simplex_and_subfaces(vector[int] simplex, + double filtration) + vector[pair[vector[int], double]] get_filtration() + vector[pair[vector[int], double]] get_skeleton(int dimension) + vector[pair[vector[int], double]] get_star(vector[int] simplex) + vector[pair[vector[int], double]] get_cofaces(vector[int] simplex, + int dimension) + void expansion(int max_dim) + void remove_maximal_simplex(vector[int] simplex) + bool prune_above_filtration(double filtration) + bool make_filtration_non_decreasing() diff --git a/src/cython/gudhi/simplex_tree.pyx b/src/cython/gudhi/simplex_tree.pyx new file mode 100644 index 00000000..e5f9e9d1 --- /dev/null +++ b/src/cython/gudhi/simplex_tree.pyx @@ -0,0 +1,518 @@ +from libc.stdint cimport intptr_t +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": + Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + void write_output_diagram(string diagram_file_name) + vector[pair[vector[int], vector[int]]] persistence_pairs() + +# SimplexTree python interface +cdef class SimplexTree: + """The simplex tree is an efficient and flexible data structure for + representing general (filtered) simplicial complexes. The data structure + is described in Jean-Daniel Boissonnat and Clément Maria. The Simplex + Tree: An Efficient Data Structure for General Simplicial Complexes. + Algorithmica, pages 1–22, 2014. + + This class is a filtered, with keys, and non contiguous vertices version + of the simplex tree. + """ + # unfortunately 'cdef public Simplex_tree_interface_full_featured* thisptr' is not possible + # Use intptr_t instead to cast the pointer + cdef public intptr_t thisptr + + # Get the pointer casted as it should be + cdef Simplex_tree_interface_full_featured* get_ptr(self): + return (self.thisptr) + + cdef Simplex_tree_persistence_interface * pcohptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self): + """SimplexTree constructor. + """ + + # The real cython constructor + def __cinit__(self): + cdef Simplex_tree_interface_full_featured* ptr = new Simplex_tree_interface_full_featured() + self.thisptr = ptr + + def __dealloc__(self): + cdef Simplex_tree_interface_full_featured* ptr = self.get_ptr() + if ptr != NULL: + del ptr + if self.pcohptr != NULL: + del self.pcohptr + + def __is_defined(self): + """Returns true if SimplexTree pointer is not NULL. + """ + return self.get_ptr() != NULL + + def __is_persistence_defined(self): + """Returns true if Persistence pointer is not NULL. + """ + return self.pcohptr != NULL + + def filtration(self, simplex): + """This function returns the filtration value for a given N-simplex in + this simplicial complex, or +infinity if it is not in the complex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :returns: The simplicial complex filtration value. + :rtype: float + """ + return self.get_ptr().simplex_filtration(simplex) + + def assign_filtration(self, simplex, filtration): + """This function assigns the simplicial complex filtration value for a + given N-simplex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :param filtration: The simplicial complex filtration value. + :type filtration: float + """ + self.get_ptr().assign_simplex_filtration(simplex, filtration) + + def initialize_filtration(self): + """This function initializes and sorts the simplicial complex + filtration vector. + + .. 
note:: + + This function must be launched before + :func:`persistence()`, + :func:`betti_numbers()`, + :func:`persistent_betti_numbers()`, + or :func:`get_filtration()` + after :func:`inserting` or + :func:`removing` + simplices. + """ + self.get_ptr().initialize_filtration() + + def num_vertices(self): + """This function returns the number of vertices of the simplicial + complex. + + :returns: The simplicial complex number of vertices. + :rtype: int + """ + return self.get_ptr().num_vertices() + + def num_simplices(self): + """This function returns the number of simplices of the simplicial + complex. + + :returns: the simplicial complex number of simplices. + :rtype: int + """ + return self.get_ptr().num_simplices() + + def dimension(self): + """This function returns the dimension of the simplicial complex. + + :returns: the simplicial complex dimension. + :rtype: int + + .. note:: + + This function is not constant time because it can recompute + dimension if required (can be triggered by + :func:`remove_maximal_simplex()` + or + :func:`prune_above_filtration()` + methods). + """ + return self.get_ptr().dimension() + + def upper_bound_dimension(self): + """This function returns a valid dimension upper bound of the + simplicial complex. + + :returns: an upper bound on the dimension of the simplicial complex. + :rtype: int + """ + return self.get_ptr().upper_bound_dimension() + + def set_dimension(self, dimension): + """This function sets the dimension of the simplicial complex. + + :param dimension: The new dimension value. + :type dimension: int. + + .. note:: + + This function must be used with caution because it disables + dimension recomputation when required + (this recomputation can be triggered by + :func:`remove_maximal_simplex()` + or + :func:`prune_above_filtration()` + ). + """ + self.get_ptr().set_dimension(dimension) + + def find(self, simplex): + """This function returns if the N-simplex was found in the simplicial + complex or not. + + :param simplex: The N-simplex to find, represented by a list of vertex. + :type simplex: list of int. + :returns: true if the simplex was found, false otherwise. + :rtype: bool + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + return self.get_ptr().find_simplex(csimplex) + + def insert(self, simplex, filtration=0.0): + """This function inserts the given N-simplex and its subfaces with the + given filtration value (default value is '0.0'). If some of those + simplices are already present with a higher filtration value, their + filtration value is lowered. + + :param simplex: The N-simplex to insert, represented by a list of + vertex. + :type simplex: list of int. + :param filtration: The filtration value of the simplex. + :type filtration: float. + :returns: true if the simplex was not yet in the complex, false + otherwise (whatever its original filtration value). + :rtype: bool + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + return self.get_ptr().insert_simplex_and_subfaces(csimplex, + filtration) + + def get_filtration(self): + """This function returns a list of all simplices with their given + filtration values. + + :returns: The simplices sorted by increasing filtration values. 
+ :rtype: list of tuples(simplex, filtration) + """ + cdef vector[pair[vector[int], double]] filtration \ + = self.get_ptr().get_filtration() + ct = [] + for filtered_complex in filtration: + v = [] + for vertex in filtered_complex.first: + v.append(vertex) + ct.append((v, filtered_complex.second)) + return ct + + def get_skeleton(self, dimension): + """This function returns the (simplices of the) skeleton of a maximum + given dimension. + + :param dimension: The skeleton dimension value. + :type dimension: int. + :returns: The (simplices of the) skeleton of a maximum dimension. + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[pair[vector[int], double]] skeleton \ + = self.get_ptr().get_skeleton(dimension) + ct = [] + for filtered_simplex in skeleton: + v = [] + for vertex in filtered_simplex.first: + v.append(vertex) + ct.append((v, filtered_simplex.second)) + return ct + + def get_star(self, simplex): + """This function returns the star of a given N-simplex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :returns: The (simplices of the) star of a simplex. + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + cdef vector[pair[vector[int], double]] star \ + = self.get_ptr().get_star(csimplex) + ct = [] + for filtered_simplex in star: + v = [] + for vertex in filtered_simplex.first: + v.append(vertex) + ct.append((v, filtered_simplex.second)) + return ct + + def get_cofaces(self, simplex, codimension): + """This function returns the cofaces of a given N-simplex with a + given codimension. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :param codimension: The codimension. If codimension = 0, all cofaces + are returned (equivalent of get_star function) + :type codimension: int. + :returns: The (simplices of the) cofaces of a simplex + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + cdef vector[pair[vector[int], double]] cofaces \ + = self.get_ptr().get_cofaces(csimplex, codimension) + ct = [] + for filtered_simplex in cofaces: + v = [] + for vertex in filtered_simplex.first: + v.append(vertex) + ct.append((v, filtered_simplex.second)) + return ct + + def remove_maximal_simplex(self, simplex): + """This function removes a given maximal N-simplex from the simplicial + complex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + + .. note:: + + Be aware that removing is shifting data in a flat_map + (:func:`initialize_filtration()` to be done). + + .. note:: + + The dimension of the simplicial complex may be lower after calling + remove_maximal_simplex than it was before. However, + :func:`upper_bound_dimension()` + method will return the old value, which + remains a valid upper bound. If you care, you can call + :func:`dimension()` + to recompute the exact dimension. + """ + self.get_ptr().remove_maximal_simplex(simplex) + + def prune_above_filtration(self, filtration): + """Prune above filtration value given as parameter. + + :param filtration: Maximum threshold value. + :type filtration: float. + :returns: The filtration modification information. + :rtype: bool + + + .. note:: + + Some simplex tree functions require the filtration to be valid. 
+ prune_above_filtration function is not launching + :func:`initialize_filtration()` + but returns the filtration modification + information. If the complex has changed , please call + :func:`initialize_filtration()` + to recompute it. + + .. note:: + + Note that the dimension of the simplicial complex may be lower + after calling + :func:`prune_above_filtration()` + than it was before. However, + :func:`upper_bound_dimension()` + will return the old value, which remains a + valid upper bound. If you care, you can call + :func:`dimension()` + method to recompute the exact dimension. + """ + return self.get_ptr().prune_above_filtration(filtration) + + def expansion(self, max_dim): + """Expands the Simplex_tree containing only its one skeleton + until dimension max_dim. + + The expanded simplicial complex until dimension :math:`d` + attached to a graph :math:`G` is the maximal simplicial complex of + dimension at most :math:`d` admitting the graph :math:`G` as + :math:`1`-skeleton. + The filtration value assigned to a simplex is the maximal filtration + value of one of its edges. + + The Simplex_tree must contain no simplex of dimension bigger than + 1 when calling the method. + + :param max_dim: The maximal dimension. + :type max_dim: int. + """ + self.get_ptr().expansion(max_dim) + + def make_filtration_non_decreasing(self): + """This function ensures that each simplex has a higher filtration + value than its faces by increasing the filtration values. + + :returns: True if any filtration value was modified, + False if the filtration was already non-decreasing. + :rtype: bool + + + .. note:: + + Some simplex tree functions require the filtration to be valid. + make_filtration_non_decreasing function is not launching + :func:`initialize_filtration()` + but returns the filtration modification + information. If the complex has changed , please call + :func:`initialize_filtration()` + to recompute it. + """ + return self.get_ptr().make_filtration_non_decreasing() + + def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False): + """This function returns the persistence of the simplicial complex. + + :param homology_coeff_field: The homology coefficient field. Must be a + prime number. Default value is 11. + :type homology_coeff_field: int. + :param min_persistence: The minimum persistence value to take into + account (strictly greater than min_persistence). Default value is + 0.0. + Sets min_persistence to -1.0 to see all values. + :type min_persistence: float. + :param persistence_dim_max: If true, the persistent homology for the + maximal dimension in the complex is computed. If false, it is + ignored. Default is false. + :type persistence_dim_max: bool + :returns: The persistence of the simplicial complex. + :rtype: list of pairs(dimension, pair(birth, death)) + """ + if self.pcohptr != NULL: + del self.pcohptr + self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), persistence_dim_max) + cdef vector[pair[int, pair[double, double]]] persistence_result + if self.pcohptr != NULL: + persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) + return persistence_result + + def betti_numbers(self): + """This function returns the Betti numbers of the simplicial complex. + + :returns: The Betti numbers ([B0, B1, ..., Bn]). + :rtype: list of int + + :note: betti_numbers function requires + :func:`persistence()` + function to be launched first. 
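+
+        A minimal usage sketch (the simplex and filtration value below are
+        only illustrative)::
+
+            st = SimplexTree()
+            st.insert([0, 1, 2], filtration=1.0)  # a filled triangle
+            st.initialize_filtration()
+            st.persistence()
+            st.betti_numbers()  # expected: one connected component, no holes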
+ """ + cdef vector[int] bn_result + if self.pcohptr != NULL: + bn_result = self.pcohptr.betti_numbers() + else: + print("betti_numbers function requires persistence function" + " to be launched first.") + return bn_result + + def persistent_betti_numbers(self, from_value, to_value): + """This function returns the persistent Betti numbers of the + simplicial complex. + + :param from_value: The persistence birth limit to be added in the + numbers (persistent birth <= from_value). + :type from_value: float. + :param to_value: The persistence death limit to be added in the + numbers (persistent death > to_value). + :type to_value: float. + + :returns: The persistent Betti numbers ([B0, B1, ..., Bn]). + :rtype: list of int + + :note: persistent_betti_numbers function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[int] pbn_result + if self.pcohptr != NULL: + pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) + else: + print("persistent_betti_numbers function requires persistence function" + " to be launched first.") + return pbn_result + + def persistence_intervals_in_dimension(self, dimension): + """This function returns the persistence intervals of the simplicial + complex in a specific dimension. + + :param dimension: The specific dimension. + :type dimension: int. + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + + :note: intervals_in_dim function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[pair[double,double]] intervals_result + if self.pcohptr != NULL: + intervals_result = self.pcohptr.intervals_in_dimension(dimension) + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") + return np_array(intervals_result) + + def persistence_pairs(self): + """This function returns a list of persistence birth and death simplices pairs. + + :returns: A list of persistence simplices intervals. + :rtype: list of pair of list of int + + :note: persistence_pairs function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result + if self.pcohptr != NULL: + persistence_pairs_result = self.pcohptr.persistence_pairs() + else: + print("persistence_pairs function requires persistence function" + " to be launched first.") + return persistence_pairs_result + + def write_persistence_diagram(self, persistence_file=''): + """This function writes the persistence intervals of the simplicial + complex in a user given file name. + + :param persistence_file: The specific dimension. + :type persistence_file: string. + + :note: intervals_in_dim function requires + :func:`persistence()` + function to be launched first. 
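+
+        A minimal usage sketch on a SimplexTree ``st`` (``'diagram.pers'`` is
+        only an illustrative file name)::
+
+            st.persistence()
+            st.write_persistence_diagram('diagram.pers')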
+ """ + if self.pcohptr != NULL: + if persistence_file != '': + self.pcohptr.write_output_diagram(str.encode(persistence_file)) + else: + print("persistence_file must be specified") + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") diff --git a/src/cython/gudhi/strong_witness_complex.pyx b/src/cython/gudhi/strong_witness_complex.pyx new file mode 100644 index 00000000..4e3d1b67 --- /dev/null +++ b/src/cython/gudhi/strong_witness_complex.pyx @@ -0,0 +1,78 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface": + Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + +# StrongWitnessComplex python interface +cdef class StrongWitnessComplex: + """Constructs (strong) witness complex for a given table of nearest + landmarks with respect to witnesses. + """ + + cdef Strong_witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, nearest_landmark_table=None): + """StrongWitnessComplex constructor. + + :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. + `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to + witness w, and d is the (squared) distance between l and w. + :type nearest_landmark_table: list of list of pair of int and float + """ + + # The real cython constructor + def __cinit__(self, nearest_landmark_table=None): + if nearest_landmark_table is not None: + self.thisptr = new Strong_witness_complex_interface(nearest_landmark_table) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if StrongWitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): + """ + :param max_alpha_square: The maximum relaxation parameter. + Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. 
+ :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(stree_ptr, + max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + return stree diff --git a/src/cython/gudhi/subsampling.pyx b/src/cython/gudhi/subsampling.pyx new file mode 100644 index 00000000..1135c1fb --- /dev/null +++ b/src/cython/gudhi/subsampling.pyx @@ -0,0 +1,130 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.string cimport string +from libcpp cimport bool +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling": + vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points) + vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point) + vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points) + vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point) + vector[vector[double]] subsampling_n_random_points(vector[vector[double]] points, unsigned nb_points) + vector[vector[double]] subsampling_n_random_points_from_file(string off_file, unsigned nb_points) + vector[vector[double]] subsampling_sparsify_points(vector[vector[double]] points, double min_squared_dist) + vector[vector[double]] subsampling_sparsify_points_from_file(string off_file, double min_squared_dist) + +def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_point = ''): + """Subsample by a greedy strategy of iteratively adding the farthest point + from the current chosen point set to the subsampling. + The iteration starts with the landmark `starting point`. + + :param points: The input point set. + :type points: vector[vector[double]]. + + Or + + :param off_file: An OFF file style name. + :type off_file: string + + :param nb_points: Number of points of the subsample. + :type nb_points: unsigned. + :param starting_point: The iteration starts with the landmark `starting \ + point`,which is the index of the poit to start with. If not set, this \ + index is choosen randomly. + :type starting_point: unsigned. + :returns: The subsample point set. 
+ :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + if starting_point is '': + return subsampling_n_farthest_points_from_file(str.encode(off_file), + nb_points) + else: + return subsampling_n_farthest_points_from_file(str.encode(off_file), + nb_points, + starting_point) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty points + points=[] + if starting_point is '': + return subsampling_n_farthest_points(points, nb_points) + else: + return subsampling_n_farthest_points(points, nb_points, + starting_point) + +def pick_n_random_points(points=None, off_file='', nb_points=0): + """Subsample a point set by picking random vertices. + + :param points: The input point set. + :type points: vector[vector[double]]. + + Or + + :param off_file: An OFF file style name. + :type off_file: string + + :param nb_points: Number of points of the subsample. + :type nb_points: unsigned. + :returns: The subsample point set. + :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + return subsampling_n_random_points_from_file(str.encode(off_file), + nb_points) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty points + points=[] + return subsampling_n_random_points(points, nb_points) + +def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0): + """Outputs a subset of the input points so that the squared distance + between any two points is greater than or equal to min_squared_dist. + + :param points: The input point set. + :type points: vector[vector[double]]. + + Or + + :param off_file: An OFF file style name. + :type off_file: string + + :param min_squared_dist: Minimum squared distance separating the output \ + points. + :type min_squared_dist: float. + :returns: The subsample point set. + :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + return subsampling_sparsify_points_from_file(str.encode(off_file), + min_squared_dist) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty points + points=[] + return subsampling_sparsify_points(points, min_squared_dist) diff --git a/src/cython/gudhi/tangential_complex.pyx b/src/cython/gudhi/tangential_complex.pyx new file mode 100644 index 00000000..b2d55520 --- /dev/null +++ b/src/cython/gudhi/tangential_complex.pyx @@ -0,0 +1,177 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +from libc.stdint cimport intptr_t +import os + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Tangential_complex_interface.h" namespace "Gudhi": + cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface": + Tangential_complex_interface(int intrisic_dim, vector[vector[double]] points) + # bool from_file is a workaround for cython to find the correct signature + Tangential_complex_interface(int intrisic_dim, string off_file, bool from_file) + void compute_tangential_complex() except + + vector[double] get_point(unsigned vertex) + unsigned number_of_vertices() + unsigned number_of_simplices() + unsigned number_of_inconsistent_simplices() + unsigned number_of_inconsistent_stars() + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) + void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) + void set_max_squared_edge_length(double max_squared_edge_length) + +# TangentialComplex python interface +cdef class TangentialComplex: + """The class Tangential_complex represents a tangential complex. After the + computation of the complex, an optional post-processing called perturbation + can be run to attempt to remove inconsistencies. + """ + + cdef Tangential_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, intrisic_dim, points=None, off_file=''): + """TangentialComplex constructor. + + :param intrisic_dim: Intrinsic dimension of the manifold. + :type intrisic_dim: integer + + :param points: A list of points in d-Dimension. + :type points: list of list of double + + Or + + :param off_file: An OFF file style name. + :type off_file: string + """ + + # The real cython constructor + def __cinit__(self, intrisic_dim, points=None, off_file=''): + if off_file is not '': + if os.path.isfile(off_file): + self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty tangential construction + points=[] + self.thisptr = new Tangential_complex_interface(intrisic_dim, points) + + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if TangentialComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def compute_tangential_complex(self): + """This function computes the tangential complex. + + Raises: + ValueError: In debug mode, if the computed star dimension is too + low. Try to set a bigger maximal edge length value with + :func:`~gudhi.Tangential_complex.set_max_squared_edge_length` + if this happens. + """ + self.thisptr.compute_tangential_complex() + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int. + :returns: The point. + :rtype: list of float + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + + def num_vertices(self): + """ + :returns: The number of vertices. + :rtype: unsigned + """ + return self.thisptr.number_of_vertices() + + def num_simplices(self): + """ + :returns: Total number of simplices in stars (including duplicates that appear in several stars). 
+ :rtype: unsigned + """ + return self.thisptr.number_of_simplices() + + def num_inconsistent_simplices(self): + """ + :returns: The number of inconsistent simplices. + :rtype: unsigned + """ + return self.thisptr.number_of_inconsistent_simplices() + + def num_inconsistent_stars(self): + """ + :returns: The number of stars containing at least one inconsistent simplex. + :rtype: unsigned + """ + return self.thisptr.number_of_inconsistent_stars() + + def create_simplex_tree(self): + """Exports the complex into a simplex tree. + + :returns: A simplex tree created from the complex. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr + self.thisptr.create_simplex_tree(stree_ptr) + return stree + simplex_tree = SimplexTree() + self.thisptr.create_simplex_tree(simplex_tree.thisptr) + return simplex_tree + + def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0): + """Attempts to fix inconsistencies by perturbing the point positions. + + :param max_perturb: Maximum length of the translations used by the + perturbation. + :type max_perturb: double + :param time_limit: Time limit in seconds. If -1, no time limit is set. + :type time_limit: double + """ + self.thisptr.fix_inconsistencies_using_perturbation(max_perturb, + time_limit) + + def set_max_squared_edge_length(self, max_squared_edge_length): + """Sets the maximal possible squared edge length for the edges in the + triangulations. + + :param max_squared_edge_length: Maximal possible squared edge length. + :type max_squared_edge_length: double + + If the maximal edge length value is too low + :func:`~gudhi.Tangential_complex.compute_tangential_complex` + will throw an exception in debug mode. + """ + self.thisptr.set_max_squared_edge_length(max_squared_edge_length) diff --git a/src/cython/gudhi/witness_complex.pyx b/src/cython/gudhi/witness_complex.pyx new file mode 100644 index 00000000..c859877d --- /dev/null +++ b/src/cython/gudhi/witness_complex.pyx @@ -0,0 +1,78 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface": + Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + +# WitnessComplex python interface +cdef class WitnessComplex: + """Constructs (weak) witness complex for a given table of nearest landmarks + with respect to witnesses. 
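+
+    A minimal usage sketch (the two-witness, two-landmark table below is only
+    illustrative)::
+
+        nearest_landmark_table = [[(0, 0.0), (1, 0.1)],
+                                  [(1, 0.0), (0, 0.2)]]
+        witness_complex = WitnessComplex(nearest_landmark_table)
+        simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=0.25)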
+ """ + + cdef Witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, nearest_landmark_table=None): + """WitnessComplex constructor. + + :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. + `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to + witness w, and d is the (squared) distance between l and w. + :type nearest_landmark_table: list of list of pair of int and float + """ + + # The real cython constructor + def __cinit__(self, nearest_landmark_table=None): + if nearest_landmark_table is not None: + self.thisptr = new Witness_complex_interface(nearest_landmark_table) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if WitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): + """ + :param max_alpha_square: The maximum relaxation parameter. + Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(stree_ptr, + max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + return stree -- cgit v1.2.3 From b8b39d8411f4dfd1fc2a113a473b3f31cda9bb34 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 10 Jul 2019 15:06:58 +0200 Subject: Copy warning about max_alpha_square from C++ doc to python doc. --- src/cython/cython/alpha_complex.pyx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx index 249d51d0..26cde0a1 100644 --- a/src/cython/cython/alpha_complex.pyx +++ b/src/cython/cython/alpha_complex.pyx @@ -99,7 +99,9 @@ cdef class AlphaComplex: def create_simplex_tree(self, max_alpha_square=float('inf')): """ :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity. + simplices shall not exceed. Default is set to infinity, and + there is very little point using anything else since it does + not save time. :type max_alpha_square: float :returns: A simplex tree created from the Delaunay Triangulation. 
:rtype: SimplexTree -- cgit v1.2.3 From 003e9b3e127a3f34c03872b0cc314d0dcbc04bcf Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 2 Aug 2019 11:25:52 +0200 Subject: First version that compiles the simplex_tree module --- src/cython/gudhi/simplex_tree.pxd | 10 ++++++++++ src/cython/gudhi/simplex_tree.pyx | 11 +---------- src/cython/setup.py.in | 6 ++++-- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/cython/gudhi/simplex_tree.pxd b/src/cython/gudhi/simplex_tree.pxd index 25051295..5f86cfe2 100644 --- a/src/cython/gudhi/simplex_tree.pxd +++ b/src/cython/gudhi/simplex_tree.pxd @@ -44,3 +44,13 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": void remove_maximal_simplex(vector[int] simplex) bool prune_above_filtration(double filtration) bool make_filtration_non_decreasing() + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": + Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + void write_output_diagram(string diagram_file_name) + vector[pair[vector[int], vector[int]]] persistence_pairs() diff --git a/src/cython/gudhi/simplex_tree.pyx b/src/cython/gudhi/simplex_tree.pyx index e5f9e9d1..604328e9 100644 --- a/src/cython/gudhi/simplex_tree.pyx +++ b/src/cython/gudhi/simplex_tree.pyx @@ -1,5 +1,6 @@ from libc.stdint cimport intptr_t from numpy import array as np_array +from simplex_tree cimport * """ This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
@@ -15,16 +16,6 @@ __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": - Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - void write_output_diagram(string diagram_file_name) - vector[pair[vector[int], vector[int]]] persistence_pairs() - # SimplexTree python interface cdef class SimplexTree: """The simplex tree is an efficient and flexible data structure for diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in index 454be9af..f158f6cb 100644 --- a/src/cython/setup.py.in +++ b/src/cython/setup.py.in @@ -18,23 +18,25 @@ __license__ = "MIT" simplextree = Extension( "gudhi.simplextree", - sources = ['@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx',], + sources = ['@CMAKE_CURRENT_SOURCE_DIR@/gudhi/simplex_tree.pyx',], language = 'c++', extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@], extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@], libraries=[@GUDHI_CYTHON_LIBRARIES@], library_dirs=[@GUDHI_CYTHON_LIBRARY_DIRS@], - include_dirs = [numpy_get_include(), @GUDHI_CYTHON_INCLUDE_DIRS@], + include_dirs = [numpy_get_include(), '@CMAKE_CURRENT_SOURCE_DIR@/gudhi/', @GUDHI_CYTHON_INCLUDE_DIRS@], runtime_library_dirs=[@GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS@], ) setup( name = 'gudhi', + packages=["gudhi",], author='GUDHI Editorial Board', author_email='gudhi-contact@lists.gforge.inria.fr', version='@GUDHI_VERSION@', url='http://gudhi.gforge.inria.fr/', ext_modules = cythonize(simplextree), +# cmdclass = {'build_ext': build_ext}, install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9',], ) -- cgit v1.2.3 From 48dfd910463c33e0e331f84e151ac7fe1f93dbe2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 6 Aug 2019 14:13:52 +0200 Subject: First version with only simplex tree that compiles. 
Cannot find how not to generate .cpp in sources --- .gitignore | 3 +++ src/cython/gudhi/__init__.py | 1 + src/cython/gudhi/simplex_tree.pyx | 2 +- src/cython/setup.py.in | 8 ++++---- 4 files changed, 9 insertions(+), 5 deletions(-) create mode 100644 src/cython/gudhi/__init__.py diff --git a/.gitignore b/.gitignore index 5c2195be..3c47ca9a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ # Classical CMake build directory build/ +# Generated by Cython +src/cython/gudhi/simplex_tree.cpp + # Generated by tests data/points/COIL_database/lucky_cat.off_dist data/points/COIL_database/lucky_cat.off_sc.dot diff --git a/src/cython/gudhi/__init__.py b/src/cython/gudhi/__init__.py new file mode 100644 index 00000000..fde749eb --- /dev/null +++ b/src/cython/gudhi/__init__.py @@ -0,0 +1 @@ +# Fake empty __init__.py for cython to accept this directory as a Python package diff --git a/src/cython/gudhi/simplex_tree.pyx b/src/cython/gudhi/simplex_tree.pyx index 604328e9..51134681 100644 --- a/src/cython/gudhi/simplex_tree.pyx +++ b/src/cython/gudhi/simplex_tree.pyx @@ -1,6 +1,6 @@ from libc.stdint cimport intptr_t from numpy import array as np_array -from simplex_tree cimport * +cimport simplex_tree """ This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in index f158f6cb..3c37664f 100644 --- a/src/cython/setup.py.in +++ b/src/cython/setup.py.in @@ -16,8 +16,8 @@ __author__ = "GUDHI Editorial Board" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -simplextree = Extension( - "gudhi.simplextree", +simplex_tree = Extension( + "gudhi.simplex_tree", sources = ['@CMAKE_CURRENT_SOURCE_DIR@/gudhi/simplex_tree.pyx',], language = 'c++', extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@], @@ -35,8 +35,8 @@ setup( author_email='gudhi-contact@lists.gforge.inria.fr', version='@GUDHI_VERSION@', url='http://gudhi.gforge.inria.fr/', - ext_modules = cythonize(simplextree), -# cmdclass = {'build_ext': build_ext}, + ext_modules = cythonize(simplex_tree), +# cmdclass={'build_ext': Cython.Build.build_ext}, install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9',], ) -- cgit v1.2.3 From 9b40c817277fa1de6c2b2e7b796ad0157ace4c61 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 6 Aug 2019 17:11:49 +0200 Subject: Add all modules that do not depend on Simplex tree --- .gitignore | 9 +++++++++ src/cython/CMakeLists.txt | 30 ++++++++++++++++-------------- src/cython/gudhi/__init__.py.in | 2 +- src/cython/setup.py.in | 39 +++++++++++++++++++++++++-------------- 4 files changed, 51 insertions(+), 29 deletions(-) diff --git a/.gitignore b/.gitignore index 3c47ca9a..2b1817a4 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,15 @@ build/ # Generated by Cython src/cython/gudhi/simplex_tree.cpp +src/cython/gudhi/alpha_complex.cpp +src/cython/gudhi/bottleneck_distance.cpp +src/cython/gudhi/cubical_complex.cpp +src/cython/gudhi/euclidean_strong_witness_complex.cpp +src/cython/gudhi/off_reader.cpp +src/cython/gudhi/periodic_cubical_complex.cpp +src/cython/gudhi/reader_utils.cpp +src/cython/gudhi/rips_complex.cpp +src/cython/gudhi/subsampling.cpp # Generated by tests data/points/COIL_database/lucky_cat.off_dist diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index cd99f70b..12eec9d9 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -26,7 +26,7 @@ function( 
add_gudhi_py_test THE_TEST ) endfunction( add_gudhi_py_test ) # Set gudhi.__debug_info__ -# WARNING : to be done before gudhi.pyx.in configure_file +# WARNING : to be done before setup.py.in configure_file function( add_gudhi_debug_info DEBUG_INFO ) set(GUDHI_CYTHON_DEBUG_INFO "${GUDHI_CYTHON_DEBUG_INFO} \"${DEBUG_INFO}\\n\" \\\n" PARENT_SCOPE) endfunction( add_gudhi_debug_info ) @@ -94,20 +94,24 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ") endif (EIGEN3_FOUND) + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'off_reader', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'simplex_tree', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'rips_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'cubical_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'periodic_cubical_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'reader_utils', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'witness_complex', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ") if (NOT CGAL_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'") - set(GUDHI_CYTHON_NERVE_GIC "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/nerve_gic.pyx'") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'bottleneck_distance', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'nerve_gic', ") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'") - set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'") - endif () - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'") - endif () - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX - "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'subsampling', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'tangential_complex', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'alpha_complex', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ") + #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ") endif () if(CGAL_FOUND) @@ -190,8 +194,6 @@ if(PYTHONINTERP_FOUND) # Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention configure_file(setup.py.in "${CMAKE_CURRENT_BINARY_DIR}/setup.py" @ONLY) - # Generate gudhi.pyx - Gudhi cython file - configure_file(gudhi.pyx.in "${CMAKE_CURRENT_BINARY_DIR}/gudhi.pyx" @ONLY) # Generate gudhi/__init__.py file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi") diff --git a/src/cython/gudhi/__init__.py.in b/src/cython/gudhi/__init__.py.in index 60ad7865..b2d2d3d7 100644 --- a/src/cython/gudhi/__init__.py.in +++ 
b/src/cython/gudhi/__init__.py.in @@ -10,7 +10,7 @@ from importlib import import_module - YYYY/MM Author: Description of the modification """ -__author__ = "Vincent Rouvreau" +__author__ = "GUDHI Editorial Board" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "https://gudhi.inria.fr/licensing/" __version__ = "@GUDHI_VERSION@" diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in index 3c37664f..fd4307e3 100644 --- a/src/cython/setup.py.in +++ b/src/cython/setup.py.in @@ -12,21 +12,33 @@ from numpy import get_include as numpy_get_include - YYYY/MM Author: Description of the modification """ -__author__ = "GUDHI Editorial Board" +__author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -simplex_tree = Extension( - "gudhi.simplex_tree", - sources = ['@CMAKE_CURRENT_SOURCE_DIR@/gudhi/simplex_tree.pyx',], - language = 'c++', - extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@], - extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@], - libraries=[@GUDHI_CYTHON_LIBRARIES@], - library_dirs=[@GUDHI_CYTHON_LIBRARY_DIRS@], - include_dirs = [numpy_get_include(), '@CMAKE_CURRENT_SOURCE_DIR@/gudhi/', @GUDHI_CYTHON_INCLUDE_DIRS@], - runtime_library_dirs=[@GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS@], -) +modules = [@GUDHI_CYTHON_MODULES_TO_COMPILE@] + +source_dir='@CMAKE_CURRENT_SOURCE_DIR@/gudhi/' +extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@] +extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@] +libraries=[@GUDHI_CYTHON_LIBRARIES@] +library_dirs=[@GUDHI_CYTHON_LIBRARY_DIRS@] +include_dirs = [numpy_get_include(), '@CMAKE_CURRENT_SOURCE_DIR@/gudhi/', @GUDHI_CYTHON_INCLUDE_DIRS@] +runtime_library_dirs=[@GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS@] + +# Create ext_modules list from module list +ext_modules = [] +for module in modules: + ext_modules.append(Extension( + 'gudhi.' 
+ module, + sources = [source_dir + module + '.pyx',], + language = 'c++', + extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, + libraries=libraries, + library_dirs=library_dirs, + include_dirs=include_dirs, + runtime_library_dirs=runtime_library_dirs,)) setup( name = 'gudhi', @@ -35,8 +47,7 @@ setup( author_email='gudhi-contact@lists.gforge.inria.fr', version='@GUDHI_VERSION@', url='http://gudhi.gforge.inria.fr/', - ext_modules = cythonize(simplex_tree), -# cmdclass={'build_ext': Cython.Build.build_ext}, + ext_modules = cythonize(ext_modules), install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9',], ) -- cgit v1.2.3 From d7e1a3a81dfa91a29e433887fcfdc73310f8db32 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 7 Aug 2019 11:34:57 +0200 Subject: Add the rest of modules --- .gitignore | 5 ++++ src/cython/CMakeLists.txt | 16 +++++------ src/cython/gudhi.pyx.in | 33 ---------------------- .../gudhi/euclidean_strong_witness_complex.pyx | 14 ++++++--- src/cython/gudhi/euclidean_witness_complex.pyx | 14 ++++++--- src/cython/gudhi/nerve_gic.pyx | 12 ++++++-- src/cython/gudhi/tangential_complex.pyx | 3 -- 7 files changed, 42 insertions(+), 55 deletions(-) delete mode 100644 src/cython/gudhi.pyx.in diff --git a/.gitignore b/.gitignore index 2b1817a4..8f8a8e6d 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,11 @@ src/cython/gudhi/periodic_cubical_complex.cpp src/cython/gudhi/reader_utils.cpp src/cython/gudhi/rips_complex.cpp src/cython/gudhi/subsampling.cpp +src/cython/gudhi/euclidean_witness_complex.cpp +src/cython/gudhi/nerve_gic.cpp +src/cython/gudhi/strong_witness_complex.cpp +src/cython/gudhi/tangential_complex.cpp +src/cython/gudhi/witness_complex.cpp # Generated by tests data/points/COIL_database/lucky_cat.off_dist diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index 12eec9d9..dc0dd59d 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -100,18 +100,18 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'cubical_complex', ") set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'periodic_cubical_complex', ") set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'reader_utils', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'witness_complex', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'witness_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ") if (NOT CGAL_VERSION VERSION_LESS 4.11.0) - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'bottleneck_distance', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'nerve_gic', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'bottleneck_distance', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'nerve_gic', ") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'subsampling', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'tangential_complex', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'alpha_complex', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE 
"${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ") - #set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'tangential_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'alpha_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ") endif () if(CGAL_FOUND) diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in deleted file mode 100644 index 1c380308..00000000 --- a/src/cython/gudhi.pyx.in +++ /dev/null @@ -1,33 +0,0 @@ -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016-2019 Inria" -__license__ = "https://gudhi.inria.fr/licensing/" -__version__ = "@GUDHI_VERSION@" -# This variable is used by doctest to find files -__root_source_dir__ = "@CMAKE_SOURCE_DIR@" -__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@ - -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/off_reader.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/rips_complex.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/cubical_complex.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/periodic_cubical_complex.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/persistence_graphical_tools.py' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/reader_utils.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/witness_complex.pyx' -include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx' -@GUDHI_CYTHON_ALPHA_COMPLEX@ -@GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX@ -@GUDHI_CYTHON_SUBSAMPLING@ -@GUDHI_CYTHON_TANGENTIAL_COMPLEX@ -@GUDHI_CYTHON_BOTTLENECK_DISTANCE@ -@GUDHI_CYTHON_NERVE_GIC@ diff --git a/src/cython/gudhi/euclidean_strong_witness_complex.pyx b/src/cython/gudhi/euclidean_strong_witness_complex.pyx index 26bd8375..465635c4 100644 --- a/src/cython/gudhi/euclidean_strong_witness_complex.pyx +++ b/src/cython/gudhi/euclidean_strong_witness_complex.pyx @@ -1,6 +1,10 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree """ This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. @@ -65,12 +69,14 @@ cdef class EuclideanStrongWitnessComplex: :returns: A simplex tree created from the Delaunay Triangulation. 
:rtype: SimplexTree """ - simplex_tree = SimplexTree() + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr if limit_dimension is not -1: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square, limit_dimension) else: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + return stree def get_point(self, vertex): """This function returns the point corresponding to a given vertex. diff --git a/src/cython/gudhi/euclidean_witness_complex.pyx b/src/cython/gudhi/euclidean_witness_complex.pyx index e687c6f3..92d54eb5 100644 --- a/src/cython/gudhi/euclidean_witness_complex.pyx +++ b/src/cython/gudhi/euclidean_witness_complex.pyx @@ -1,6 +1,10 @@ from cython cimport numeric from libcpp.vector cimport vector from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree """ This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. @@ -65,12 +69,14 @@ cdef class EuclideanWitnessComplex: :returns: A simplex tree created from the Delaunay Triangulation. :rtype: SimplexTree """ - simplex_tree = SimplexTree() + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr if limit_dimension is not -1: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension) + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square, limit_dimension) else: - self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square) - return simplex_tree + self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + return stree def get_point(self, vertex): """This function returns the point corresponding to a given vertex. diff --git a/src/cython/gudhi/nerve_gic.pyx b/src/cython/gudhi/nerve_gic.pyx index 3c8f1200..9fec626f 100644 --- a/src/cython/gudhi/nerve_gic.pyx +++ b/src/cython/gudhi/nerve_gic.pyx @@ -4,6 +4,10 @@ from libcpp.utility cimport pair from libcpp.string cimport string from libcpp cimport bool import os +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree """ This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. @@ -158,9 +162,11 @@ cdef class CoverComplex: :returns: A simplex tree created from the Cover complex. :rtype: SimplexTree """ - simplex_tree = SimplexTree() - self.thisptr.create_simplex_tree(simplex_tree.thisptr) - return simplex_tree + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr + self.thisptr.create_simplex_tree(stree_ptr) + return stree def find_simplices(self): """Computes the simplices of the simplicial complex. 
diff --git a/src/cython/gudhi/tangential_complex.pyx b/src/cython/gudhi/tangential_complex.pyx index b2d55520..9eb22115 100644 --- a/src/cython/gudhi/tangential_complex.pyx +++ b/src/cython/gudhi/tangential_complex.pyx @@ -147,9 +147,6 @@ cdef class TangentialComplex: cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr self.thisptr.create_simplex_tree(stree_ptr) return stree - simplex_tree = SimplexTree() - self.thisptr.create_simplex_tree(simplex_tree.thisptr) - return simplex_tree def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0): """Attempts to fix inconsistencies by perturbing the point positions. -- cgit v1.2.3 From c85464b9ea96d21731e710fa7d7a15e645d2c89a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 10:44:30 +0200 Subject: Rename bottleneck_distance module name as bottleneck. Cannot have gudhi.bottleneck_distance.bottleneck_distance and gudhi.bottleneck_distance. --- .gitignore | 2 +- src/cython/CMakeLists.txt | 6 ++-- src/cython/gudhi/bottleneck.pyx | 49 ++++++++++++++++++++++++++++++++ src/cython/gudhi/bottleneck_distance.pyx | 49 -------------------------------- 4 files changed, 53 insertions(+), 53 deletions(-) create mode 100644 src/cython/gudhi/bottleneck.pyx delete mode 100644 src/cython/gudhi/bottleneck_distance.pyx diff --git a/.gitignore b/.gitignore index 8f8a8e6d..31efc180 100644 --- a/.gitignore +++ b/.gitignore @@ -4,7 +4,7 @@ build/ # Generated by Cython src/cython/gudhi/simplex_tree.cpp src/cython/gudhi/alpha_complex.cpp -src/cython/gudhi/bottleneck_distance.cpp +src/cython/gudhi/bottleneck.cpp src/cython/gudhi/cubical_complex.cpp src/cython/gudhi/euclidean_strong_witness_complex.cpp src/cython/gudhi/off_reader.cpp diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index dc0dd59d..6dc11400 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -42,8 +42,8 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'reader_utils', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'witness_complex', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'strong_witness_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'persistence_graphical_tools' ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'bottleneck_distance', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'persistence_graphical_tools', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'bottleneck', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'subsampling', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'tangential_complex', ") @@ -103,7 +103,7 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'witness_complex', ") set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ") if (NOT CGAL_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'bottleneck_distance', ") + set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'bottleneck', ") set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'nerve_gic', ") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/cython/gudhi/bottleneck.pyx b/src/cython/gudhi/bottleneck.pyx new file mode 100644 index 00000000..4b378cbc --- /dev/null +++ b/src/cython/gudhi/bottleneck.pyx @@ -0,0 +1,49 @@ +from cython cimport numeric +from libcpp.vector cimport vector 
+from libcpp.utility cimport pair +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": + double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) + double bottleneck(vector[pair[double, double]], vector[pair[double, double]]) + +def bottleneck_distance(diagram_1, diagram_2, e=None): + """This function returns the point corresponding to a given vertex. + + :param diagram_1: The first diagram. + :type diagram_1: vector[pair[double, double]] + :param diagram_2: The second diagram. + :type diagram_2: vector[pair[double, double]] + :param e: If `e` is 0, this uses an expensive algorithm to compute the + exact distance. + If `e` is not 0, it asks for an additive `e`-approximation, and + currently also allows a small multiplicative error (the last 2 or 3 + bits of the mantissa may be wrong). This version of the algorithm takes + advantage of the limited precision of `double` and is usually a lot + faster to compute, whatever the value of `e`. + + Thus, by default, `e` is the smallest positive double. + :type e: float + :rtype: float + :returns: the bottleneck distance. + """ + if e is None: + # Default value is the smallest double value (not 0, 0 is for exact version) + return bottleneck(diagram_1, diagram_2) + else: + # Can be 0 for exact version + return bottleneck(diagram_1, diagram_2, e) diff --git a/src/cython/gudhi/bottleneck_distance.pyx b/src/cython/gudhi/bottleneck_distance.pyx deleted file mode 100644 index 4b378cbc..00000000 --- a/src/cython/gudhi/bottleneck_distance.pyx +++ /dev/null @@ -1,49 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": - double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) - double bottleneck(vector[pair[double, double]], vector[pair[double, double]]) - -def bottleneck_distance(diagram_1, diagram_2, e=None): - """This function returns the point corresponding to a given vertex. - - :param diagram_1: The first diagram. - :type diagram_1: vector[pair[double, double]] - :param diagram_2: The second diagram. - :type diagram_2: vector[pair[double, double]] - :param e: If `e` is 0, this uses an expensive algorithm to compute the - exact distance. - If `e` is not 0, it asks for an additive `e`-approximation, and - currently also allows a small multiplicative error (the last 2 or 3 - bits of the mantissa may be wrong). 
This version of the algorithm takes - advantage of the limited precision of `double` and is usually a lot - faster to compute, whatever the value of `e`. - - Thus, by default, `e` is the smallest positive double. - :type e: float - :rtype: float - :returns: the bottleneck distance. - """ - if e is None: - # Default value is the smallest double value (not 0, 0 is for exact version) - return bottleneck(diagram_1, diagram_2) - else: - # Can be 0 for exact version - return bottleneck(diagram_1, diagram_2, e) -- cgit v1.2.3 From 5b2bbb23368313f4f1f4ff843d59071ab159b495 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 11:28:20 +0200 Subject: Add persistence_graphical_tools mechanism --- src/cython/CMakeLists.txt | 4 +++- src/cython/gudhi/persistence_graphical_tools.py | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt index 6dc11400..ab20c6e6 100644 --- a/src/cython/CMakeLists.txt +++ b/src/cython/CMakeLists.txt @@ -42,7 +42,6 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'reader_utils', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'witness_complex', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'strong_witness_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'persistence_graphical_tools', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'bottleneck', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'subsampling', ") @@ -199,6 +198,9 @@ if(PYTHONINTERP_FOUND) file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi") configure_file("gudhi/__init__.py.in" "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY) + # Other .py files + file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + add_custom_command( OUTPUT gudhi.so WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} diff --git a/src/cython/gudhi/persistence_graphical_tools.py b/src/cython/gudhi/persistence_graphical_tools.py index 34803222..181bc8ea 100644 --- a/src/cython/gudhi/persistence_graphical_tools.py +++ b/src/cython/gudhi/persistence_graphical_tools.py @@ -2,6 +2,9 @@ from os import path from math import isfinite import numpy as np +from gudhi.reader_utils import read_persistence_intervals_in_dimension +from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension + """ This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
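The renamed bottleneck module keeps bottleneck_distance as its Python entry point, with the e parameter semantics documented above: e=0 requests the exact (expensive) algorithm, any other value an additive e-approximation, and the default is the smallest positive double. A hedged usage sketch, assuming gudhi was built with CGAL >= 4.11.0 so that the module is compiled:

    import gudhi

    # Two small persistence diagrams given as (birth, death) pairs.
    diag_1 = [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974)]
    diag_2 = [(2.8, 4.45), (9.5, 14.1)]

    # Default e: smallest positive double, the fast approximate version.
    print(gudhi.bottleneck_distance(diag_1, diag_2))
    # e=0: exact bottleneck distance.
    print(gudhi.bottleneck_distance(diag_1, diag_2, 0.0))
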
Author(s): Vincent Rouvreau, Bertrand Michel -- cgit v1.2.3 From d925d9aea81a60d149d7e2658c57658db4bf4deb Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 16:28:09 +0200 Subject: try to debug windows bug --- .appveyor.yml | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 31eb48d4..a962bea7 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,17 +13,17 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: - - target: Examples - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" + # - target: Examples + # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + # PYTHON: "C:\\Python37-x64" - - target: UnitaryTests - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" + # - target: UnitaryTests + # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + # PYTHON: "C:\\Python37-x64" - - target: Utilities - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" + # - target: Utilities + # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF + # PYTHON: "C:\\Python37-x64" - target: Python CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" @@ -56,7 +56,8 @@ build_script: - if [%target%]==[Python] ( cd src/cython & python setup.py install & - MSBuild RUN_TESTS.vcxproj + python -c "import gudhi; print('gudhi.__version__')" + #MSBuild RUN_TESTS.vcxproj ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 69a5dd0bcccf5cd4f5bdca6f6224a7d0e50f6304 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 16:45:34 +0200 Subject: try to debug windows bug --- .appveyor.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index a962bea7..d02e543e 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -57,7 +57,6 @@ build_script: cd src/cython & python setup.py install & python -c "import gudhi; print('gudhi.__version__')" - #MSBuild RUN_TESTS.vcxproj ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 08a55d55f1e874dfaebe512838a0ef7046078522 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 17:15:34 +0200 Subject: try to debug windows bug --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index d02e543e..35acfda2 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -56,7 +56,7 @@ build_script: - if [%target%]==[Python] ( cd src/cython & python setup.py install & - python -c "import gudhi; print('gudhi.__version__')" + python -c "import gudhi; print(gudhi.__version__)" ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From ffff6615fe8e444f735767757f669f268542336e Mon Sep 17 00:00:00 2001 
From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 17:41:00 +0200 Subject: try to debug windows bug --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 35acfda2..e2a0fd29 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -56,7 +56,7 @@ build_script: - if [%target%]==[Python] ( cd src/cython & python setup.py install & - python -c "import gudhi; print(gudhi.__version__)" + ctest -j 1 ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 2a3423cb1fd5c666ad31c58c23bd394f3f13020d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 8 Aug 2019 17:51:11 +0200 Subject: ctest requires -C on Windows --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index e2a0fd29..178c5297 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -56,7 +56,7 @@ build_script: - if [%target%]==[Python] ( cd src/cython & python setup.py install & - ctest -j 1 + ctest -j 1 -C Release ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 42776a519c13573c8a38fa71d6316576e0ee4780 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 9 Aug 2019 07:48:20 +0200 Subject: Add verbose for ctest --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 178c5297..6fb396e7 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -56,7 +56,7 @@ build_script: - if [%target%]==[Python] ( cd src/cython & python setup.py install & - ctest -j 1 -C Release + ctest -j 1 -C Release -V ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From fe70d07a0e0669c3273165911d4236f956bb959d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 9 Aug 2019 08:51:21 +0200 Subject: Try with cython.sln --- .appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 6fb396e7..662e5c10 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -55,8 +55,8 @@ build_script: - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( cd src/cython & - python setup.py install & - ctest -j 1 -C Release -V + MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & + ctest -j 1 -C Release -R test_simplex_tree_py_test -V ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 0f1be95b10de5998b0adb920aab0cc0a95881886 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 9 Aug 2019 08:59:46 +0200 Subject: Seems to work. 
try all tests --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 662e5c10..f625b384 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -56,7 +56,7 @@ build_script: - if [%target%]==[Python] ( cd src/cython & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & - ctest -j 1 -C Release -R test_simplex_tree_py_test -V + ctest -j 1 -C Release ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release -E diff_files -- cgit v1.2.3 From 51a38f313f633e1c593d77319f3752a010da782c Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 9 Aug 2019 09:20:18 +0200 Subject: Rollback comments as Python version works --- .appveyor.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index f625b384..ab943fb7 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,17 +13,17 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: - # - target: Examples - # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - # PYTHON: "C:\\Python37-x64" + - target: Examples + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - # - target: UnitaryTests - # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - # PYTHON: "C:\\Python37-x64" + - target: UnitaryTests + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - # - target: Utilities - # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - # PYTHON: "C:\\Python37-x64" + - target: Utilities + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - target: Python CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" -- cgit v1.2.3 From f057131902f352f8ccc30a0bacdfc69c5d21384b Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 10 Jul 2019 15:51:10 +0200 Subject: More max_alpha_square hiding/warning. Warn about alternate definition of Rips. --- src/cython/doc/alpha_complex_user.rst | 7 ++++--- src/cython/doc/rips_complex_user.rst | 4 +++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/cython/doc/alpha_complex_user.rst b/src/cython/doc/alpha_complex_user.rst index d1e9c7cd..f9662a6d 100644 --- a/src/cython/doc/alpha_complex_user.rst +++ b/src/cython/doc/alpha_complex_user.rst @@ -28,7 +28,7 @@ This example builds the Delaunay triangulation from the given points, and initia import gudhi alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]]) - simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0) + simplex_tree = alpha_complex.create_simplex_tree() result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ repr(simplex_tree.num_simplices()) + ' simplices - ' + \ repr(simplex_tree.num_vertices()) + ' vertices.' 
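With persistence_graphical_tools now shipped as a plain Python file and create_simplex_tree() callable without a threshold, the usual end-to-end pipeline stays a few lines of Python. A hedged sketch (assuming a gudhi build with CGAL and matplotlib available; plot_persistence_diagram is one of the helpers defined in persistence_graphical_tools.py):

    import gudhi
    import matplotlib.pyplot as plt

    points = [[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]]
    stree = gudhi.AlphaComplex(points=points).create_simplex_tree()
    diag = stree.persistence()  # list of (dimension, (birth, death)) pairs
    gudhi.plot_persistence_diagram(diag)
    plt.show()
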
@@ -146,8 +146,9 @@ Prune above given filtration value ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()` -in the `C++ version `_). -In the following example, the value is given by the user as argument of the program. +in the `C++ version `_). Note that this does not provide any kind +of speed-up, since we always first build the full filtered complex, so it is recommended not to use `max_alpha_square`. +In the following example, a threshold of 59 is used. Example from OFF file diff --git a/src/cython/doc/rips_complex_user.rst b/src/cython/doc/rips_complex_user.rst index 1d340dbe..3f6b960d 100644 --- a/src/cython/doc/rips_complex_user.rst +++ b/src/cython/doc/rips_complex_user.rst @@ -19,7 +19,9 @@ The `Rips complex ` generalizes proximity (:math:`\varepsilon`-ball) graphs to higher dimensions. The vertices correspond to the input points, and a simplex is present if and only if its diameter is smaller than some parameter α. Considering all parameters α defines a filtered simplicial complex, where the filtration value of a simplex is its diameter. -The filtration can be restricted to values α smaller than some threshold, to reduce its size. +The filtration can be restricted to values α smaller than some threshold, to reduce its size. Beware that some +people define the Rips complex using a bound of 2α instead of α, particularly when comparing it to an ambient +Čech complex. They end up with the same combinatorial object, but filtration values which are half of ours. The input discrete metric space can be provided as a point cloud plus a distance function, or as a distance matrix. -- cgit v1.2.3 From 331fee1010245f487abfa0381dd9e9936654a016 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 14 Aug 2019 08:21:22 +0200 Subject: Fix #35 Modify Copyright as GUDHI is not only a C++ library --- .../concept/SimplicialComplexForAlpha3d.h | 6 ++---- src/Alpha_complex/doc/COPYRIGHT | 19 ++++++------------ src/Alpha_complex/doc/Intro_alpha_complex.h | 6 ++---- src/Alpha_complex/include/gudhi/Alpha_complex.h | 6 ++---- src/Alpha_complex/include/gudhi/Alpha_complex_3d.h | 6 ++---- .../include/gudhi/Alpha_complex_options.h | 6 ++---- .../test/Alpha_complex_3d_unit_test.cpp | 6 ++---- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 6 ++---- .../test/Periodic_alpha_complex_3d_unit_test.cpp | 6 ++---- .../test/Weighted_alpha_complex_3d_unit_test.cpp | 6 ++---- ...eighted_periodic_alpha_complex_3d_unit_test.cpp | 6 ++---- .../utilities/alpha_complex_3d_persistence.cpp | 6 ++---- .../utilities/alpha_complex_persistence.cpp | 6 ++---- src/Bitmap_cubical_complex/doc/COPYRIGHT | 21 +++++++------------- .../doc/Gudhi_Cubical_Complex_doc.h | 6 ++---- .../example/Random_bitmap_cubical_complex.cpp | 6 ++---- .../include/gudhi/Bitmap_cubical_complex.h | 6 ++---- .../include/gudhi/Bitmap_cubical_complex/counter.h | 6 ++---- .../include/gudhi/Bitmap_cubical_complex_base.h | 6 ++---- ...cal_complex_periodic_boundary_conditions_base.h | 6 ++---- src/Bitmap_cubical_complex/test/Bitmap_test.cpp | 6 ++---- .../utilities/cubical_complex_persistence.cpp | 6 ++---- .../periodic_cubical_complex_persistence.cpp | 6 ++---- .../benchmark/bottleneck_chrono.cpp | 6 ++---- .../concept/Persistence_diagram.h | 6 ++---- src/Bottleneck_distance/doc/COPYRIGHT | 21 +++++++------------- .../doc/Intro_bottleneck_distance.h | 6 ++---- .../alpha_rips_persistence_bottleneck_distance.cpp | 6 ++---- 
src/Bottleneck_distance/include/gudhi/Bottleneck.h | 6 ++---- .../include/gudhi/Graph_matching.h | 6 ++---- .../include/gudhi/Internal_point.h | 6 ++---- .../include/gudhi/Neighbors_finder.h | 6 ++---- .../include/gudhi/Persistence_graph.h | 6 ++---- .../test/bottleneck_unit_test.cpp | 6 ++---- .../utilities/bottleneck_distance.cpp | 6 ++---- .../benchmark/cech_complex_benchmark.cpp | 6 ++---- src/Cech_complex/doc/COPYRIGHT | 19 ++++++------------ src/Cech_complex/doc/Intro_cech_complex.h | 6 ++---- .../example/cech_complex_step_by_step.cpp | 6 ++---- src/Cech_complex/include/gudhi/Cech_complex.h | 6 ++---- .../include/gudhi/Cech_complex_blocker.h | 6 ++---- src/Cech_complex/test/test_cech_complex.cpp | 6 ++---- src/Cech_complex/utilities/cech_persistence.cpp | 6 ++---- src/Contraction/doc/COPYRIGHT | 22 ++++++++------------- src/Contraction/example/Rips_contraction.cpp | 6 ++---- src/Contraction/include/gudhi/Edge_contraction.h | 6 ++---- .../include/gudhi/Skeleton_blocker_contractor.h | 6 ++---- src/GudhUI/gui/MainWindow.cpp | 6 ++---- src/GudhUI/gui/MainWindow.h | 6 ++---- src/GudhUI/gui/Menu_edge_contraction.cpp | 6 ++---- src/GudhUI/gui/Menu_edge_contraction.h | 6 ++---- src/GudhUI/gui/Menu_k_nearest_neighbors.cpp | 6 ++---- src/GudhUI/gui/Menu_k_nearest_neighbors.h | 6 ++---- src/GudhUI/utils/Is_manifold.h | 9 ++------- src/Nerve_GIC/doc/COPYRIGHT | 23 ++++++++-------------- src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 6 ++---- src/Nerve_GIC/example/CoordGIC.cpp | 6 ++---- src/Nerve_GIC/example/FuncGIC.cpp | 6 ++---- src/Nerve_GIC/include/gudhi/GIC.h | 6 ++---- src/Nerve_GIC/test/test_GIC.cpp | 6 ++---- src/Nerve_GIC/utilities/Nerve.cpp | 6 ++---- src/Nerve_GIC/utilities/VoronoiGIC.cpp | 6 ++---- .../concept/Real_valued_topological_data.h | 6 ++---- .../concept/Topological_data_with_averages.h | 6 ++---- .../concept/Topological_data_with_distances.h | 6 ++---- .../concept/Topological_data_with_scalar_product.h | 6 ++---- .../concept/Vectorized_topological_data.h | 6 ++---- .../doc/Persistence_representations_doc.h | 6 ++---- .../example/persistence_heat_maps.cpp | 6 ++---- .../example/persistence_intervals.cpp | 6 ++---- .../example/persistence_landscape.cpp | 6 ++---- .../example/persistence_landscape_on_grid.cpp | 6 ++---- .../example/persistence_vectors.cpp | 6 ++---- .../example/sliced_wasserstein.cpp | 6 ++---- .../include/gudhi/PSSK.h | 6 ++---- .../include/gudhi/Persistence_heat_maps.h | 6 ++---- .../include/gudhi/Persistence_intervals.h | 6 ++---- .../gudhi/Persistence_intervals_with_distances.h | 6 ++---- .../include/gudhi/Persistence_landscape.h | 6 ++---- .../include/gudhi/Persistence_vectors.h | 6 ++---- .../include/gudhi/Sliced_Wasserstein.h | 6 ++---- .../gudhi/common_persistence_representations.h | 6 ++---- .../include/gudhi/read_persistence_from_file.h | 6 ++---- src/Persistence_representations/test/kernels.cpp | 6 ++---- .../test/persistence_heat_maps_test.cpp | 6 ++---- .../test/persistence_intervals_test.cpp | 6 ++---- .../persistence_intervals_with_distances_test.cpp | 6 ++---- .../test/persistence_lanscapes_on_grid_test.cpp | 6 ++---- .../test/persistence_lanscapes_test.cpp | 6 ++---- .../test/read_persistence_from_file_test.cpp | 6 ++---- .../test/vector_representation_test.cpp | 6 ++---- .../average_persistence_heat_maps.cpp | 6 ++---- .../compute_distance_of_persistence_heat_maps.cpp | 6 ++---- ...ute_scalar_product_of_persistence_heat_maps.cpp | 6 ++---- ...te_p_h_m_weighted_by_distance_from_diagonal.cpp | 6 ++---- 
...ate_p_h_m_weighted_by_squared_diag_distance.cpp | 6 ++---- .../create_persistence_heat_maps.cpp | 6 ++---- .../persistence_heat_maps/create_pssk.cpp | 6 ++---- .../plot_persistence_heat_map.cpp | 6 ++---- ...te_birth_death_range_in_persistence_diagram.cpp | 6 ++---- .../compute_bottleneck_distance.cpp | 6 ++---- .../compute_number_of_dominant_intervals.cpp | 6 ++---- .../plot_histogram_of_intervals_lengths.cpp | 6 ++---- .../plot_persistence_Betti_numbers.cpp | 6 ++---- .../plot_persistence_intervals.cpp | 6 ++---- .../persistence_landscapes/average_landscapes.cpp | 6 ++---- .../compute_distance_of_landscapes.cpp | 6 ++---- .../compute_scalar_product_of_landscapes.cpp | 6 ++---- .../persistence_landscapes/create_landscapes.cpp | 6 ++---- .../persistence_landscapes/plot_landscapes.cpp | 6 ++---- .../average_landscapes_on_grid.cpp | 6 ++---- .../compute_distance_of_landscapes_on_grid.cpp | 6 ++---- ...ompute_scalar_product_of_landscapes_on_grid.cpp | 6 ++---- .../create_landscapes_on_grid.cpp | 6 ++---- .../plot_landscapes_on_grid.cpp | 6 ++---- .../average_persistence_vectors.cpp | 6 ++---- .../compute_distance_of_persistence_vectors.cpp | 6 ++---- ...mpute_scalar_product_of_persistence_vectors.cpp | 6 ++---- .../create_persistence_vectors.cpp | 6 ++---- .../plot_persistence_vectors.cpp | 6 ++---- src/Persistent_cohomology/doc/COPYRIGHT | 21 +++++++------------- .../doc/Intro_persistent_cohomology.h | 6 ++---- .../example/custom_persistence_sort.cpp | 6 ++---- .../persistence_from_simple_simplex_tree.cpp | 6 ++---- .../example/plain_homology.cpp | 6 ++---- .../include/gudhi/Persistent_cohomology.h | 6 ++---- .../include/gudhi/Persistent_cohomology/Field_Zp.h | 6 ++---- .../gudhi/Persistent_cohomology/Multi_field.h | 6 ++---- .../Persistent_cohomology_column.h | 6 ++---- src/Rips_complex/doc/COPYRIGHT | 21 +++++++------------- src/Rips_complex/doc/Intro_rips_complex.h | 6 ++---- src/Rips_complex/include/gudhi/Rips_complex.h | 6 ++---- .../include/gudhi/Sparse_rips_complex.h | 6 ++---- src/Rips_complex/test/test_rips_complex.cpp | 6 ++---- .../rips_correlation_matrix_persistence.cpp | 6 ++---- .../utilities/rips_distance_matrix_persistence.cpp | 6 ++---- src/Rips_complex/utilities/rips_persistence.cpp | 6 ++---- .../utilities/sparse_rips_persistence.cpp | 6 ++---- src/Simplex_tree/doc/COPYRIGHT | 21 +++++++------------- src/Simplex_tree/doc/Intro_simplex_tree.h | 6 ++---- .../example/cech_complex_cgal_mini_sphere_3d.cpp | 6 ++---- ...e_alpha_shapes_3_simplex_tree_from_off_file.cpp | 6 ++---- .../example/graph_expansion_with_blocker.cpp | 6 ++---- src/Simplex_tree/example/mini_simplex_tree.cpp | 6 ++---- src/Simplex_tree/example/simple_simplex_tree.cpp | 6 ++---- .../example/simplex_tree_from_cliques_of_graph.cpp | 6 ++---- src/Simplex_tree/include/gudhi/Simplex_tree.h | 6 ++---- .../gudhi/Simplex_tree/Simplex_tree_iterators.h | 6 ++---- .../Simplex_tree_node_explicit_storage.h | 6 ++---- .../gudhi/Simplex_tree/Simplex_tree_siblings.h | 6 ++---- .../include/gudhi/Simplex_tree/indexing_tag.h | 6 ++---- .../test/simplex_tree_ctor_and_move_unit_test.cpp | 10 ++++++++++ .../simplex_tree_graph_expansion_unit_test.cpp | 10 ++++++++++ .../simplex_tree_iostream_operator_unit_test.cpp | 10 ++++++++++ .../test/simplex_tree_remove_unit_test.cpp | 10 ++++++++++ src/Simplex_tree/test/simplex_tree_unit_test.cpp | 10 ++++++++++ src/Skeleton_blocker/concept/SkeletonBlockerDS.h | 6 ++---- .../concept/SkeletonBlockerGeometricDS.h | 6 ++---- src/Skeleton_blocker/doc/COPYRIGHT | 22 ++++++++------------- 
.../include/gudhi/Skeleton_blocker.h | 6 ++---- .../Skeleton_blocker_complex_visitor.h | 6 ++---- .../Skeleton_blocker_link_superior.h | 6 ++---- .../Skeleton_blocker/Skeleton_blocker_off_io.h | 6 ++---- .../Skeleton_blocker_simple_geometric_traits.h | 6 ++---- .../Skeleton_blocker_simple_traits.h | 6 ++---- .../Skeleton_blocker/Skeleton_blocker_simplex.h | 6 ++---- .../Skeleton_blocker_sub_complex.h | 6 ++---- .../gudhi/Skeleton_blocker/internal/Top_faces.h | 6 ++---- .../Skeleton_blockers_blockers_iterators.h | 6 ++---- .../iterators/Skeleton_blockers_edges_iterators.h | 6 ++---- .../Skeleton_blockers_simplices_iterators.h | 6 ++---- .../Skeleton_blockers_triangles_iterators.h | 6 ++---- .../Skeleton_blockers_vertices_iterators.h | 6 ++---- .../include/gudhi/Skeleton_blocker_complex.h | 6 ++---- .../gudhi/Skeleton_blocker_geometric_complex.h | 6 ++---- .../include/gudhi/Skeleton_blocker_link_complex.h | 6 ++---- .../gudhi/Skeleton_blocker_simplifiable_complex.h | 6 ++---- .../test/test_skeleton_blocker_complex.cpp | 7 +++---- .../doc/Intro_spatial_searching.h | 6 ++---- .../include/gudhi/Kd_tree_search.h | 6 ++---- src/Spatial_searching/test/test_Kd_tree_search.cpp | 6 ++---- src/Subsampling/doc/Intro_subsampling.h | 6 ++---- .../include/gudhi/choose_n_farthest_points.h | 6 ++---- .../include/gudhi/pick_n_random_points.h | 6 ++---- src/Subsampling/include/gudhi/sparsify_point_set.h | 6 ++---- .../test/test_choose_n_farthest_points.cpp | 6 ++---- src/Subsampling/test/test_pick_n_random_points.cpp | 6 ++---- src/Subsampling/test/test_sparsify_point_set.cpp | 6 ++---- src/Tangential_complex/benchmark/RIB_exporter.h | 6 ++---- src/Tangential_complex/benchmark/XML_exporter.h | 6 ++---- src/Tangential_complex/doc/COPYRIGHT | 21 +++++++------------- .../doc/Intro_tangential_complex.h | 6 ++---- .../include/gudhi/Tangential_complex.h | 6 ++---- .../gudhi/Tangential_complex/Simplicial_complex.h | 6 ++---- .../include/gudhi/Tangential_complex/config.h | 6 ++---- .../include/gudhi/Tangential_complex/utilities.h | 6 ++---- .../test/test_tangential_complex.cpp | 6 ++---- src/Toplex_map/benchmark/benchmark_tm.cpp | 6 ++---- src/Toplex_map/doc/Intro_Toplex_map.h | 6 ++---- src/Toplex_map/example/simple_toplex_map.cpp | 6 ++---- src/Toplex_map/include/gudhi/Lazy_toplex_map.h | 6 ++---- src/Toplex_map/include/gudhi/Toplex_map.h | 6 ++---- src/Toplex_map/test/lazy_toplex_map_unit_test.cpp | 6 ++---- src/Toplex_map/test/toplex_map_unit_test.cpp | 6 ++---- src/Witness_complex/doc/COPYRIGHT | 21 +++++++------------- src/Witness_complex/example/generators.h | 6 ++---- .../include/gudhi/Active_witness/Active_witness.h | 6 ++---- .../gudhi/Active_witness/Active_witness_iterator.h | 6 ++---- .../gudhi/Euclidean_strong_witness_complex.h | 6 ++---- .../include/gudhi/Euclidean_witness_complex.h | 6 ++---- .../include/gudhi/Strong_witness_complex.h | 6 ++---- .../include/gudhi/Witness_complex.h | 6 ++---- .../include/gudhi/Witness_complex/all_faces_in.h | 6 ++---- .../utilities/strong_witness_persistence.cpp | 6 ++---- .../utilities/weak_witness_persistence.cpp | 6 ++---- .../Graph_simplicial_complex_benchmark.cpp | 6 ++---- src/common/include/gudhi/Debug_utils.h | 6 ++---- src/common/include/gudhi/Null_output_iterator.h | 6 ++---- src/common/include/gudhi/Points_3D_off_io.h | 6 ++---- src/common/include/gudhi/Points_off_io.h | 6 ++---- src/common/include/gudhi/Simple_object_pool.h | 6 ++---- src/common/include/gudhi/Unitary_tests_utils.h | 6 ++---- src/common/include/gudhi/allocator.h | 6 ++---- 
src/common/include/gudhi/console_color.h | 6 ++---- src/common/include/gudhi/random_point_generators.h | 6 ++---- src/common/include/gudhi/reader_utils.h | 6 ++---- .../include/gudhi/writing_persistence_to_file.h | 6 ++---- src/common/test/test_distance_matrix_reader.cpp | 6 ++---- .../test/test_persistence_intervals_reader.cpp | 6 ++---- src/common/test/test_points_off_reader.cpp | 6 ++---- .../utilities/off_file_from_shape_generator.cpp | 6 ++---- src/cython/include/Alpha_complex_interface.h | 6 ++---- src/cython/include/Bottleneck_distance_interface.h | 6 ++---- src/cython/include/Cubical_complex_interface.h | 6 ++---- .../Euclidean_strong_witness_complex_interface.h | 6 ++---- .../include/Euclidean_witness_complex_interface.h | 6 ++---- src/cython/include/Nerve_gic_interface.h | 6 ++---- src/cython/include/Off_reader_interface.h | 6 ++---- .../include/Persistent_cohomology_interface.h | 6 ++---- src/cython/include/Reader_utils_interface.h | 6 ++---- src/cython/include/Rips_complex_interface.h | 6 ++---- src/cython/include/Simplex_tree_interface.h | 6 ++---- .../include/Strong_witness_complex_interface.h | 6 ++---- src/cython/include/Subsampling_interface.h | 6 ++---- src/cython/include/Tangential_complex_interface.h | 6 ++---- src/cython/include/Witness_complex_interface.h | 6 ++---- 246 files changed, 594 insertions(+), 1086 deletions(-) diff --git a/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h b/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h index 1dc8c037..3a6830ff 100644 --- a/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h +++ b/src/Alpha_complex/concept/SimplicialComplexForAlpha3d.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Alpha_complex/doc/COPYRIGHT b/src/Alpha_complex/doc/COPYRIGHT index 5f1d97cc..61f17f6d 100644 --- a/src/Alpha_complex/doc/COPYRIGHT +++ b/src/Alpha_complex/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. 
+If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index 5b332e47..b075d1fc 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index d5865671..cdc1ed1d 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h index 47407b38..9bff42b5 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_options.h b/src/Alpha_complex/include/gudhi/Alpha_complex_options.h index bf29039b..85c83672 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex_options.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex_options.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp index 8e4121ae..1102838a 100644 --- a/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index de0cf471..01e4cee3 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp index 4449e5ed..ac3791a4 100644 --- a/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp index 201dbce4..44deb930 100644 --- a/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp index 9d711c41..670c7799 100644 --- a/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp +++ b/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp index b9991b83..2272576e 100644 --- a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp +++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp index 12a8740e..fab7bd30 100644 --- a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp +++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/Bitmap_cubical_complex/doc/COPYRIGHT b/src/Bitmap_cubical_complex/doc/COPYRIGHT index 2b14dcb9..61f17f6d 100644 --- a/src/Bitmap_cubical_complex/doc/COPYRIGHT +++ b/src/Bitmap_cubical_complex/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. -Author(s): Pawel Dlotko +Author(s): Vincent Rouvreau Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h index 237f79ad..d2b9ccd6 100644 --- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h +++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp index 533aec91..46ea8f2e 100644 --- a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp +++ b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index 1954eb0c..37514dee 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h index 84d53778..fdcea230 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h index a0ad40fc..0d6299d2 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h index 4afed33c..edd794fe 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp index d3ed75d3..f18adb36 100644 --- a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp +++ b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp index 6cf0889f..a9792c2d 100644 --- a/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp +++ b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp index 700d90f4..fa97bac0 100644 --- a/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp +++ b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp b/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp index db3c9815..576d510b 100644 --- a/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp +++ b/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/concept/Persistence_diagram.h b/src/Bottleneck_distance/concept/Persistence_diagram.h index de8021cc..8c8761cb 100644 --- a/src/Bottleneck_distance/concept/Persistence_diagram.h +++ b/src/Bottleneck_distance/concept/Persistence_diagram.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/doc/COPYRIGHT b/src/Bottleneck_distance/doc/COPYRIGHT index 1c2016b1..61f17f6d 100644 --- a/src/Bottleneck_distance/doc/COPYRIGHT +++ b/src/Bottleneck_distance/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. -Author(s): François Godi +Author(s): Vincent Rouvreau Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h index d58392ae..bbc952e1 100644 --- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h +++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author: François Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp index c5d66121..6c0dc9bf 100644 --- a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp +++ b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2017 Inria diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h index 105f1a93..d31c82ee 100644 --- a/src/Bottleneck_distance/include/gudhi/Bottleneck.h +++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/include/gudhi/Graph_matching.h b/src/Bottleneck_distance/include/gudhi/Graph_matching.h index 6385ae30..7b7623ce 100644 --- a/src/Bottleneck_distance/include/gudhi/Graph_matching.h +++ b/src/Bottleneck_distance/include/gudhi/Graph_matching.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/include/gudhi/Internal_point.h b/src/Bottleneck_distance/include/gudhi/Internal_point.h index 9d268af3..f829943e 100644 --- a/src/Bottleneck_distance/include/gudhi/Internal_point.h +++ b/src/Bottleneck_distance/include/gudhi/Internal_point.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h index 8a75384c..c65e6082 100644 --- a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h +++ b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h index 3e82a4c9..f791e37c 100644 --- a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h +++ b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp index 5f20892c..3fc6fc7b 100644 --- a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp +++ b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Francois Godi * * Copyright (C) 2015 Inria diff --git a/src/Bottleneck_distance/utilities/bottleneck_distance.cpp b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp index fc03cb21..d88a8a0b 100644 --- a/src/Bottleneck_distance/utilities/bottleneck_distance.cpp +++ b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Authors: Francois Godi, small modifications by Pawel Dlotko * * Copyright (C) 2015 Inria diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index df12e06d..d2d71dbf 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Cech_complex/doc/COPYRIGHT b/src/Cech_complex/doc/COPYRIGHT index 5f1d97cc..61f17f6d 100644 --- a/src/Cech_complex/doc/COPYRIGHT +++ b/src/Cech_complex/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. 
+The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. Author(s): Vincent Rouvreau Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h index 250c91fa..90086de7 100644 --- a/src/Cech_complex/doc/Intro_cech_complex.h +++ b/src/Cech_complex/doc/Intro_cech_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index d9d17b26..b3d05697 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index cc69f35f..b0871e10 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index c0c03bb0..068cdde3 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 8df71b15..c6b15d7f 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index 78d47a5a..8cfe018b 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/Contraction/doc/COPYRIGHT b/src/Contraction/doc/COPYRIGHT index 5b606ac2..61f17f6d 100644 --- a/src/Contraction/doc/COPYRIGHT +++ b/src/Contraction/doc/COPYRIGHT @@ -1,18 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. -Author(s): David Salinas -Copyright (C) 2015 Inria +Author(s): Vincent Rouvreau -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +Copyright (C) 2015 Inria -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. 
\ No newline at end of file diff --git a/src/Contraction/example/Rips_contraction.cpp b/src/Contraction/example/Rips_contraction.cpp index c41a9d94..b5ce06c1 100644 --- a/src/Contraction/example/Rips_contraction.cpp +++ b/src/Contraction/example/Rips_contraction.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index 5cd024bd..6058d64b 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index 7a99548d..d9f8d9f4 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/gui/MainWindow.cpp b/src/GudhUI/gui/MainWindow.cpp index 05bd42c8..54415e6a 100644 --- a/src/GudhUI/gui/MainWindow.cpp +++ b/src/GudhUI/gui/MainWindow.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/gui/MainWindow.h b/src/GudhUI/gui/MainWindow.h index d9281e57..5f9ab2fc 100644 --- a/src/GudhUI/gui/MainWindow.h +++ b/src/GudhUI/gui/MainWindow.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/gui/Menu_edge_contraction.cpp b/src/GudhUI/gui/Menu_edge_contraction.cpp index 5a517cef..f2938d61 100644 --- a/src/GudhUI/gui/Menu_edge_contraction.cpp +++ b/src/GudhUI/gui/Menu_edge_contraction.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/gui/Menu_edge_contraction.h b/src/GudhUI/gui/Menu_edge_contraction.h index 7cbf60ee..f44da8d3 100644 --- a/src/GudhUI/gui/Menu_edge_contraction.h +++ b/src/GudhUI/gui/Menu_edge_contraction.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp b/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp index 69ed6c56..d050ee34 100644 --- a/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp +++ b/src/GudhUI/gui/Menu_k_nearest_neighbors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/gui/Menu_k_nearest_neighbors.h b/src/GudhUI/gui/Menu_k_nearest_neighbors.h index e29ffc0b..cfefde48 100644 --- a/src/GudhUI/gui/Menu_k_nearest_neighbors.h +++ b/src/GudhUI/gui/Menu_k_nearest_neighbors.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/GudhUI/utils/Is_manifold.h b/src/GudhUI/utils/Is_manifold.h index 276f4332..2eb79d21 100644 --- a/src/GudhUI/utils/Is_manifold.h +++ b/src/GudhUI/utils/Is_manifold.h @@ -1,10 +1,5 @@ -/* - * Is_manifold.h - * Created on: Jan 28, 2015 - * This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Nerve_GIC/doc/COPYRIGHT b/src/Nerve_GIC/doc/COPYRIGHT index 6b33053e..61f17f6d 100644 --- a/src/Nerve_GIC/doc/COPYRIGHT +++ b/src/Nerve_GIC/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. -Author(s): Mathieu Carrière +Author(s): Vincent Rouvreau -Copyright (C) 2017 Inria +Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h index d709baec..f9441b24 100644 --- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h +++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carriere * * Copyright (C) 2017 Inria diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp index b3a79233..fd9c224a 100644 --- a/src/Nerve_GIC/example/CoordGIC.cpp +++ b/src/Nerve_GIC/example/CoordGIC.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carrière * * Copyright (C) 2017 Inria diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp index 2f0b5f2b..5a323795 100644 --- a/src/Nerve_GIC/example/FuncGIC.cpp +++ b/src/Nerve_GIC/example/FuncGIC.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Mathieu Carrière * * Copyright (C) 2017 Inria diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h index d98deeac..a1621ad9 100644 --- a/src/Nerve_GIC/include/gudhi/GIC.h +++ b/src/Nerve_GIC/include/gudhi/GIC.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: Mathieu Carriere * * Copyright (C) 2017 Inria diff --git a/src/Nerve_GIC/test/test_GIC.cpp b/src/Nerve_GIC/test/test_GIC.cpp index 06b3f832..9d326234 100644 --- a/src/Nerve_GIC/test/test_GIC.cpp +++ b/src/Nerve_GIC/test/test_GIC.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carrière * * Copyright (C) 2017 Inria diff --git a/src/Nerve_GIC/utilities/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp index ef8e2d7e..d34e922c 100644 --- a/src/Nerve_GIC/utilities/Nerve.cpp +++ b/src/Nerve_GIC/utilities/Nerve.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carrière * * Copyright (C) 2017 Inria diff --git a/src/Nerve_GIC/utilities/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp index fabe35c9..0182c948 100644 --- a/src/Nerve_GIC/utilities/VoronoiGIC.cpp +++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carrière * * Copyright (C) 2017 Inria diff --git a/src/Persistence_representations/concept/Real_valued_topological_data.h b/src/Persistence_representations/concept/Real_valued_topological_data.h index 9b29a9bd..12aceab4 100644 --- a/src/Persistence_representations/concept/Real_valued_topological_data.h +++ b/src/Persistence_representations/concept/Real_valued_topological_data.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/concept/Topological_data_with_averages.h b/src/Persistence_representations/concept/Topological_data_with_averages.h index b1b5ca80..d8fe98b7 100644 --- a/src/Persistence_representations/concept/Topological_data_with_averages.h +++ b/src/Persistence_representations/concept/Topological_data_with_averages.h @@ -1,8 +1,6 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/concept/Topological_data_with_distances.h b/src/Persistence_representations/concept/Topological_data_with_distances.h index 87c3d158..8efebf7c 100644 --- a/src/Persistence_representations/concept/Topological_data_with_distances.h +++ b/src/Persistence_representations/concept/Topological_data_with_distances.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/concept/Topological_data_with_scalar_product.h b/src/Persistence_representations/concept/Topological_data_with_scalar_product.h index 063f37ed..0b43b4f7 100644 --- a/src/Persistence_representations/concept/Topological_data_with_scalar_product.h +++ b/src/Persistence_representations/concept/Topological_data_with_scalar_product.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/concept/Vectorized_topological_data.h b/src/Persistence_representations/concept/Vectorized_topological_data.h index dd1224d2..b8a16102 100644 --- a/src/Persistence_representations/concept/Vectorized_topological_data.h +++ b/src/Persistence_representations/concept/Vectorized_topological_data.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index 111e532b..3a5b9c89 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/example/persistence_heat_maps.cpp b/src/Persistence_representations/example/persistence_heat_maps.cpp index a7e64bb1..1bf3a637 100644 --- a/src/Persistence_representations/example/persistence_heat_maps.cpp +++ b/src/Persistence_representations/example/persistence_heat_maps.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko and Mathieu Carriere * * Copyright (C) 2019 Inria diff --git a/src/Persistence_representations/example/persistence_intervals.cpp b/src/Persistence_representations/example/persistence_intervals.cpp index c94f7fe0..c908581c 100644 --- a/src/Persistence_representations/example/persistence_intervals.cpp +++ b/src/Persistence_representations/example/persistence_intervals.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/example/persistence_landscape.cpp b/src/Persistence_representations/example/persistence_landscape.cpp index 70aff546..ff18d105 100644 --- a/src/Persistence_representations/example/persistence_landscape.cpp +++ b/src/Persistence_representations/example/persistence_landscape.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid.cpp index 7df34ef9..16a58e1d 100644 --- a/src/Persistence_representations/example/persistence_landscape_on_grid.cpp +++ b/src/Persistence_representations/example/persistence_landscape_on_grid.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/example/persistence_vectors.cpp b/src/Persistence_representations/example/persistence_vectors.cpp index d0cf2fc8..b27e52d2 100644 --- a/src/Persistence_representations/example/persistence_vectors.cpp +++ b/src/Persistence_representations/example/persistence_vectors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp index 6c01c3af..d5414d00 100644 --- a/src/Persistence_representations/example/sliced_wasserstein.cpp +++ b/src/Persistence_representations/example/sliced_wasserstein.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carriere * * Copyright (C) 2018 INRIA (France) diff --git a/src/Persistence_representations/include/gudhi/PSSK.h b/src/Persistence_representations/include/gudhi/PSSK.h index 995d0ca2..fc90d0f4 100644 --- a/src/Persistence_representations/include/gudhi/PSSK.h +++ b/src/Persistence_representations/include/gudhi/PSSK.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h index 12cb04c2..b1af3503 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h +++ b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko and Mathieu Carriere * * Modifications: diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals.h b/src/Persistence_representations/include/gudhi/Persistence_intervals.h index 47953596..e2db4572 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_intervals.h +++ b/src/Persistence_representations/include/gudhi/Persistence_intervals.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi hiLibrary. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h b/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h index 9f605d35..98543f2f 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h +++ b/src/Persistence_representations/include/gudhi/Persistence_intervals_with_distances.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi hiLibrary. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape.h b/src/Persistence_representations/include/gudhi/Persistence_landscape.h index f949372b..b819ccb6 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_landscape.h +++ b/src/Persistence_representations/include/gudhi/Persistence_landscape.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/include/gudhi/Persistence_vectors.h b/src/Persistence_representations/include/gudhi/Persistence_vectors.h index a8f07b3b..6776f4a3 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_vectors.h +++ b/src/Persistence_representations/include/gudhi/Persistence_vectors.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h index fbe12422..e3ed2f6a 100644 --- a/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h +++ b/src/Persistence_representations/include/gudhi/Sliced_Wasserstein.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carriere * * Copyright (C) 2018 Inria diff --git a/src/Persistence_representations/include/gudhi/common_persistence_representations.h b/src/Persistence_representations/include/gudhi/common_persistence_representations.h index 488d4529..5eff0192 100644 --- a/src/Persistence_representations/include/gudhi/common_persistence_representations.h +++ b/src/Persistence_representations/include/gudhi/common_persistence_representations.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h index db21c714..5c2d2038 100644 --- a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h +++ b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/kernels.cpp b/src/Persistence_representations/test/kernels.cpp index eb27747c..63089538 100644 --- a/src/Persistence_representations/test/kernels.cpp +++ b/src/Persistence_representations/test/kernels.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Mathieu Carrière * * Copyright (C) 2018 INRIA diff --git a/src/Persistence_representations/test/persistence_heat_maps_test.cpp b/src/Persistence_representations/test/persistence_heat_maps_test.cpp index 57e1b4b3..b3240758 100644 --- a/src/Persistence_representations/test/persistence_heat_maps_test.cpp +++ b/src/Persistence_representations/test/persistence_heat_maps_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/persistence_intervals_test.cpp b/src/Persistence_representations/test/persistence_intervals_test.cpp index 513e042d..3b7a2049 100644 --- a/src/Persistence_representations/test/persistence_intervals_test.cpp +++ b/src/Persistence_representations/test/persistence_intervals_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp b/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp index 3b334533..48d6e8ed 100644 --- a/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp +++ b/src/Persistence_representations/test/persistence_intervals_with_distances_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp b/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp index 36e935ac..f73da751 100644 --- a/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp +++ b/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/persistence_lanscapes_test.cpp b/src/Persistence_representations/test/persistence_lanscapes_test.cpp index 5b368462..21ef18a0 100644 --- a/src/Persistence_representations/test/persistence_lanscapes_test.cpp +++ b/src/Persistence_representations/test/persistence_lanscapes_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/read_persistence_from_file_test.cpp b/src/Persistence_representations/test/read_persistence_from_file_test.cpp index ac0438d7..88b686f1 100644 --- a/src/Persistence_representations/test/read_persistence_from_file_test.cpp +++ b/src/Persistence_representations/test/read_persistence_from_file_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/test/vector_representation_test.cpp b/src/Persistence_representations/test/vector_representation_test.cpp index 9c038727..568727a5 100644 --- a/src/Persistence_representations/test/vector_representation_test.cpp +++ b/src/Persistence_representations/test/vector_representation_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp index 4edaffdc..3d088b58 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp index e557e82d..48000bb1 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp index a700724c..8a96f1b0 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp index 66bf9416..5a657b13 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp index ac6ec212..8d67a54d 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp index 6a3cc2a9..29170c32 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp index 40ec56ce..995771b9 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp index d351e3b6..cf6e07cb 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp index aa051497..519cc47d 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp index ca67c74f..6155727a 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp index e457c6f4..dd6e1a5b 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp index b6b35fa1..13d2133f 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp index d171d809..451be77f 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp index 656958cc..09a56869 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp +++ b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp index 06c61bbf..04a0ada4 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp index 73d83de2..1093c1aa 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp index 313d09f5..16b76497 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp index 9ae2e670..4d772086 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp index 5c114f27..1fe03640 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp index 14c84d51..f92cde72 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp index b732cb82..baec6aeb 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp index 91643e2a..e94dacdb 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp index 0964eca3..d510c3df 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp index 079bd76e..4e20f37f 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp index 663555a6..89e42f0f 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp index a2e8b226..541dd25f 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp index 842c36c4..bbc50c98 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp index 5060ca8b..f974c3d3 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2016 Inria diff --git a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp index b449be3e..de08fcfe 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Pawel Dlotko
 *
 * Copyright (C) 2016 Inria
diff --git a/src/Persistent_cohomology/doc/COPYRIGHT b/src/Persistent_cohomology/doc/COPYRIGHT
index 6cde9520..61f17f6d 100644
--- a/src/Persistent_cohomology/doc/COPYRIGHT
+++ b/src/Persistent_cohomology/doc/COPYRIGHT
@@ -1,19 +1,12 @@
-The files of this directory are part of the Gudhi Library. The Gudhi library
-(Geometric Understanding in Higher Dimensions) is a generic C++ library for
-computational topology.
+The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.

-Author(s): Clément Maria
+Author(s): Vincent Rouvreau

 Copyright (C) 2015 Inria

-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
+This gives everyone the freedoms to use the GUDHI library in any context:
+commercial or non-commercial, public or private, open or closed source.

-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-this program. If not, see .
+You should have received a copy of the MIT License along with this program.
+If not, see https://opensource.org/licenses/MIT.
\ No newline at end of file
diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
index 6fd19706..46b784d8 100644
--- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
+++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Clément Maria
 *
 * Copyright (C) 2014 Inria
diff --git a/src/Persistent_cohomology/example/custom_persistence_sort.cpp b/src/Persistent_cohomology/example/custom_persistence_sort.cpp
index b2aed37c..be74cf50 100644
--- a/src/Persistent_cohomology/example/custom_persistence_sort.cpp
+++ b/src/Persistent_cohomology/example/custom_persistence_sort.cpp
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
* Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp index f95dfeeb..3c91662f 100644 --- a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp +++ b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp index fbb25cea..84333e46 100644 --- a/src/Persistent_cohomology/example/plain_homology.cpp +++ b/src/Persistent_cohomology/example/plain_homology.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Marc Glisse * * Copyright (C) 2015 Inria diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index bee54ded..944b6d35 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h index 0d6c0f82..0673625c 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h index 716d91cd..1754a2ec 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Clément Maria
 *
 * Copyright (C) 2014 Inria
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h
index 9d7edfe6..480be389 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Clément Maria
 *
 * Copyright (C) 2014 Inria
diff --git a/src/Rips_complex/doc/COPYRIGHT b/src/Rips_complex/doc/COPYRIGHT
index 2c31a0d6..61f17f6d 100644
--- a/src/Rips_complex/doc/COPYRIGHT
+++ b/src/Rips_complex/doc/COPYRIGHT
@@ -1,19 +1,12 @@
-The files of this directory are part of the Gudhi Library. The Gudhi library
-(Geometric Understanding in Higher Dimensions) is a generic C++ library for
-computational topology.
+The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.

-Author(s): Clément Maria, Pawel Dlotko, Vincent Rouvreau
+Author(s): Vincent Rouvreau

 Copyright (C) 2015 Inria

-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
+This gives everyone the freedoms to use the GUDHI library in any context:
+commercial or non-commercial, public or private, open or closed source.

-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-this program. If not, see .
+You should have received a copy of the MIT License along with this program.
+If not, see https://opensource.org/licenses/MIT.
\ No newline at end of file
diff --git a/src/Rips_complex/doc/Intro_rips_complex.h b/src/Rips_complex/doc/Intro_rips_complex.h
index 6e5103ac..b2840686 100644
--- a/src/Rips_complex/doc/Intro_rips_complex.h
+++ b/src/Rips_complex/doc/Intro_rips_complex.h
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
* Author(s): Clément Maria, Pawel Dlotko, Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/Rips_complex/include/gudhi/Rips_complex.h b/src/Rips_complex/include/gudhi/Rips_complex.h index 958abbe2..d767dc1b 100644 --- a/src/Rips_complex/include/gudhi/Rips_complex.h +++ b/src/Rips_complex/include/gudhi/Rips_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria, Pawel Dlotko, Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/Rips_complex/include/gudhi/Sparse_rips_complex.h b/src/Rips_complex/include/gudhi/Sparse_rips_complex.h index 081a0233..1b250818 100644 --- a/src/Rips_complex/include/gudhi/Sparse_rips_complex.h +++ b/src/Rips_complex/include/gudhi/Sparse_rips_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Marc Glisse * * Copyright (C) 2018 Inria diff --git a/src/Rips_complex/test/test_rips_complex.cpp b/src/Rips_complex/test/test_rips_complex.cpp index cc8745cf..1225f8df 100644 --- a/src/Rips_complex/test/test_rips_complex.cpp +++ b/src/Rips_complex/test/test_rips_complex.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp index f634a2ea..585de4a0 100644 --- a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp +++ b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko, Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp index 98bc6eba..ad429e11 100644 --- a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp +++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Pawel Dlotko, Vincent Rouvreau
 *
 * Copyright (C) 2016 Inria
diff --git a/src/Rips_complex/utilities/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index a6357847..daa7e1db 100644
--- a/src/Rips_complex/utilities/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Clément Maria
 *
 * Copyright (C) 2014 Inria
diff --git a/src/Rips_complex/utilities/sparse_rips_persistence.cpp b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
index a7db4ec6..1a86eafe 100644
--- a/src/Rips_complex/utilities/sparse_rips_persistence.cpp
+++ b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): Marc Glisse, Clément Maria
 *
 * Copyright (C) 2018 Inria
diff --git a/src/Simplex_tree/doc/COPYRIGHT b/src/Simplex_tree/doc/COPYRIGHT
index 6cde9520..61f17f6d 100644
--- a/src/Simplex_tree/doc/COPYRIGHT
+++ b/src/Simplex_tree/doc/COPYRIGHT
@@ -1,19 +1,12 @@
-The files of this directory are part of the Gudhi Library. The Gudhi library
-(Geometric Understanding in Higher Dimensions) is a generic C++ library for
-computational topology.
+The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.

-Author(s): Clément Maria
+Author(s): Vincent Rouvreau

 Copyright (C) 2015 Inria

-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
+This gives everyone the freedoms to use the GUDHI library in any context:
+commercial or non-commercial, public or private, open or closed source.

-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-this program. If not, see .
+You should have received a copy of the MIT License along with this program.
+If not, see https://opensource.org/licenses/MIT.
\ No newline at end of file
diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h
index b01e3e92..800879fe 100644
--- a/src/Simplex_tree/doc/Intro_simplex_tree.h
+++ b/src/Simplex_tree/doc/Intro_simplex_tree.h
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library.
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp index fb1a3a4c..d716fb1f 100644 --- a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp +++ b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2017 Inria diff --git a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp index 8803dbb2..e455c426 100644 --- a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp +++ b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp index 34bfd77c..494f8b1d 100644 --- a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp +++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/example/mini_simplex_tree.cpp b/src/Simplex_tree/example/mini_simplex_tree.cpp index 6370b508..bbc582c7 100644 --- a/src/Simplex_tree/example/mini_simplex_tree.cpp +++ b/src/Simplex_tree/example/mini_simplex_tree.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Marc Glisse * * Copyright (C) 2015 Inria diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp index 6a0a7fc0..4353939f 100644 --- a/src/Simplex_tree/example/simple_simplex_tree.cpp +++ b/src/Simplex_tree/example/simple_simplex_tree.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp index eb0282f2..f6dfa53c 100644 --- a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp +++ b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 14b27610..fafdb01c 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h index 7b6dea0f..efccf2f2 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h index 26bf0569..ae140859 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h index d2b7d8d9..b53bad29 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h b/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h index 4df7833c..3e395ae2 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clément Maria * * Copyright (C) 2014 Inria diff --git a/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp index e729cf00..c0615b12 100644 --- a/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp @@ -1,3 +1,13 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + #include #include #include diff --git a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp index 19ce3321..fab25eb8 100644 --- a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp @@ -1,3 +1,13 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + #include #include #include diff --git a/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp index ecb9f025..28c29489 100644 --- a/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp @@ -1,3 +1,13 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + #include #define BOOST_TEST_DYN_LINK diff --git a/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp index dc37375c..97347992 100644 --- a/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp @@ -1,3 +1,13 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + #include #define BOOST_TEST_DYN_LINK diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp index f2a5d54d..58bfa8db 100644 --- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp @@ -1,3 +1,13 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + #include #include #include diff --git a/src/Skeleton_blocker/concept/SkeletonBlockerDS.h b/src/Skeleton_blocker/concept/SkeletonBlockerDS.h index 52db1766..0c2014bd 100644 --- a/src/Skeleton_blocker/concept/SkeletonBlockerDS.h +++ b/src/Skeleton_blocker/concept/SkeletonBlockerDS.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h b/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h index 5987f6fb..9a81b79c 100644 --- a/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h +++ b/src/Skeleton_blocker/concept/SkeletonBlockerGeometricDS.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): David Salinas
 *
 * Copyright (C) 2014 Inria
diff --git a/src/Skeleton_blocker/doc/COPYRIGHT b/src/Skeleton_blocker/doc/COPYRIGHT
index 5b606ac2..61f17f6d 100644
--- a/src/Skeleton_blocker/doc/COPYRIGHT
+++ b/src/Skeleton_blocker/doc/COPYRIGHT
@@ -1,18 +1,12 @@
-The files of this directory are part of the Gudhi Library. The Gudhi library
-(Geometric Understanding in Higher Dimensions) is a generic C++ library for
-computational topology.
+The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.

-Author(s): David Salinas
-Copyright (C) 2015 Inria
+Author(s): Vincent Rouvreau

-This program is free software: you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation, either version 3 of the License, or (at your option) any later
-version.
+Copyright (C) 2015 Inria

-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+This gives everyone the freedoms to use the GUDHI library in any context:
+commercial or non-commercial, public or private, open or closed source.

-You should have received a copy of the GNU General Public License along with
-this program. If not, see .
+You should have received a copy of the MIT License along with this program.
+If not, see https://opensource.org/licenses/MIT.
\ No newline at end of file
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
index 169cd3b3..bcca851f 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
 * Author(s): David Salinas
 *
 * Copyright (C) 2014 Inria
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h
index 533cc777..9f145013 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h
@@ -1,7 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
* Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h index c9ebd9ad..d348b696 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h index 90793843..52300493 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_off_io.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h index 3d3d8425..772e33aa 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h index a931ec98..0c0cc624 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h index 115a3a84..12fe6469 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h index a9e50d11..4c48ff31 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h index d2aa59d3..91e79b42 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Top_faces.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h index 66371d0e..4f51f572 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_blockers_iterators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h index 63b963dd..154388a1 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_edges_iterators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h index 1968f43b..920f8cb6 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h index ca4d46f6..37c0b4d3 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h index 81c6e82b..49e94256 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_vertices_iterators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h index a8d2420d..125c6387 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h index c7946516..b8f75e0f 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h index 38fd32fd..a2637da3 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h index f8121fcc..404f04f9 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_simplifiable_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp index 8265d763..4336e33b 100644 --- a/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp +++ b/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria @@ -9,6 +7,7 @@ * Modification(s): * - YYYY/MM Author: Description of the modification */ + #include #include #include diff --git a/src/Spatial_searching/doc/Intro_spatial_searching.h b/src/Spatial_searching/doc/Intro_spatial_searching.h index 5cc458e4..30805570 100644 --- a/src/Spatial_searching/doc/Intro_spatial_searching.h +++ b/src/Spatial_searching/doc/Intro_spatial_searching.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Spatial_searching/include/gudhi/Kd_tree_search.h b/src/Spatial_searching/include/gudhi/Kd_tree_search.h index 92f3f11b..9e4666bb 100644 --- a/src/Spatial_searching/include/gudhi/Kd_tree_search.h +++ b/src/Spatial_searching/include/gudhi/Kd_tree_search.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Spatial_searching/test/test_Kd_tree_search.cpp b/src/Spatial_searching/test/test_Kd_tree_search.cpp index 37d7c073..d6c6fba3 100644 --- a/src/Spatial_searching/test/test_Kd_tree_search.cpp +++ b/src/Spatial_searching/test/test_Kd_tree_search.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/doc/Intro_subsampling.h b/src/Subsampling/doc/Intro_subsampling.h index 927e2b72..1c84fb2e 100644 --- a/src/Subsampling/doc/Intro_subsampling.h +++ b/src/Subsampling/doc/Intro_subsampling.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/include/gudhi/choose_n_farthest_points.h b/src/Subsampling/include/gudhi/choose_n_farthest_points.h index f99df0fa..66421a69 100644 --- a/src/Subsampling/include/gudhi/choose_n_farthest_points.h +++ b/src/Subsampling/include/gudhi/choose_n_farthest_points.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/include/gudhi/pick_n_random_points.h b/src/Subsampling/include/gudhi/pick_n_random_points.h index f7734238..a67b2b84 100644 --- a/src/Subsampling/include/gudhi/pick_n_random_points.h +++ b/src/Subsampling/include/gudhi/pick_n_random_points.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/include/gudhi/sparsify_point_set.h b/src/Subsampling/include/gudhi/sparsify_point_set.h index d39d3906..b30cec80 100644 --- a/src/Subsampling/include/gudhi/sparsify_point_set.h +++ b/src/Subsampling/include/gudhi/sparsify_point_set.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/test/test_choose_n_farthest_points.cpp b/src/Subsampling/test/test_choose_n_farthest_points.cpp index 7e3dfb21..5c4bd4cb 100644 --- a/src/Subsampling/test/test_choose_n_farthest_points.cpp +++ b/src/Subsampling/test/test_choose_n_farthest_points.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/test/test_pick_n_random_points.cpp b/src/Subsampling/test/test_pick_n_random_points.cpp index 49138ab4..018fb8d2 100644 --- a/src/Subsampling/test/test_pick_n_random_points.cpp +++ b/src/Subsampling/test/test_pick_n_random_points.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Subsampling/test/test_sparsify_point_set.cpp b/src/Subsampling/test/test_sparsify_point_set.cpp index b2a55663..587ab3ad 100644 --- a/src/Subsampling/test/test_sparsify_point_set.cpp +++ b/src/Subsampling/test/test_sparsify_point_set.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/benchmark/RIB_exporter.h b/src/Tangential_complex/benchmark/RIB_exporter.h index 7e8a8ed9..4cec0603 100644 --- a/src/Tangential_complex/benchmark/RIB_exporter.h +++ b/src/Tangential_complex/benchmark/RIB_exporter.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h index afa67288..16b62eb6 100644 --- a/src/Tangential_complex/benchmark/XML_exporter.h +++ b/src/Tangential_complex/benchmark/XML_exporter.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/doc/COPYRIGHT b/src/Tangential_complex/doc/COPYRIGHT index f9f92471..61f17f6d 100644 --- a/src/Tangential_complex/doc/COPYRIGHT +++ b/src/Tangential_complex/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. -Author(s): Clement Jamin +Author(s): Vincent Rouvreau Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. 
-This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h index 3bd84aa8..ce277185 100644 --- a/src/Tangential_complex/doc/Intro_tangential_complex.h +++ b/src/Tangential_complex/doc/Intro_tangential_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h index 7fdd68ee..b3bac58e 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h index 8f008236..4881bdd5 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h index 6de698ab..352531da 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h index 14e71797..ee6ed9ba 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp index 94038c29..46caec54 100644 --- a/src/Tangential_complex/test/test_tangential_complex.cpp +++ b/src/Tangential_complex/test/test_tangential_complex.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/Toplex_map/benchmark/benchmark_tm.cpp b/src/Toplex_map/benchmark/benchmark_tm.cpp index f132b783..feb5d01c 100644 --- a/src/Toplex_map/benchmark/benchmark_tm.cpp +++ b/src/Toplex_map/benchmark/benchmark_tm.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi, Vincent Rouvreau * * Copyright (C) 2018 INRIA diff --git a/src/Toplex_map/doc/Intro_Toplex_map.h b/src/Toplex_map/doc/Intro_Toplex_map.h index 58c22f64..cd7705b6 100644 --- a/src/Toplex_map/doc/Intro_Toplex_map.h +++ b/src/Toplex_map/doc/Intro_Toplex_map.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi, Vincent Rouvreau * * Copyright (C) 2017 INRIA diff --git a/src/Toplex_map/example/simple_toplex_map.cpp b/src/Toplex_map/example/simple_toplex_map.cpp index 27ce0fbe..7538c989 100644 --- a/src/Toplex_map/example/simple_toplex_map.cpp +++ b/src/Toplex_map/example/simple_toplex_map.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Vincent Rouvreau * * Copyright (C) 2018 diff --git a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h index c328e43b..dcc128fa 100644 --- a/src/Toplex_map/include/gudhi/Lazy_toplex_map.h +++ b/src/Toplex_map/include/gudhi/Lazy_toplex_map.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi, Vincent Rouvreau * * Copyright (C) 2018 INRIA diff --git a/src/Toplex_map/include/gudhi/Toplex_map.h b/src/Toplex_map/include/gudhi/Toplex_map.h index 7deebef7..95e94938 100644 --- a/src/Toplex_map/include/gudhi/Toplex_map.h +++ b/src/Toplex_map/include/gudhi/Toplex_map.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi, Vincent Rouvreau * * Copyright (C) 2018 INRIA diff --git a/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp b/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp index 2cca9c46..639bf35a 100644 --- a/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi, Vincent Rouvreau * * Copyright (C) 2018 INRIA diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp index c4c37bb3..24ec679b 100644 --- a/src/Toplex_map/test/toplex_map_unit_test.cpp +++ b/src/Toplex_map/test/toplex_map_unit_test.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author: François Godi, Vincent Rouvreau * * Copyright (C) 2018 INRIA diff --git a/src/Witness_complex/doc/COPYRIGHT b/src/Witness_complex/doc/COPYRIGHT index 25a700cf..61f17f6d 100644 --- a/src/Witness_complex/doc/COPYRIGHT +++ b/src/Witness_complex/doc/COPYRIGHT @@ -1,19 +1,12 @@ -The files of this directory are part of the Gudhi Library. The Gudhi library -(Geometric Understanding in Higher Dimensions) is a generic C++ library for -computational topology. +The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
-Author(s): Siargey Kachanovich +Author(s): Vincent Rouvreau Copyright (C) 2015 Inria -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. +This gives everyone the freedoms to use openFrameworks in any context: +commercial or non-commercial, public or private, open or closed source. -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . +You should have received a copy of the MIT License along with this program. +If not, see https://opensource.org/licenses/MIT. \ No newline at end of file diff --git a/src/Witness_complex/example/generators.h b/src/Witness_complex/example/generators.h index 214cd059..1900e1e4 100644 --- a/src/Witness_complex/example/generators.h +++ b/src/Witness_complex/example/generators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2015 Inria diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h index 56b3e808..2ae1d6e0 100644 --- a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h +++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h index 6aa9c0dd..4f8fddba 100644 --- a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h +++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h index d2bf00ce..c9767982 100644 --- a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2015 Inria diff --git a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h index a4430301..75ea38e9 100644 --- a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2015 Inria diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h index 5861ec62..b3699f77 100644 --- a/src/Witness_complex/include/gudhi/Strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2015 Inria diff --git a/src/Witness_complex/include/gudhi/Witness_complex.h b/src/Witness_complex/include/gudhi/Witness_complex.h index 375a79ac..d655c7f6 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex.h +++ b/src/Witness_complex/include/gudhi/Witness_complex.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2015 Inria diff --git a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h index ae7ad0f3..5845728a 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h +++ b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2015 Inria diff --git a/src/Witness_complex/utilities/strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp index a06bb1fa..75ba1f4b 100644 --- a/src/Witness_complex/utilities/strong_witness_persistence.cpp +++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/Witness_complex/utilities/weak_witness_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp index 35d09f95..0e5b9cc1 100644 --- a/src/Witness_complex/utilities/weak_witness_persistence.cpp +++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Siargey Kachanovich * * Copyright (C) 2016 Inria diff --git a/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp b/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp index a03d1757..0fc145fd 100644 --- a/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp +++ b/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h index 826cbc3e..38abc06d 100644 --- a/src/common/include/gudhi/Debug_utils.h +++ b/src/common/include/gudhi/Debug_utils.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): David Salinas * * Copyright (C) 2014 Inria diff --git a/src/common/include/gudhi/Null_output_iterator.h b/src/common/include/gudhi/Null_output_iterator.h index 81309080..3d03bca6 100644 --- a/src/common/include/gudhi/Null_output_iterator.h +++ b/src/common/include/gudhi/Null_output_iterator.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Marc Glisse * * Copyright (C) 2017 Inria diff --git a/src/common/include/gudhi/Points_3D_off_io.h b/src/common/include/gudhi/Points_3D_off_io.h index 2112cc05..2d110af3 100644 --- a/src/common/include/gudhi/Points_3D_off_io.h +++ b/src/common/include/gudhi/Points_3D_off_io.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index a0fca77e..99371d56 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/common/include/gudhi/Simple_object_pool.h b/src/common/include/gudhi/Simple_object_pool.h index 164849e1..d1482b44 100644 --- a/src/common/include/gudhi/Simple_object_pool.h +++ b/src/common/include/gudhi/Simple_object_pool.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Marc Glisse * * Copyright (C) 2015 Inria diff --git a/src/common/include/gudhi/Unitary_tests_utils.h b/src/common/include/gudhi/Unitary_tests_utils.h index 5ab20af8..4ad4dae8 100644 --- a/src/common/include/gudhi/Unitary_tests_utils.h +++ b/src/common/include/gudhi/Unitary_tests_utils.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2017 Inria diff --git a/src/common/include/gudhi/allocator.h b/src/common/include/gudhi/allocator.h index e828f441..b7ccd180 100644 --- a/src/common/include/gudhi/allocator.h +++ b/src/common/include/gudhi/allocator.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Marc Glisse * * Copyright (C) 2015 Inria diff --git a/src/common/include/gudhi/console_color.h b/src/common/include/gudhi/console_color.h index 7681ae66..f9167119 100644 --- a/src/common/include/gudhi/console_color.h +++ b/src/common/include/gudhi/console_color.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/common/include/gudhi/random_point_generators.h b/src/common/include/gudhi/random_point_generators.h index dbaf0ab1..7889b9ca 100644 --- a/src/common/include/gudhi/random_point_generators.h +++ b/src/common/include/gudhi/random_point_generators.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Jamin * * Copyright (C) 2016 Inria diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h index 1365b560..98335552 100644 --- a/src/common/include/gudhi/reader_utils.h +++ b/src/common/include/gudhi/reader_utils.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Clement Maria, Pawel Dlotko, Clement Jamin * * Copyright (C) 2014 Inria diff --git a/src/common/include/gudhi/writing_persistence_to_file.h b/src/common/include/gudhi/writing_persistence_to_file.h index c0d4929e..2e36b831 100644 --- a/src/common/include/gudhi/writing_persistence_to_file.h +++ b/src/common/include/gudhi/writing_persistence_to_file.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Pawel Dlotko * * Copyright (C) 2017 Swansea University, UK diff --git a/src/common/test/test_distance_matrix_reader.cpp b/src/common/test/test_distance_matrix_reader.cpp index c25b2f53..bb619a29 100644 --- a/src/common/test/test_distance_matrix_reader.cpp +++ b/src/common/test/test_distance_matrix_reader.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/common/test/test_persistence_intervals_reader.cpp b/src/common/test/test_persistence_intervals_reader.cpp index 0d0f515e..8fb4377d 100644 --- a/src/common/test/test_persistence_intervals_reader.cpp +++ b/src/common/test/test_persistence_intervals_reader.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2017 Inria diff --git a/src/common/test/test_points_off_reader.cpp b/src/common/test/test_points_off_reader.cpp index 49fa1ec2..f190a13e 100644 --- a/src/common/test/test_points_off_reader.cpp +++ b/src/common/test/test_points_off_reader.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2015 Inria diff --git a/src/common/utilities/off_file_from_shape_generator.cpp b/src/common/utilities/off_file_from_shape_generator.cpp index eb31e8bc..6efef4fc 100644 --- a/src/common/utilities/off_file_from_shape_generator.cpp +++ b/src/common/utilities/off_file_from_shape_generator.cpp @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2014 Inria diff --git a/src/cython/include/Alpha_complex_interface.h b/src/cython/include/Alpha_complex_interface.h index 1199b741..b3553d32 100644 --- a/src/cython/include/Alpha_complex_interface.h +++ b/src/cython/include/Alpha_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Bottleneck_distance_interface.h b/src/cython/include/Bottleneck_distance_interface.h index 22c9a97a..a4f3eaf1 100644 --- a/src/cython/include/Bottleneck_distance_interface.h +++ b/src/cython/include/Bottleneck_distance_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Cubical_complex_interface.h b/src/cython/include/Cubical_complex_interface.h index 7d32914c..648598e1 100644 --- a/src/cython/include/Cubical_complex_interface.h +++ b/src/cython/include/Cubical_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Euclidean_strong_witness_complex_interface.h b/src/cython/include/Euclidean_strong_witness_complex_interface.h index 90bd54ac..c1c72737 100644 --- a/src/cython/include/Euclidean_strong_witness_complex_interface.h +++ b/src/cython/include/Euclidean_strong_witness_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Euclidean_witness_complex_interface.h b/src/cython/include/Euclidean_witness_complex_interface.h index 0c01a741..5d7dbdc2 100644 --- a/src/cython/include/Euclidean_witness_complex_interface.h +++ b/src/cython/include/Euclidean_witness_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Nerve_gic_interface.h b/src/cython/include/Nerve_gic_interface.h index 729b39fb..5e7f8ae6 100644 --- a/src/cython/include/Nerve_gic_interface.h +++ b/src/cython/include/Nerve_gic_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2018 Inria diff --git a/src/cython/include/Off_reader_interface.h b/src/cython/include/Off_reader_interface.h index 4b3643be..e6e1f931 100644 --- a/src/cython/include/Off_reader_interface.h +++ b/src/cython/include/Off_reader_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Persistent_cohomology_interface.h b/src/cython/include/Persistent_cohomology_interface.h index 64e2ddc8..8c79e6f3 100644 --- a/src/cython/include/Persistent_cohomology_interface.h +++ b/src/cython/include/Persistent_cohomology_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Reader_utils_interface.h b/src/cython/include/Reader_utils_interface.h index 5bddf9ce..5f0deb87 100644 --- a/src/cython/include/Reader_utils_interface.h +++ b/src/cython/include/Reader_utils_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2017 Inria diff --git a/src/cython/include/Rips_complex_interface.h b/src/cython/include/Rips_complex_interface.h index f818a2ed..a66b0e5b 100644 --- a/src/cython/include/Rips_complex_interface.h +++ b/src/cython/include/Rips_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Simplex_tree_interface.h b/src/cython/include/Simplex_tree_interface.h index c15a44a5..06f31341 100644 --- a/src/cython/include/Simplex_tree_interface.h +++ b/src/cython/include/Simplex_tree_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Strong_witness_complex_interface.h b/src/cython/include/Strong_witness_complex_interface.h index 4c333da8..cda5b514 100644 --- a/src/cython/include/Strong_witness_complex_interface.h +++ b/src/cython/include/Strong_witness_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Subsampling_interface.h b/src/cython/include/Subsampling_interface.h index bc390485..cdda851f 100644 --- a/src/cython/include/Subsampling_interface.h +++ b/src/cython/include/Subsampling_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Tangential_complex_interface.h b/src/cython/include/Tangential_complex_interface.h index 7c3f2789..698226cc 100644 --- a/src/cython/include/Tangential_complex_interface.h +++ b/src/cython/include/Tangential_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. * Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria diff --git a/src/cython/include/Witness_complex_interface.h b/src/cython/include/Witness_complex_interface.h index 609277d6..45e14253 100644 --- a/src/cython/include/Witness_complex_interface.h +++ b/src/cython/include/Witness_complex_interface.h @@ -1,7 +1,5 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
* Author(s): Vincent Rouvreau * * Copyright (C) 2016 Inria -- cgit v1.2.3 From ed59f5bf1b35269cf4324dbb72df863bf50dac01 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 20 Aug 2019 14:33:21 +0200 Subject: Add include iostream when needed, and remove CGAL includes when not required --- src/Subsampling/example/example_choose_n_farthest_points.cpp | 1 + src/Subsampling/example/example_custom_kernel.cpp | 4 +--- src/Subsampling/example/example_pick_n_random_points.cpp | 1 + src/Subsampling/example/example_sparsify_point_set.cpp | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/Subsampling/example/example_choose_n_farthest_points.cpp b/src/Subsampling/example/example_choose_n_farthest_points.cpp index ebf631fc..5cfeb4d8 100644 --- a/src/Subsampling/example/example_choose_n_farthest_points.cpp +++ b/src/Subsampling/example/example_choose_n_farthest_points.cpp @@ -3,6 +3,7 @@ #include #include +#include #include #include diff --git a/src/Subsampling/example/example_custom_kernel.cpp b/src/Subsampling/example/example_custom_kernel.cpp index 2d42bdde..f1eb757b 100644 --- a/src/Subsampling/example/example_custom_kernel.cpp +++ b/src/Subsampling/example/example_custom_kernel.cpp @@ -1,8 +1,6 @@ #include -#include -#include - +#include #include #include diff --git a/src/Subsampling/example/example_pick_n_random_points.cpp b/src/Subsampling/example/example_pick_n_random_points.cpp index 1e38e405..25266403 100644 --- a/src/Subsampling/example/example_pick_n_random_points.cpp +++ b/src/Subsampling/example/example_pick_n_random_points.cpp @@ -3,6 +3,7 @@ #include #include +#include #include #include diff --git a/src/Subsampling/example/example_sparsify_point_set.cpp b/src/Subsampling/example/example_sparsify_point_set.cpp index b35a18d9..a8caa720 100644 --- a/src/Subsampling/example/example_sparsify_point_set.cpp +++ b/src/Subsampling/example/example_sparsify_point_set.cpp @@ -3,6 +3,7 @@ #include #include +#include #include #include -- cgit v1.2.3 From 87a8d7962ea13f11e591462ec5757e9e1747dc07 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 20 Aug 2019 17:05:13 +0200 Subject: Fix issue #10 and modify main and installation documentations accordingly --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 13 +++++++++ src/Alpha_complex/include/gudhi/Alpha_complex_3d.h | 11 +++++-- src/Bottleneck_distance/include/gudhi/Bottleneck.h | 10 ++++++- .../include/gudhi/Skeleton_blocker_contractor.h | 9 +++++- src/Nerve_GIC/include/gudhi/GIC.h | 8 +++++ .../include/gudhi/Kd_tree_search.h | 13 +++++++++ .../include/gudhi/Tangential_complex.h | 12 ++++++++ .../gudhi/Euclidean_strong_witness_complex.h | 14 +++++++++ .../include/gudhi/Euclidean_witness_complex.h | 14 +++++++++ src/common/doc/installation.h | 12 ++++---- src/common/doc/main_page.md | 6 ++-- src/common/include/gudhi/random_point_generators.h | 7 +++++ src/cython/doc/alpha_complex_sum.inc | 34 +++++++++++----------- src/cython/doc/installation.rst | 18 +++++------- src/cython/doc/tangential_complex_sum.inc | 22 +++++++------- src/cython/doc/witness_complex_sum.inc | 28 +++++++++--------- 16 files changed, 166 insertions(+), 65 deletions(-) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index cdc1ed1d..8919cdb9 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -5,6 +5,7 @@ * Copyright (C) 2015 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3 * 
- YYYY/MM Author: Description of the modification */ @@ -23,6 +24,9 @@ #include #include // for CGAL::Identity_property_map #include +#include // for CGAL_VERSION_NR + +#include // for EIGEN_VERSION_AT_LEAST #include #include @@ -33,6 +37,15 @@ #include #include // for std::iota +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + +#if !EIGEN_VERSION_AT_LEAST(3,1,0) +# error Alpha_complex_3d is only available for Eigen3 >= 3.1.0 installed with CGAL +#endif + namespace Gudhi { namespace alpha_complex { diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h index 9bff42b5..13ebb9c1 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h @@ -5,6 +5,7 @@ * Copyright (C) 2018 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3 * - YYYY/MM Author: Description of the modification */ @@ -32,7 +33,9 @@ #include #include #include -#include +#include // for CGAL_VERSION_NR + +#include // for EIGEN_VERSION_AT_LEAST #include @@ -45,11 +48,15 @@ #include // for std::conditional and std::enable_if #include // for numeric_limits<> -#if CGAL_VERSION_NR < 1041101000 // Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 # error Alpha_complex_3d is only available for CGAL >= 4.11 #endif +#if !EIGEN_VERSION_AT_LEAST(3,1,0) +# error Alpha_complex_3d is only available for Eigen3 >= 3.1.0 installed with CGAL +#endif + namespace Gudhi { namespace alpha_complex { diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h index d31c82ee..82ba9f68 100644 --- a/src/Bottleneck_distance/include/gudhi/Bottleneck.h +++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h @@ -5,8 +5,9 @@ * Copyright (C) 2015 Inria * * Modification(s): - * - YYYY/MM Author: Description of the modification * - 2019/06 Vincent Rouvreau : Fix doxygen warning. 
+ * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL + * - YYYY/MM Author: Description of the modification */ #ifndef BOTTLENECK_H_ @@ -14,6 +15,8 @@ #include +#include // for CGAL_VERSION_NR + #include #include // for max #include // for numeric_limits @@ -21,6 +24,11 @@ #include #include // FLT_EVAL_METHOD +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + namespace Gudhi { namespace persistence_diagram { diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index d9f8d9f4..c2b3157c 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -5,6 +5,7 @@ * Copyright (C) 2014 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL * - YYYY/MM Author: Description of the modification */ @@ -24,8 +25,9 @@ #include #include -// todo remove the queue to be independent from cgald +// todo remove the queue to be independent from cgal #include +#include // for CGAL_VERSION_NR #include #include @@ -36,6 +38,11 @@ #include // for pair #include +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + namespace Gudhi { namespace contraction { diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h index a1621ad9..fc6a2a91 100644 --- a/src/Nerve_GIC/include/gudhi/GIC.h +++ b/src/Nerve_GIC/include/gudhi/GIC.h @@ -5,6 +5,7 @@ * Copyright (C) 2017 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL * - YYYY/MM Author: Description of the modification */ @@ -34,6 +35,8 @@ #include #include +#include // for CGAL_VERSION_NR + #include #include #include @@ -45,6 +48,11 @@ #include #include +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + namespace Gudhi { namespace cover_complex { diff --git a/src/Spatial_searching/include/gudhi/Kd_tree_search.h b/src/Spatial_searching/include/gudhi/Kd_tree_search.h index 9e4666bb..fedbb32e 100644 --- a/src/Spatial_searching/include/gudhi/Kd_tree_search.h +++ b/src/Spatial_searching/include/gudhi/Kd_tree_search.h @@ -5,6 +5,7 @@ * Copyright (C) 2016 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3 * - YYYY/MM Author: Description of the modification */ @@ -17,6 +18,9 @@ #include #include #include +#include // for CGAL_VERSION_NR + +#include // for EIGEN_VERSION_AT_LEAST #include #include @@ -24,6 +28,15 @@ #include #include +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + +#if !EIGEN_VERSION_AT_LEAST(3,1,0) +# error Alpha_complex_3d is only available for Eigen3 >= 3.1.0 installed with CGAL +#endif + namespace Gudhi { namespace spatial_searching { diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h index b3bac58e..f59476b1 100644 --- 
a/src/Tangential_complex/include/gudhi/Tangential_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -5,6 +5,7 @@ * Copyright (C) 2016 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3 * - YYYY/MM Author: Description of the modification */ @@ -29,9 +30,11 @@ #include #include #include +#include // for CGAL_VERSION_NR #include #include +#include // for EIGEN_VERSION_AT_LEAST #include #include @@ -62,6 +65,15 @@ // #define GUDHI_TC_EXPORT_NORMALS // Only for 3D surfaces (k=2, d=3) +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + +#if !EIGEN_VERSION_AT_LEAST(3,1,0) +# error Alpha_complex_3d is only available for Eigen3 >= 3.1.0 installed with CGAL +#endif + namespace sps = Gudhi::spatial_searching; namespace Gudhi { diff --git a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h index c9767982..7d3c2d6d 100644 --- a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h @@ -5,6 +5,7 @@ * Copyright (C) 2015 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3 * - YYYY/MM Author: Description of the modification */ @@ -15,9 +16,22 @@ #include #include +#include // for CGAL_VERSION_NR + +#include // for EIGEN_VERSION_AT_LEAST + #include #include +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + +#if !EIGEN_VERSION_AT_LEAST(3,1,0) +# error Alpha_complex_3d is only available for Eigen3 >= 3.1.0 installed with CGAL +#endif + namespace Gudhi { namespace witness_complex { diff --git a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h index 75ea38e9..21682ec4 100644 --- a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h @@ -5,6 +5,7 @@ * Copyright (C) 2015 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3 * - YYYY/MM Author: Description of the modification */ @@ -15,11 +16,24 @@ #include #include +#include // for CGAL_VERSION_NR + +#include // for EIGEN_VERSION_AT_LEAST + #include #include #include #include +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + +#if !EIGEN_VERSION_AT_LEAST(3,1,0) +# error Alpha_complex_3d is only available for Eigen3 >= 3.1.0 installed with CGAL +#endif + namespace Gudhi { namespace witness_complex { diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 2629d12c..02d3c73a 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -60,8 +60,8 @@ make doxygen * Having GMP version 4.2 or higher installed is recommended. * * \subsection cgal CGAL - * The \ref alpha_complex data structure, \ref bottleneck_distance, and few examples requires CGAL, which is a C++ - * library which provides easy access to efficient and reliable geometric algorithms. 
+ * Some GUDHI modules (cf. \ref main_page "modules list"), and few examples requires CGAL, a C++ library that provides + * easy access to efficient and reliable geometric algorithms. * * \note There is no need to install CGAL, you can just cmake . && make CGAL (or even * cmake -DCGAL_HEADER_ONLY=ON .), thereafter you will be able to compile @@ -125,13 +125,13 @@ make doxygen * \li
* Alpha_complex/alpha_complex_3d_persistence.cpp * - * \subsection eigen3 Eigen3 + * \subsection eigen Eigen * The \ref alpha_complex data structure and few examples requires - * Eigen3 is a C++ template library for linear algebra: + * Eigen is a C++ template library for linear algebra: * matrices, vectors, numerical solvers, and related algorithms. * - * The following examples/utilities require the Eigen3 and will not be - * built if Eigen3 is not installed: + * The following examples/utilities require the Eigen and will not be + * built if Eigen is not installed: * \li * Alpha_complex/Alpha_complex_from_off.cpp * \li diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index ea2474be..d8cbf97f 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -50,7 +50,7 @@ Author: Vincent Rouvreau
Introduced in: GUDHI 1.3.0
Copyright: MIT [(GPL v3)](../../licensing/)
- Requires: \ref eigen3 and \ref cgal ≥ 4.11.0 + Requires: \ref eigen ≥ 3.1.0 and \ref cgal ≥ 4.11.0 @@ -126,7 +126,7 @@ Author: Siargey Kachanovich
Introduced in: GUDHI 1.3.0
Copyright: MIT ([GPL v3](../../licensing/) for Euclidean version)
- Euclidean version requires: \ref eigen3 and \ref cgal ≥ 4.11.0 + Euclidean version requires: \ref eigen ≥ 3.1.0 and \ref cgal ≥ 4.11.0 @@ -324,7 +324,7 @@ Author: Clément Jamin
Introduced in: GUDHI 2.0.0
Copyright: MIT [(GPL v3)](../../licensing/)
- Requires: \ref eigen3 and \ref cgal ≥ 4.11.0 + Requires: \ref eigen ≥ 3.1.0 and \ref cgal ≥ 4.11.0 diff --git a/src/common/include/gudhi/random_point_generators.h b/src/common/include/gudhi/random_point_generators.h index 7889b9ca..fb69f832 100644 --- a/src/common/include/gudhi/random_point_generators.h +++ b/src/common/include/gudhi/random_point_generators.h @@ -5,6 +5,7 @@ * Copyright (C) 2016 Inria * * Modification(s): + * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL * - YYYY/MM Author: Description of the modification */ @@ -14,9 +15,15 @@ #include #include #include +#include // for CGAL_VERSION_NR #include // for vector<> +// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10 +#if CGAL_VERSION_NR < 1041101000 +# error Alpha_complex_3d is only available for CGAL >= 4.11 +#endif + namespace Gudhi { /////////////////////////////////////////////////////////////////////////////// diff --git a/src/cython/doc/alpha_complex_sum.inc b/src/cython/doc/alpha_complex_sum.inc index 9049e654..c5ba9dc7 100644 --- a/src/cython/doc/alpha_complex_sum.inc +++ b/src/cython/doc/alpha_complex_sum.inc @@ -1,20 +1,20 @@ .. table:: :widths: 30 50 20 - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ - | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | - | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | | - | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | - | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 `_) | - | | is then said to be Gabriel), and as the minimum of the filtration | | - | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 | - | | otherwise. All simplices that have a filtration value strictly | | - | | greater than a given alpha squared value are not inserted into the | | - | | complex. | | - | | | | - | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | - | | advised for better performance). | | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ - | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+ + | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | + | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. 
| | + | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | + | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 `_) | + | | is then said to be Gabriel), and as the minimum of the filtration | | + | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen `__ :math:`\geq` 3.1.0 and `CGAL `__ :math:`\geq` 4.11.0 | + | | otherwise. All simplices that have a filtration value strictly | | + | | greater than a given alpha squared value are not inserted into the | | + | | complex. | | + | | | | + | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | + | | advised for better performance). | | + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+ + | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | + +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst index 02b889d0..e40a2ef9 100644 --- a/src/cython/doc/installation.rst +++ b/src/cython/doc/installation.rst @@ -144,12 +144,10 @@ Optional third-party library CGAL ==== -The :doc:`Alpha complex `, -:doc:`Tangential complex ` and -:doc:`Witness complex ` data structures, and -:doc:`Bottleneck distance ` requires CGAL, which is a -C++ library which provides easy access to efficient and reliable geometric -algorithms. +Some GUDHI modules (cf. :doc:`modules list `), and few examples +requires CGAL, a C++ library that provides easy access to efficient and +reliable geometric algorithms. + The procedure to install this library according to your operating system is detailed @@ -166,17 +164,17 @@ The following examples requires CGAL version ≥ 4.11.0: * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` -Eigen3 -====== +Eigen +===== The :doc:`Alpha complex `, :doc:`Tangential complex ` and :doc:`Witness complex ` data structures and few -examples requires `Eigen3 `_, a C++ template +examples requires `Eigen `_, a C++ template library for linear algebra: matrices, vectors, numerical solvers, and related algorithms. -The following examples require the `Eigen3 `_: +The following examples require `Eigen `_ version ≥ 3.1.0: .. only:: builder_html diff --git a/src/cython/doc/tangential_complex_sum.inc b/src/cython/doc/tangential_complex_sum.inc index c8bc1177..d84aa433 100644 --- a/src/cython/doc/tangential_complex_sum.inc +++ b/src/cython/doc/tangential_complex_sum.inc @@ -1,14 +1,14 @@ .. table:: :widths: 30 50 20 - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ - | .. 
figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | - | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | - | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | - | | an unknown manifold. The running time depends only linearly on the | | - | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 `_) | - | | dimension :math:`k`. | | - | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ - | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+ + | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | + | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | + | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | + | | an unknown manifold. The running time depends only linearly on the | | + | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 `_) | + | | dimension :math:`k`. | | + | | | :Requires: `Eigen `__ :math:`\geq` 3.1.0 and `CGAL `__ :math:`\geq` 4.11.0 | + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+ + | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | + +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/witness_complex_sum.inc b/src/cython/doc/witness_complex_sum.inc index 2be8b220..71b65a71 100644 --- a/src/cython/doc/witness_complex_sum.inc +++ b/src/cython/doc/witness_complex_sum.inc @@ -1,18 +1,18 @@ .. table:: :widths: 30 50 20 - +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ - | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | - | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. 
| | - | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | - | | | | - | | | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 for Euclidean versions only | - +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ - | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | - | | * :doc:`strong_witness_complex_ref` | - | | * :doc:`euclidean_witness_complex_ref` | - | | * :doc:`euclidean_strong_witness_complex_ref` | - +-------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------+ + | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | + | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | | + | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | + | | | | + | | | :Requires: `Eigen `__ :math:`\geq` 3.1.0 and `CGAL `__ :math:`\geq` 4.11.0 for Euclidean versions only | + +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------+ + | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | + | | * :doc:`strong_witness_complex_ref` | + | | * :doc:`euclidean_witness_complex_ref` | + | | * :doc:`euclidean_strong_witness_complex_ref` | + +-------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -- cgit v1.2.3 From d6c903603bbc8833aad92c7d8c09cf78e99f689d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 29 Aug 2019 16:50:12 +0200 Subject: Add conda installation. NumPy became mandatory --- src/cython/doc/installation.rst | 52 +++++++++++++++++------------------------ 1 file changed, 21 insertions(+), 31 deletions(-) diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst index e40a2ef9..34156942 100644 --- a/src/cython/doc/installation.rst +++ b/src/cython/doc/installation.rst @@ -5,16 +5,22 @@ Installation ############ +Conda +***** +The easiest way to install the Python version of GUDHI is using +`conda `_. 
+ Compiling ********* The library uses c++11 and requires `Boost `_ ≥ 1.56.0, -`CMake `_ ≥ 3.1 to generate makefiles, and -`Cython `_ to compile the GUDHI Python module. +`CMake `_ ≥ 3.1 to generate makefiles, +`NumPy `_ and `Cython `_ to compile +the GUDHI Python module. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015. On `Windows `_ , only Python -3.5 and 3.6 are available because of the required Visual Studio version. +≥ 3.5 are available because of the required Visual Studio version. On other systems, if you have several Python/Cython installed, the version 2.X will be used by default, but you can force it by adding @@ -87,11 +93,14 @@ You shall have something like: Python version 2.7.15 Cython version 0.26.1 + Numpy version 1.14.1 Eigen3 version 3.1.1 - Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex; - persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;alpha_complex; + Installed modules are: off_reader;simplex_tree;rips_complex; + cubical_complex;periodic_cubical_complex;reader_utils;witness_complex; + strong_witness_complex;alpha_complex; + Missing modules are: bottleneck_distance;nerve_gic;subsampling; + tangential_complex;persistence_graphical_tools; euclidean_witness_complex;euclidean_strong_witness_complex; - Missing modules are: bottleneck_distance;nerve_gic;subsampling;tangential_complex;persistence_graphical_tools; CGAL version 4.7.1000 GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so @@ -99,7 +108,7 @@ You shall have something like: Here, you can see that bottleneck_distance, nerve_gic, subsampling and tangential_complex are missing because of the CGAL version. -persistence_graphical_tools is not available as numpy and matplotlib are not +persistence_graphical_tools is not available as matplotlib is not available. Unitary tests cannot be run as pytest is missing. @@ -113,9 +122,11 @@ A complete configuration would be : Matplotlib version 2.2.2 Numpy version 1.14.5 Eigen3 version 3.3.4 - Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex; - persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;persistence_graphical_tools; - bottleneck_distance;nerve_gic;subsampling;tangential_complex;alpha_complex;euclidean_witness_complex; + Installed modules are: off_reader;simplex_tree;rips_complex; + cubical_complex;periodic_cubical_complex;persistence_graphical_tools; + reader_utils;witness_complex;strong_witness_complex; + persistence_graphical_tools;bottleneck_distance;nerve_gic;subsampling; + tangential_complex;alpha_complex;euclidean_witness_complex; euclidean_strong_witness_complex; CGAL header only version 4.11.0 GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so @@ -194,27 +205,6 @@ formats and interactive environments across platforms. The following examples require the `Matplotlib `_: -.. 
only:: builder_html - - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` - * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` - * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` - * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` - * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` - -NumPy -===== - -The :doc:`persistence graphical tools ` -module requires `NumPy `_, a fundamental package for -scientific computing with Python. - -The following examples require the `NumPy `_: - .. only:: builder_html * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` -- cgit v1.2.3 From 8c9b845691ef51affd82b63d7229549a0353356d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 29 Aug 2019 17:03:36 +0200 Subject: GUDHI version 3.0.0.rc1 --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index eb2a0666..bc34d9c7 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 0) -set (GUDHI_PATCH_VERSION 0) +set (GUDHI_PATCH_VERSION 0.rc1) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 38463699876e8cd5af3fc5ebdc5b6202d3b7d314 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 30 Aug 2019 08:09:30 +0200 Subject: Modify the documentation headers with last web site modifications --- src/common/doc/header.html | 51 +++++++-------- src/cython/doc/_templates/layout.html | 113 +++++++++++++++++----------------- 2 files changed, 83 insertions(+), 81 deletions(-) diff --git a/src/common/doc/header.html b/src/common/doc/header.html index c12d2816..9fdb2321 100644 --- a/src/common/doc/header.html +++ b/src/common/doc/header.html @@ -25,62 +25,63 @@ $extrastylesheet

diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html index bc0e9658..fe64fb3d 100644 --- a/src/cython/doc/_templates/layout.html +++ b/src/cython/doc/_templates/layout.html @@ -165,62 +165,63 @@ + +
-- cgit v1.2.3 From f510a7e607b46ba8cc118cd787ff9b0b8bff091f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 30 Aug 2019 08:48:05 +0200 Subject: Eigen scope refine --- src/common/doc/installation.h | 4 ++-- src/cython/doc/installation.rst | 8 +++----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 02d3c73a..54f86573 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -60,7 +60,7 @@ make doxygen * Having GMP version 4.2 or higher installed is recommended. * * \subsection cgal CGAL - * Some GUDHI modules (cf. \ref main_page "modules list"), and few examples requires CGAL, a C++ library that provides + * Some GUDHI modules (cf. \ref main_page "modules list"), and few examples require CGAL, a C++ library that provides * easy access to efficient and reliable geometric algorithms. * * \note There is no need to install CGAL, you can just cmake . && make CGAL (or even @@ -126,7 +126,7 @@ make doxygen * Alpha_complex/alpha_complex_3d_persistence.cpp * * \subsection eigen Eigen - * The \ref alpha_complex data structure and few examples requires + * Some GUDHI modules (cf. \ref main_page "modules list"), and few examples require * Eigen is a C++ template library for linear algebra: * matrices, vectors, numerical solvers, and related algorithms. * diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst index 34156942..15f383c2 100644 --- a/src/cython/doc/installation.rst +++ b/src/cython/doc/installation.rst @@ -156,7 +156,7 @@ CGAL ==== Some GUDHI modules (cf. :doc:`modules list `), and few examples -requires CGAL, a C++ library that provides easy access to efficient and +require CGAL, a C++ library that provides easy access to efficient and reliable geometric algorithms. @@ -178,10 +178,8 @@ The following examples requires CGAL version ≥ 4.11.0: Eigen ===== -The :doc:`Alpha complex `, -:doc:`Tangential complex ` and -:doc:`Witness complex ` data structures and few -examples requires `Eigen `_, a C++ template +Some GUDHI modules (cf. :doc:`modules list `), and few examples +require `Eigen `_, a C++ template library for linear algebra: matrices, vectors, numerical solvers, and related algorithms. 
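The commits above make the CGAL and Eigen version requirements explicit at compile time: every CGAL- or Eigen-dependent GUDHI header now carries the same pair of preprocessor guards, so an external project built against an older CGAL or Eigen stops with a readable #error instead of a cryptic compilation failure (issue #10). What follows is a minimal sketch of that guard pattern, assuming the version macros come from <CGAL/version.h> and <Eigen/Core> — the exact #include targets are not visible in the hunks above, only their trailing comments ("// for CGAL_VERSION_NR", "// for EIGEN_VERSION_AT_LEAST") — and with a generic error message in place of the module-specific ones used in the patches:

    #include <CGAL/version.h>  // assumed header defining CGAL_VERSION_NR
    #include <Eigen/Core>      // assumed header providing EIGEN_VERSION_AT_LEAST

    // Make compilation fail early with an explicit message, as in the patches above
    #if CGAL_VERSION_NR < 1041101000
    # error This module is only available for CGAL >= 4.11
    #endif

    #if !EIGEN_VERSION_AT_LEAST(3, 1, 0)
    # error This module is only available for Eigen3 >= 3.1.0 installed with CGAL
    #endif

With these guards in place, a user project that includes such a header (for example gudhi/Alpha_complex.h) against a too-old CGAL or Eigen fails at preprocessing time with the message above, rather than deep inside a template instantiation.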
-- cgit v1.2.3 From 3c96720c786462827820f4d35e720c8f2084867b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 5 Sep 2019 12:54:57 +0200 Subject: Reduce number of code line --- src/cython/gudhi/alpha_complex.pyx | 3 +-- src/cython/gudhi/euclidean_strong_witness_complex.pyx | 7 ++++--- src/cython/gudhi/euclidean_witness_complex.pyx | 7 ++++--- src/cython/gudhi/nerve_gic.pyx | 3 +-- src/cython/gudhi/rips_complex.pyx | 5 +++-- src/cython/gudhi/strong_witness_complex.pyx | 6 +++--- src/cython/gudhi/tangential_complex.pyx | 3 +-- src/cython/gudhi/witness_complex.pyx | 6 +++--- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/src/cython/gudhi/alpha_complex.pyx b/src/cython/gudhi/alpha_complex.pyx index 88ef3b1b..6d6309db 100644 --- a/src/cython/gudhi/alpha_complex.pyx +++ b/src/cython/gudhi/alpha_complex.pyx @@ -112,6 +112,5 @@ cdef class AlphaComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + self.thisptr.create_simplex_tree(stree_int_ptr, max_alpha_square) return stree diff --git a/src/cython/gudhi/euclidean_strong_witness_complex.pyx b/src/cython/gudhi/euclidean_strong_witness_complex.pyx index 465635c4..5d6e4fb9 100644 --- a/src/cython/gudhi/euclidean_strong_witness_complex.pyx +++ b/src/cython/gudhi/euclidean_strong_witness_complex.pyx @@ -71,11 +71,12 @@ cdef class EuclideanStrongWitnessComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square, limit_dimension) + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square, limit_dimension) else: - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) return stree def get_point(self, vertex): diff --git a/src/cython/gudhi/euclidean_witness_complex.pyx b/src/cython/gudhi/euclidean_witness_complex.pyx index 92d54eb5..2531919b 100644 --- a/src/cython/gudhi/euclidean_witness_complex.pyx +++ b/src/cython/gudhi/euclidean_witness_complex.pyx @@ -71,11 +71,12 @@ cdef class EuclideanWitnessComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square, limit_dimension) + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square, limit_dimension) else: - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) return stree def get_point(self, vertex): diff --git a/src/cython/gudhi/nerve_gic.pyx b/src/cython/gudhi/nerve_gic.pyx index 9fec626f..2b230b8c 100644 --- a/src/cython/gudhi/nerve_gic.pyx +++ b/src/cython/gudhi/nerve_gic.pyx @@ -164,8 +164,7 @@ cdef class CoverComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr - self.thisptr.create_simplex_tree(stree_ptr) + self.thisptr.create_simplex_tree(stree_int_ptr) return stree def find_simplices(self): diff --git a/src/cython/gudhi/rips_complex.pyx b/src/cython/gudhi/rips_complex.pyx index 1a6c8571..f2cd6a8d 100644 --- a/src/cython/gudhi/rips_complex.pyx +++ b/src/cython/gudhi/rips_complex.pyx @@ -97,6 
+97,7 @@ cdef class RipsComplex: :rtype: SimplexTree """ stree = SimplexTree() - cdef intptr_t stree_ptr=stree.thisptr - self.thisref.create_simplex_tree(stree_ptr, max_dimension) + cdef intptr_t stree_int_ptr=stree.thisptr + self.thisref.create_simplex_tree(stree_int_ptr, + max_dimension) return stree diff --git a/src/cython/gudhi/strong_witness_complex.pyx b/src/cython/gudhi/strong_witness_complex.pyx index 4e3d1b67..e757abea 100644 --- a/src/cython/gudhi/strong_witness_complex.pyx +++ b/src/cython/gudhi/strong_witness_complex.pyx @@ -69,10 +69,10 @@ cdef class StrongWitnessComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_ptr, + self.thisptr.create_simplex_tree(stree_int_ptr, max_alpha_square, limit_dimension) else: - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) return stree diff --git a/src/cython/gudhi/tangential_complex.pyx b/src/cython/gudhi/tangential_complex.pyx index 9eb22115..3a945fe2 100644 --- a/src/cython/gudhi/tangential_complex.pyx +++ b/src/cython/gudhi/tangential_complex.pyx @@ -144,8 +144,7 @@ cdef class TangentialComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr - self.thisptr.create_simplex_tree(stree_ptr) + self.thisptr.create_simplex_tree(stree_int_ptr) return stree def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0): diff --git a/src/cython/gudhi/witness_complex.pyx b/src/cython/gudhi/witness_complex.pyx index c859877d..baa70b7a 100644 --- a/src/cython/gudhi/witness_complex.pyx +++ b/src/cython/gudhi/witness_complex.pyx @@ -69,10 +69,10 @@ cdef class WitnessComplex: """ stree = SimplexTree() cdef intptr_t stree_int_ptr=stree.thisptr - cdef Simplex_tree_interface_full_featured* stree_ptr = stree_int_ptr if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_ptr, + self.thisptr.create_simplex_tree(stree_int_ptr, max_alpha_square, limit_dimension) else: - self.thisptr.create_simplex_tree(stree_ptr, max_alpha_square) + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) return stree -- cgit v1.2.3 From 4fd93dbc06b3020ddd856aed2d8ff37e1d27e517 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 5 Sep 2019 17:10:15 +0200 Subject: Reduce number of lines --- src/cython/gudhi/simplex_tree.pyx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/cython/gudhi/simplex_tree.pyx b/src/cython/gudhi/simplex_tree.pyx index 51134681..9f490271 100644 --- a/src/cython/gudhi/simplex_tree.pyx +++ b/src/cython/gudhi/simplex_tree.pyx @@ -44,8 +44,7 @@ cdef class SimplexTree: # The real cython constructor def __cinit__(self): - cdef Simplex_tree_interface_full_featured* ptr = new Simplex_tree_interface_full_featured() - self.thisptr = ptr + self.thisptr = (new Simplex_tree_interface_full_featured()) def __dealloc__(self): cdef Simplex_tree_interface_full_featured* ptr = self.get_ptr() -- cgit v1.2.3 From dcbdaa0dc00eb069d1a13575f22c0a2f7d63dcc8 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 6 Sep 2019 09:54:16 +0200 Subject: Use setuptools to remove warnings about install_requires and setup_requires --- src/cython/setup.py.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in index 
fd4307e3..974dfb7a 100644 --- a/src/cython/setup.py.in +++ b/src/cython/setup.py.in @@ -1,4 +1,4 @@ -from distutils.core import setup, Extension +from setuptools import setup, Extension from Cython.Build import cythonize from numpy import get_include as numpy_get_include -- cgit v1.2.3 From 68753b3c28321e28eedd5829c94234da84e25c8d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 9 Sep 2019 16:03:40 +0200 Subject: Code review: rename cython as python (make target and directory --- .appveyor.yml | 4 +- .circleci/config.yml | 2 +- .gitignore | 16 +- CMakeLists.txt | 4 +- src/CMakeLists.txt | 4 +- src/Doxyfile.in | 2 +- .../modules/GUDHI_third_party_libraries.cmake | 20 +- src/cmake/modules/GUDHI_user_version_target.cmake | 2 +- src/cython/CMakeLists.txt | 433 ------------------ src/cython/CONVENTIONS | 9 - src/cython/README | 3 - src/cython/doc/_templates/layout.html | 275 ----------- src/cython/doc/alpha_complex_ref.rst | 14 - src/cython/doc/alpha_complex_sum.inc | 20 - src/cython/doc/alpha_complex_user.rst | 210 --------- src/cython/doc/bottleneck_distance_sum.inc | 14 - src/cython/doc/bottleneck_distance_user.rst | 67 --- src/cython/doc/citation.rst | 19 - src/cython/doc/conf.py | 203 -------- src/cython/doc/cubical_complex_ref.rst | 13 - src/cython/doc/cubical_complex_sum.inc | 14 - src/cython/doc/cubical_complex_user.rst | 168 ------- .../doc/euclidean_strong_witness_complex_ref.rst | 14 - src/cython/doc/euclidean_witness_complex_ref.rst | 14 - src/cython/doc/examples.rst | 30 -- src/cython/doc/fileformats.rst | 127 ------ .../doc/img/graphical_tools_representation.png | Bin 10846 -> 0 bytes src/cython/doc/index.rst | 86 ---- src/cython/doc/installation.rst | 256 ----------- src/cython/doc/nerve_gic_complex_ref.rst | 14 - src/cython/doc/nerve_gic_complex_sum.inc | 16 - src/cython/doc/nerve_gic_complex_user.rst | 315 ------------- src/cython/doc/periodic_cubical_complex_ref.rst | 13 - src/cython/doc/persistence_graphical_tools_ref.rst | 12 - src/cython/doc/persistence_graphical_tools_sum.inc | 14 - .../doc/persistence_graphical_tools_user.rst | 73 --- src/cython/doc/persistent_cohomology_sum.inc | 26 -- src/cython/doc/persistent_cohomology_user.rst | 120 ----- src/cython/doc/python3-sphinx-build.py | 11 - src/cython/doc/reader_utils_ref.rst | 15 - src/cython/doc/rips_complex_ref.rst | 14 - src/cython/doc/rips_complex_sum.inc | 16 - src/cython/doc/rips_complex_user.rst | 345 -------------- src/cython/doc/simplex_tree_ref.rst | 14 - src/cython/doc/simplex_tree_sum.inc | 13 - src/cython/doc/simplex_tree_user.rst | 72 --- src/cython/doc/strong_witness_complex_ref.rst | 14 - src/cython/doc/tangential_complex_ref.rst | 14 - src/cython/doc/tangential_complex_sum.inc | 14 - src/cython/doc/tangential_complex_user.rst | 204 --------- src/cython/doc/todos.rst | 9 - src/cython/doc/witness_complex_ref.rst | 14 - src/cython/doc/witness_complex_sum.inc | 18 - src/cython/doc/witness_complex_user.rst | 135 ------ ...ex_diagram_persistence_from_off_file_example.py | 68 --- .../example/alpha_complex_from_points_example.py | 55 --- .../alpha_rips_persistence_bottleneck_distance.py | 105 ----- src/cython/example/bottleneck_basic_example.py | 37 -- .../example/coordinate_graph_induced_complex.py | 68 --- ...ex_diagram_persistence_from_off_file_example.py | 83 ---- ...ex_diagram_persistence_from_off_file_example.py | 82 ---- .../example/functional_graph_induced_complex.py | 69 --- .../example/gudhi_graphical_tools_example.py | 43 -- src/cython/example/nerve_of_a_covering.py | 70 --- 
...arcode_persistence_from_perseus_file_example.py | 74 --- .../random_cubical_complex_persistence_example.py | 49 -- ...istence_from_correlation_matrix_file_example.py | 87 ---- ...ersistence_from_distance_matrix_file_example.py | 63 --- ...ex_diagram_persistence_from_off_file_example.py | 72 --- .../example/rips_complex_from_points_example.py | 27 -- src/cython/example/rips_persistence_diagram.py | 30 -- src/cython/example/simplex_tree_example.py | 51 --- .../example/sparse_rips_persistence_diagram.py | 32 -- ...complex_plain_homology_from_off_file_example.py | 64 --- .../example/voronoi_graph_induced_complex.py | 65 --- .../witness_complex_from_nearest_landmark_table.py | 36 -- src/cython/gudhi/__init__.py | 1 - src/cython/gudhi/__init__.py.in | 40 -- src/cython/gudhi/alpha_complex.pyx | 116 ----- src/cython/gudhi/bottleneck.pyx | 49 -- src/cython/gudhi/cubical_complex.pyx | 188 -------- .../gudhi/euclidean_strong_witness_complex.pyx | 92 ---- src/cython/gudhi/euclidean_witness_complex.pyx | 92 ---- src/cython/gudhi/nerve_gic.pyx | 412 ----------------- src/cython/gudhi/off_reader.pyx | 38 -- src/cython/gudhi/periodic_cubical_complex.pyx | 190 -------- src/cython/gudhi/persistence_graphical_tools.py | 423 ----------------- src/cython/gudhi/reader_utils.pyx | 87 ---- src/cython/gudhi/rips_complex.pyx | 103 ----- src/cython/gudhi/simplex_tree.pxd | 56 --- src/cython/gudhi/simplex_tree.pyx | 508 --------------------- src/cython/gudhi/strong_witness_complex.pyx | 78 ---- src/cython/gudhi/subsampling.pyx | 130 ------ src/cython/gudhi/tangential_complex.pyx | 173 ------- src/cython/gudhi/witness_complex.pyx | 78 ---- src/cython/include/Alpha_complex_interface.h | 72 --- src/cython/include/Bottleneck_distance_interface.h | 43 -- src/cython/include/Cubical_complex_interface.h | 52 --- .../Euclidean_strong_witness_complex_interface.h | 83 ---- .../include/Euclidean_witness_complex_interface.h | 82 ---- src/cython/include/Nerve_gic_interface.h | 51 --- src/cython/include/Off_reader_interface.h | 32 -- .../include/Persistent_cohomology_interface.h | 111 ----- src/cython/include/Reader_utils_interface.h | 46 -- src/cython/include/Rips_complex_interface.h | 72 --- src/cython/include/Simplex_tree_interface.h | 144 ------ .../include/Strong_witness_complex_interface.h | 63 --- src/cython/include/Subsampling_interface.h | 109 ----- src/cython/include/Tangential_complex_interface.h | 111 ----- src/cython/include/Witness_complex_interface.h | 64 --- src/cython/setup.py.in | 53 --- src/cython/test/test_alpha_complex.py | 90 ---- src/cython/test/test_bottleneck_distance.py | 23 - src/cython/test/test_cover_complex.py | 85 ---- src/cython/test/test_cubical_complex.py | 98 ---- src/cython/test/test_euclidean_witness_complex.py | 95 ---- src/cython/test/test_reader_utils.py | 126 ----- src/cython/test/test_rips_complex.py | 133 ------ src/cython/test/test_simplex_tree.py | 250 ---------- src/cython/test/test_subsampling.py | 179 -------- src/cython/test/test_tangential_complex.py | 55 --- src/cython/test/test_witness_complex.py | 62 --- src/python/CMakeLists.txt | 432 ++++++++++++++++++ src/python/CONVENTIONS | 9 + src/python/README | 3 + src/python/doc/_templates/layout.html | 275 +++++++++++ src/python/doc/alpha_complex_ref.rst | 14 + src/python/doc/alpha_complex_sum.inc | 20 + src/python/doc/alpha_complex_user.rst | 210 +++++++++ src/python/doc/bottleneck_distance_sum.inc | 14 + src/python/doc/bottleneck_distance_user.rst | 67 +++ src/python/doc/citation.rst | 19 + src/python/doc/conf.py | 203 ++++++++ 
src/python/doc/cubical_complex_ref.rst | 13 + src/python/doc/cubical_complex_sum.inc | 14 + src/python/doc/cubical_complex_user.rst | 168 +++++++ .../doc/euclidean_strong_witness_complex_ref.rst | 14 + src/python/doc/euclidean_witness_complex_ref.rst | 14 + src/python/doc/examples.rst | 30 ++ src/python/doc/fileformats.rst | 127 ++++++ .../doc/img/graphical_tools_representation.png | Bin 0 -> 10846 bytes src/python/doc/index.rst | 86 ++++ src/python/doc/installation.rst | 256 +++++++++++ src/python/doc/nerve_gic_complex_ref.rst | 14 + src/python/doc/nerve_gic_complex_sum.inc | 16 + src/python/doc/nerve_gic_complex_user.rst | 315 +++++++++++++ src/python/doc/periodic_cubical_complex_ref.rst | 13 + src/python/doc/persistence_graphical_tools_ref.rst | 11 + src/python/doc/persistence_graphical_tools_sum.inc | 14 + .../doc/persistence_graphical_tools_user.rst | 73 +++ src/python/doc/persistent_cohomology_sum.inc | 26 ++ src/python/doc/persistent_cohomology_user.rst | 120 +++++ src/python/doc/python3-sphinx-build.py | 11 + src/python/doc/reader_utils_ref.rst | 15 + src/python/doc/rips_complex_ref.rst | 14 + src/python/doc/rips_complex_sum.inc | 16 + src/python/doc/rips_complex_user.rst | 345 ++++++++++++++ src/python/doc/simplex_tree_ref.rst | 14 + src/python/doc/simplex_tree_sum.inc | 13 + src/python/doc/simplex_tree_user.rst | 72 +++ src/python/doc/strong_witness_complex_ref.rst | 14 + src/python/doc/tangential_complex_ref.rst | 14 + src/python/doc/tangential_complex_sum.inc | 14 + src/python/doc/tangential_complex_user.rst | 204 +++++++++ src/python/doc/todos.rst | 9 + src/python/doc/witness_complex_ref.rst | 14 + src/python/doc/witness_complex_sum.inc | 18 + src/python/doc/witness_complex_user.rst | 135 ++++++ ...ex_diagram_persistence_from_off_file_example.py | 68 +++ .../example/alpha_complex_from_points_example.py | 55 +++ .../alpha_rips_persistence_bottleneck_distance.py | 105 +++++ src/python/example/bottleneck_basic_example.py | 37 ++ .../example/coordinate_graph_induced_complex.py | 68 +++ ...ex_diagram_persistence_from_off_file_example.py | 83 ++++ ...ex_diagram_persistence_from_off_file_example.py | 82 ++++ .../example/functional_graph_induced_complex.py | 69 +++ .../example/gudhi_graphical_tools_example.py | 43 ++ src/python/example/nerve_of_a_covering.py | 70 +++ ...arcode_persistence_from_perseus_file_example.py | 74 +++ .../random_cubical_complex_persistence_example.py | 49 ++ ...istence_from_correlation_matrix_file_example.py | 87 ++++ ...ersistence_from_distance_matrix_file_example.py | 63 +++ ...ex_diagram_persistence_from_off_file_example.py | 72 +++ .../example/rips_complex_from_points_example.py | 27 ++ src/python/example/rips_persistence_diagram.py | 30 ++ src/python/example/simplex_tree_example.py | 51 +++ .../example/sparse_rips_persistence_diagram.py | 32 ++ ...complex_plain_homology_from_off_file_example.py | 64 +++ .../example/voronoi_graph_induced_complex.py | 65 +++ .../witness_complex_from_nearest_landmark_table.py | 36 ++ src/python/gudhi/__init__.py | 1 + src/python/gudhi/__init__.py.in | 40 ++ src/python/gudhi/alpha_complex.pyx | 116 +++++ src/python/gudhi/bottleneck.pyx | 49 ++ src/python/gudhi/cubical_complex.pyx | 188 ++++++++ .../gudhi/euclidean_strong_witness_complex.pyx | 92 ++++ src/python/gudhi/euclidean_witness_complex.pyx | 92 ++++ src/python/gudhi/nerve_gic.pyx | 412 +++++++++++++++++ src/python/gudhi/off_reader.pyx | 38 ++ src/python/gudhi/periodic_cubical_complex.pyx | 190 ++++++++ src/python/gudhi/persistence_graphical_tools.py | 423 
+++++++++++++++++ src/python/gudhi/reader_utils.pyx | 87 ++++ src/python/gudhi/rips_complex.pyx | 103 +++++ src/python/gudhi/simplex_tree.pxd | 56 +++ src/python/gudhi/simplex_tree.pyx | 508 +++++++++++++++++++++ src/python/gudhi/strong_witness_complex.pyx | 78 ++++ src/python/gudhi/subsampling.pyx | 130 ++++++ src/python/gudhi/tangential_complex.pyx | 173 +++++++ src/python/gudhi/witness_complex.pyx | 78 ++++ src/python/include/Alpha_complex_interface.h | 72 +++ src/python/include/Bottleneck_distance_interface.h | 43 ++ src/python/include/Cubical_complex_interface.h | 52 +++ .../Euclidean_strong_witness_complex_interface.h | 83 ++++ .../include/Euclidean_witness_complex_interface.h | 82 ++++ src/python/include/Nerve_gic_interface.h | 51 +++ src/python/include/Off_reader_interface.h | 32 ++ .../include/Persistent_cohomology_interface.h | 111 +++++ src/python/include/Reader_utils_interface.h | 46 ++ src/python/include/Rips_complex_interface.h | 72 +++ src/python/include/Simplex_tree_interface.h | 144 ++++++ .../include/Strong_witness_complex_interface.h | 63 +++ src/python/include/Subsampling_interface.h | 109 +++++ src/python/include/Tangential_complex_interface.h | 111 +++++ src/python/include/Witness_complex_interface.h | 64 +++ src/python/setup.py.in | 53 +++ src/python/test/test_alpha_complex.py | 90 ++++ src/python/test/test_bottleneck_distance.py | 23 + src/python/test/test_cover_complex.py | 85 ++++ src/python/test/test_cubical_complex.py | 98 ++++ src/python/test/test_euclidean_witness_complex.py | 95 ++++ src/python/test/test_reader_utils.py | 126 +++++ src/python/test/test_rips_complex.py | 133 ++++++ src/python/test/test_simplex_tree.py | 250 ++++++++++ src/python/test/test_subsampling.py | 179 ++++++++ src/python/test/test_tangential_complex.py | 55 +++ src/python/test/test_witness_complex.py | 62 +++ 236 files changed, 10101 insertions(+), 10115 deletions(-) delete mode 100644 src/cython/CMakeLists.txt delete mode 100644 src/cython/CONVENTIONS delete mode 100644 src/cython/README delete mode 100644 src/cython/doc/_templates/layout.html delete mode 100644 src/cython/doc/alpha_complex_ref.rst delete mode 100644 src/cython/doc/alpha_complex_sum.inc delete mode 100644 src/cython/doc/alpha_complex_user.rst delete mode 100644 src/cython/doc/bottleneck_distance_sum.inc delete mode 100644 src/cython/doc/bottleneck_distance_user.rst delete mode 100644 src/cython/doc/citation.rst delete mode 100755 src/cython/doc/conf.py delete mode 100644 src/cython/doc/cubical_complex_ref.rst delete mode 100644 src/cython/doc/cubical_complex_sum.inc delete mode 100644 src/cython/doc/cubical_complex_user.rst delete mode 100644 src/cython/doc/euclidean_strong_witness_complex_ref.rst delete mode 100644 src/cython/doc/euclidean_witness_complex_ref.rst delete mode 100644 src/cython/doc/examples.rst delete mode 100644 src/cython/doc/fileformats.rst delete mode 100644 src/cython/doc/img/graphical_tools_representation.png delete mode 100644 src/cython/doc/index.rst delete mode 100644 src/cython/doc/installation.rst delete mode 100644 src/cython/doc/nerve_gic_complex_ref.rst delete mode 100644 src/cython/doc/nerve_gic_complex_sum.inc delete mode 100644 src/cython/doc/nerve_gic_complex_user.rst delete mode 100644 src/cython/doc/periodic_cubical_complex_ref.rst delete mode 100644 src/cython/doc/persistence_graphical_tools_ref.rst delete mode 100644 src/cython/doc/persistence_graphical_tools_sum.inc delete mode 100644 src/cython/doc/persistence_graphical_tools_user.rst delete mode 100644 
src/cython/doc/persistent_cohomology_sum.inc delete mode 100644 src/cython/doc/persistent_cohomology_user.rst delete mode 100755 src/cython/doc/python3-sphinx-build.py delete mode 100644 src/cython/doc/reader_utils_ref.rst delete mode 100644 src/cython/doc/rips_complex_ref.rst delete mode 100644 src/cython/doc/rips_complex_sum.inc delete mode 100644 src/cython/doc/rips_complex_user.rst delete mode 100644 src/cython/doc/simplex_tree_ref.rst delete mode 100644 src/cython/doc/simplex_tree_sum.inc delete mode 100644 src/cython/doc/simplex_tree_user.rst delete mode 100644 src/cython/doc/strong_witness_complex_ref.rst delete mode 100644 src/cython/doc/tangential_complex_ref.rst delete mode 100644 src/cython/doc/tangential_complex_sum.inc delete mode 100644 src/cython/doc/tangential_complex_user.rst delete mode 100644 src/cython/doc/todos.rst delete mode 100644 src/cython/doc/witness_complex_ref.rst delete mode 100644 src/cython/doc/witness_complex_sum.inc delete mode 100644 src/cython/doc/witness_complex_user.rst delete mode 100755 src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py delete mode 100755 src/cython/example/alpha_complex_from_points_example.py delete mode 100755 src/cython/example/alpha_rips_persistence_bottleneck_distance.py delete mode 100755 src/cython/example/bottleneck_basic_example.py delete mode 100755 src/cython/example/coordinate_graph_induced_complex.py delete mode 100755 src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py delete mode 100755 src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py delete mode 100755 src/cython/example/functional_graph_induced_complex.py delete mode 100755 src/cython/example/gudhi_graphical_tools_example.py delete mode 100755 src/cython/example/nerve_of_a_covering.py delete mode 100755 src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py delete mode 100755 src/cython/example/random_cubical_complex_persistence_example.py delete mode 100755 src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py delete mode 100755 src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py delete mode 100755 src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py delete mode 100755 src/cython/example/rips_complex_from_points_example.py delete mode 100755 src/cython/example/rips_persistence_diagram.py delete mode 100755 src/cython/example/simplex_tree_example.py delete mode 100755 src/cython/example/sparse_rips_persistence_diagram.py delete mode 100755 src/cython/example/tangential_complex_plain_homology_from_off_file_example.py delete mode 100755 src/cython/example/voronoi_graph_induced_complex.py delete mode 100755 src/cython/example/witness_complex_from_nearest_landmark_table.py delete mode 100644 src/cython/gudhi/__init__.py delete mode 100644 src/cython/gudhi/__init__.py.in delete mode 100644 src/cython/gudhi/alpha_complex.pyx delete mode 100644 src/cython/gudhi/bottleneck.pyx delete mode 100644 src/cython/gudhi/cubical_complex.pyx delete mode 100644 src/cython/gudhi/euclidean_strong_witness_complex.pyx delete mode 100644 src/cython/gudhi/euclidean_witness_complex.pyx delete mode 100644 src/cython/gudhi/nerve_gic.pyx delete mode 100644 src/cython/gudhi/off_reader.pyx delete mode 100644 src/cython/gudhi/periodic_cubical_complex.pyx delete mode 100644 src/cython/gudhi/persistence_graphical_tools.py delete mode 100644 
src/cython/gudhi/reader_utils.pyx delete mode 100644 src/cython/gudhi/rips_complex.pyx delete mode 100644 src/cython/gudhi/simplex_tree.pxd delete mode 100644 src/cython/gudhi/simplex_tree.pyx delete mode 100644 src/cython/gudhi/strong_witness_complex.pyx delete mode 100644 src/cython/gudhi/subsampling.pyx delete mode 100644 src/cython/gudhi/tangential_complex.pyx delete mode 100644 src/cython/gudhi/witness_complex.pyx delete mode 100644 src/cython/include/Alpha_complex_interface.h delete mode 100644 src/cython/include/Bottleneck_distance_interface.h delete mode 100644 src/cython/include/Cubical_complex_interface.h delete mode 100644 src/cython/include/Euclidean_strong_witness_complex_interface.h delete mode 100644 src/cython/include/Euclidean_witness_complex_interface.h delete mode 100644 src/cython/include/Nerve_gic_interface.h delete mode 100644 src/cython/include/Off_reader_interface.h delete mode 100644 src/cython/include/Persistent_cohomology_interface.h delete mode 100644 src/cython/include/Reader_utils_interface.h delete mode 100644 src/cython/include/Rips_complex_interface.h delete mode 100644 src/cython/include/Simplex_tree_interface.h delete mode 100644 src/cython/include/Strong_witness_complex_interface.h delete mode 100644 src/cython/include/Subsampling_interface.h delete mode 100644 src/cython/include/Tangential_complex_interface.h delete mode 100644 src/cython/include/Witness_complex_interface.h delete mode 100644 src/cython/setup.py.in delete mode 100755 src/cython/test/test_alpha_complex.py delete mode 100755 src/cython/test/test_bottleneck_distance.py delete mode 100755 src/cython/test/test_cover_complex.py delete mode 100755 src/cython/test/test_cubical_complex.py delete mode 100755 src/cython/test/test_euclidean_witness_complex.py delete mode 100755 src/cython/test/test_reader_utils.py delete mode 100755 src/cython/test/test_rips_complex.py delete mode 100755 src/cython/test/test_simplex_tree.py delete mode 100755 src/cython/test/test_subsampling.py delete mode 100755 src/cython/test/test_tangential_complex.py delete mode 100755 src/cython/test/test_witness_complex.py create mode 100644 src/python/CMakeLists.txt create mode 100644 src/python/CONVENTIONS create mode 100644 src/python/README create mode 100644 src/python/doc/_templates/layout.html create mode 100644 src/python/doc/alpha_complex_ref.rst create mode 100644 src/python/doc/alpha_complex_sum.inc create mode 100644 src/python/doc/alpha_complex_user.rst create mode 100644 src/python/doc/bottleneck_distance_sum.inc create mode 100644 src/python/doc/bottleneck_distance_user.rst create mode 100644 src/python/doc/citation.rst create mode 100755 src/python/doc/conf.py create mode 100644 src/python/doc/cubical_complex_ref.rst create mode 100644 src/python/doc/cubical_complex_sum.inc create mode 100644 src/python/doc/cubical_complex_user.rst create mode 100644 src/python/doc/euclidean_strong_witness_complex_ref.rst create mode 100644 src/python/doc/euclidean_witness_complex_ref.rst create mode 100644 src/python/doc/examples.rst create mode 100644 src/python/doc/fileformats.rst create mode 100644 src/python/doc/img/graphical_tools_representation.png create mode 100644 src/python/doc/index.rst create mode 100644 src/python/doc/installation.rst create mode 100644 src/python/doc/nerve_gic_complex_ref.rst create mode 100644 src/python/doc/nerve_gic_complex_sum.inc create mode 100644 src/python/doc/nerve_gic_complex_user.rst create mode 100644 src/python/doc/periodic_cubical_complex_ref.rst create mode 100644 
src/python/doc/persistence_graphical_tools_ref.rst create mode 100644 src/python/doc/persistence_graphical_tools_sum.inc create mode 100644 src/python/doc/persistence_graphical_tools_user.rst create mode 100644 src/python/doc/persistent_cohomology_sum.inc create mode 100644 src/python/doc/persistent_cohomology_user.rst create mode 100755 src/python/doc/python3-sphinx-build.py create mode 100644 src/python/doc/reader_utils_ref.rst create mode 100644 src/python/doc/rips_complex_ref.rst create mode 100644 src/python/doc/rips_complex_sum.inc create mode 100644 src/python/doc/rips_complex_user.rst create mode 100644 src/python/doc/simplex_tree_ref.rst create mode 100644 src/python/doc/simplex_tree_sum.inc create mode 100644 src/python/doc/simplex_tree_user.rst create mode 100644 src/python/doc/strong_witness_complex_ref.rst create mode 100644 src/python/doc/tangential_complex_ref.rst create mode 100644 src/python/doc/tangential_complex_sum.inc create mode 100644 src/python/doc/tangential_complex_user.rst create mode 100644 src/python/doc/todos.rst create mode 100644 src/python/doc/witness_complex_ref.rst create mode 100644 src/python/doc/witness_complex_sum.inc create mode 100644 src/python/doc/witness_complex_user.rst create mode 100755 src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py create mode 100755 src/python/example/alpha_complex_from_points_example.py create mode 100755 src/python/example/alpha_rips_persistence_bottleneck_distance.py create mode 100755 src/python/example/bottleneck_basic_example.py create mode 100755 src/python/example/coordinate_graph_induced_complex.py create mode 100755 src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py create mode 100755 src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py create mode 100755 src/python/example/functional_graph_induced_complex.py create mode 100755 src/python/example/gudhi_graphical_tools_example.py create mode 100755 src/python/example/nerve_of_a_covering.py create mode 100755 src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py create mode 100755 src/python/example/random_cubical_complex_persistence_example.py create mode 100755 src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py create mode 100755 src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py create mode 100755 src/python/example/rips_complex_diagram_persistence_from_off_file_example.py create mode 100755 src/python/example/rips_complex_from_points_example.py create mode 100755 src/python/example/rips_persistence_diagram.py create mode 100755 src/python/example/simplex_tree_example.py create mode 100755 src/python/example/sparse_rips_persistence_diagram.py create mode 100755 src/python/example/tangential_complex_plain_homology_from_off_file_example.py create mode 100755 src/python/example/voronoi_graph_induced_complex.py create mode 100755 src/python/example/witness_complex_from_nearest_landmark_table.py create mode 100644 src/python/gudhi/__init__.py create mode 100644 src/python/gudhi/__init__.py.in create mode 100644 src/python/gudhi/alpha_complex.pyx create mode 100644 src/python/gudhi/bottleneck.pyx create mode 100644 src/python/gudhi/cubical_complex.pyx create mode 100644 src/python/gudhi/euclidean_strong_witness_complex.pyx create mode 100644 src/python/gudhi/euclidean_witness_complex.pyx create mode 100644 src/python/gudhi/nerve_gic.pyx 
create mode 100644 src/python/gudhi/off_reader.pyx create mode 100644 src/python/gudhi/periodic_cubical_complex.pyx create mode 100644 src/python/gudhi/persistence_graphical_tools.py create mode 100644 src/python/gudhi/reader_utils.pyx create mode 100644 src/python/gudhi/rips_complex.pyx create mode 100644 src/python/gudhi/simplex_tree.pxd create mode 100644 src/python/gudhi/simplex_tree.pyx create mode 100644 src/python/gudhi/strong_witness_complex.pyx create mode 100644 src/python/gudhi/subsampling.pyx create mode 100644 src/python/gudhi/tangential_complex.pyx create mode 100644 src/python/gudhi/witness_complex.pyx create mode 100644 src/python/include/Alpha_complex_interface.h create mode 100644 src/python/include/Bottleneck_distance_interface.h create mode 100644 src/python/include/Cubical_complex_interface.h create mode 100644 src/python/include/Euclidean_strong_witness_complex_interface.h create mode 100644 src/python/include/Euclidean_witness_complex_interface.h create mode 100644 src/python/include/Nerve_gic_interface.h create mode 100644 src/python/include/Off_reader_interface.h create mode 100644 src/python/include/Persistent_cohomology_interface.h create mode 100644 src/python/include/Reader_utils_interface.h create mode 100644 src/python/include/Rips_complex_interface.h create mode 100644 src/python/include/Simplex_tree_interface.h create mode 100644 src/python/include/Strong_witness_complex_interface.h create mode 100644 src/python/include/Subsampling_interface.h create mode 100644 src/python/include/Tangential_complex_interface.h create mode 100644 src/python/include/Witness_complex_interface.h create mode 100644 src/python/setup.py.in create mode 100755 src/python/test/test_alpha_complex.py create mode 100755 src/python/test/test_bottleneck_distance.py create mode 100755 src/python/test/test_cover_complex.py create mode 100755 src/python/test/test_cubical_complex.py create mode 100755 src/python/test/test_euclidean_witness_complex.py create mode 100755 src/python/test/test_reader_utils.py create mode 100755 src/python/test/test_rips_complex.py create mode 100755 src/python/test/test_simplex_tree.py create mode 100755 src/python/test/test_subsampling.py create mode 100755 src/python/test/test_tangential_complex.py create mode 100755 src/python/test/test_witness_complex.py diff --git a/.appveyor.yml b/.appveyor.yml index ab943fb7..4b432277 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -54,8 +54,8 @@ build_script: - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. 
- if [%target%]==[Python] ( - cd src/cython & - MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & + cd src/python & + MSBuild Python.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & diff --git a/.circleci/config.yml b/.circleci/config.yml index b9f0376e..52cb3d45 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -56,7 +56,7 @@ jobs: cd build; cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 ..; make all test sphinx; - cp -R cython/sphinx /tmp/sphinx; + cp -R python/sphinx /tmp/sphinx; - store_artifacts: path: /tmp/sphinx diff --git a/.gitignore b/.gitignore index 31efc180..6aab7337 100644 --- a/.gitignore +++ b/.gitignore @@ -2,21 +2,7 @@ build/ # Generated by Cython -src/cython/gudhi/simplex_tree.cpp -src/cython/gudhi/alpha_complex.cpp -src/cython/gudhi/bottleneck.cpp -src/cython/gudhi/cubical_complex.cpp -src/cython/gudhi/euclidean_strong_witness_complex.cpp -src/cython/gudhi/off_reader.cpp -src/cython/gudhi/periodic_cubical_complex.cpp -src/cython/gudhi/reader_utils.cpp -src/cython/gudhi/rips_complex.cpp -src/cython/gudhi/subsampling.cpp -src/cython/gudhi/euclidean_witness_complex.cpp -src/cython/gudhi/nerve_gic.cpp -src/cython/gudhi/strong_witness_complex.cpp -src/cython/gudhi/tangential_complex.cpp -src/cython/gudhi/witness_complex.cpp +src/python/gudhi/*.cpp # Generated by tests data/points/COIL_database/lucky_cat.off_dist diff --git a/CMakeLists.txt b/CMakeLists.txt index 9b97947f..5dcc6803 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -11,7 +11,7 @@ set(GUDHI_MODULES "" CACHE INTERNAL "GUDHI_MODULES") set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES") # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path -set(GUDHI_CYTHON_PATH "src/cython") +set(GUDHI_PYTHON_PATH "src/python") # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH include(GUDHI_third_party_libraries NO_POLICY_SCOPE) @@ -54,7 +54,7 @@ add_subdirectory(src/GudhUI) if (WITH_GUDHI_PYTHON) # specific for cython module - add_subdirectory(${GUDHI_CYTHON_PATH}) + add_subdirectory(${GUDHI_PYTHON_PATH}) else() message("++ Python module will not be compiled because WITH_GUDHI_PYTHON is set to OFF") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python") diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 40fdcf2b..561aa049 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -10,7 +10,7 @@ set(GUDHI_MODULES "" CACHE INTERNAL "GUDHI_MODULES") set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES") # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path -set(GUDHI_CYTHON_PATH "cython") +set(GUDHI_PYTHON_PATH "python") # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH include(GUDHI_third_party_libraries NO_POLICY_SCOPE) @@ -61,7 +61,7 @@ add_subdirectory(GudhUI) if (WITH_GUDHI_PYTHON) # specific for cython module - add_subdirectory(${GUDHI_CYTHON_PATH}) + add_subdirectory(${GUDHI_PYTHON_PATH}) else() message("++ Python module will not be compiled because WITH_GUDHI_PYTHON is set to OFF") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python") diff --git a/src/Doxyfile.in b/src/Doxyfile.in index 59f864a0..57775498 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -784,7 +784,7 @@ 
EXCLUDE = data/ \ example/ \ GudhUI/ \ cmake/ \ - cython/ \ + python/ \ README.md # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 1dbddfd9..360a230b 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -101,16 +101,18 @@ function( find_python_module PYTHON_MODULE_NAME ) RESULT_VARIABLE PYTHON_MODULE_RESULT OUTPUT_VARIABLE PYTHON_MODULE_VERSION ERROR_VARIABLE PYTHON_MODULE_ERROR) - message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME} - - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT} - - PYTHON_MODULE_VERSION = ${PYTHON_MODULE_VERSION} - - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}") if(PYTHON_MODULE_RESULT EQUAL 0) # Remove carriage return string(STRIP ${PYTHON_MODULE_VERSION} PYTHON_MODULE_VERSION) + message ("++ Python module ${PYTHON_MODULE_NAME} - Version ${PYTHON_MODULE_VERSION} found") + set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE) set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE) else() + message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME} + - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT} + - PYTHON_MODULE_VERSION = ${PYTHON_MODULE_VERSION} + - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}") unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE) set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE) endif() @@ -125,11 +127,11 @@ if( PYTHONINTERP_FOUND ) find_python_module("sphinx") endif() -if(NOT GUDHI_CYTHON_PATH) - message(FATAL_ERROR "ERROR: GUDHI_CYTHON_PATH is not valid.") -endif(NOT GUDHI_CYTHON_PATH) +if(NOT GUDHI_PYTHON_PATH) + message(FATAL_ERROR "ERROR: GUDHI_PYTHON_PATH is not valid.") +endif(NOT GUDHI_PYTHON_PATH) -option(WITH_GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. Usefull when setting rpath is not allowed" ON) +option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. 
Usefull when setting rpath is not allowed" ON) if(PYTHONINTERP_FOUND AND CYTHON_FOUND) if(SPHINX_FOUND) @@ -139,7 +141,7 @@ if(PYTHONINTERP_FOUND AND CYTHON_FOUND) if(NOT SPHINX_PATH) if(PYTHON_VERSION_MAJOR EQUAL 3) # In Python3, just hack sphinx-build if it does not exist - set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/doc/python3-sphinx-build.py") + set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/doc/python3-sphinx-build.py") endif(PYTHON_VERSION_MAJOR EQUAL 3) endif(NOT SPHINX_PATH) endif(SPHINX_FOUND) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index 91eee6b5..f75fb19e 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -42,7 +42,7 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy_directory ${CMAKE_SOURCE_DIR}/src/cython ${GUDHI_USER_VERSION_DIR}/cython) + copy_directory ${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH} ${GUDHI_USER_VERSION_DIR}/python) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt deleted file mode 100644 index ab20c6e6..00000000 --- a/src/cython/CMakeLists.txt +++ /dev/null @@ -1,433 +0,0 @@ -project(Cython) - -function( add_gudhi_cython_lib THE_LIB ) - if(EXISTS ${THE_LIB}) - get_filename_component(THE_LIB_FILE_NAME ${THE_LIB} NAME_WE) - if(WIN32) - message("++ ${THE_LIB} => THE_LIB_FILE_NAME = ${THE_LIB_FILE_NAME}") - set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'${THE_LIB_FILE_NAME}', " PARENT_SCOPE) - else(WIN32) - STRING(REGEX REPLACE "lib" "" UNIX_LIB_FILE_NAME ${THE_LIB_FILE_NAME}) - message("++ ${THE_LIB} => UNIX_LIB_FILE_NAME = ${UNIX_LIB_FILE_NAME}") - set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'${UNIX_LIB_FILE_NAME}', " PARENT_SCOPE) - endif(WIN32) - endif(EXISTS ${THE_LIB}) -endfunction( add_gudhi_cython_lib ) - -# THE_TEST is the python test file name (without .py extension) containing tests functions -function( add_gudhi_py_test THE_TEST ) - if(PYTEST_FOUND) - # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python - # use py.test no cache provider, otherwise a .cache file is created in sources by py.test - add_test(NAME ${THE_TEST}_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py) - endif() -endfunction( add_gudhi_py_test ) - -# Set gudhi.__debug_info__ -# WARNING : to be done before setup.py.in configure_file -function( add_gudhi_debug_info DEBUG_INFO ) - set(GUDHI_CYTHON_DEBUG_INFO "${GUDHI_CYTHON_DEBUG_INFO} \"${DEBUG_INFO}\\n\" \\\n" PARENT_SCOPE) -endfunction( add_gudhi_debug_info ) - -if(PYTHONINTERP_FOUND) - if(CYTHON_FOUND) - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'off_reader', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'simplex_tree', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'rips_complex', ") - 
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'cubical_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'periodic_cubical_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'persistence_graphical_tools', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'reader_utils', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'witness_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'strong_witness_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'bottleneck', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'subsampling', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'tangential_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'alpha_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'euclidean_witness_complex', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'euclidean_strong_witness_complex', ") - - add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}") - add_gudhi_debug_info("Cython version ${CYTHON_VERSION}") - if(PYTEST_FOUND) - add_gudhi_debug_info("Pytest version ${PYTEST_VERSION}") - endif() - if(MATPLOTLIB_FOUND) - add_gudhi_debug_info("Matplotlib version ${MATPLOTLIB_VERSION}") - endif() - if(NUMPY_FOUND) - add_gudhi_debug_info("Numpy version ${NUMPY_VERSION}") - endif() - if(SCIPY_FOUND) - add_gudhi_debug_info("Scipy version ${SCIPY_VERSION}") - endif() - - message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_VERSION} - Sphinx is ${SPHINX_PATH}") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_SYSTEM_NO_DEPRECATED', ") - - # Gudhi and CGAL compilation option - if(MSVC) - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'/fp:strict', ") - else(MSVC) - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-std=c++11', ") - endif(MSVC) - if(CMAKE_COMPILER_IS_GNUCXX) - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ") - endif(CMAKE_COMPILER_IS_GNUCXX) - if (CMAKE_CXX_COMPILER_ID MATCHES Intel) - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-fp-model strict', ") - endif(CMAKE_CXX_COMPILER_ID MATCHES Intel) - if (DEBUG_TRACES) - # For programs to be more verbose - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DDEBUG_TRACES', ") - endif() - - if (EIGEN3_FOUND) - add_gudhi_debug_info("Eigen3 version ${EIGEN3_VERSION}") - # No problem, even if no CGAL found - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ") - endif (EIGEN3_FOUND) - - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'off_reader', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'simplex_tree', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'rips_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'cubical_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'periodic_cubical_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'reader_utils', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE 
"${GUDHI_CYTHON_MODULES_TO_COMPILE}'witness_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ") - if (NOT CGAL_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'bottleneck', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'nerve_gic', ") - endif () - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'subsampling', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'tangential_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'alpha_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ") - set(GUDHI_CYTHON_MODULES_TO_COMPILE "${GUDHI_CYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ") - endif () - - if(CGAL_FOUND) - can_cgal_use_cxx11_thread_local() - if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT) - if(CMAKE_BUILD_TYPE MATCHES Debug) - add_gudhi_cython_lib("${Boost_THREAD_LIBRARY_DEBUG}") - else() - add_gudhi_cython_lib("${Boost_THREAD_LIBRARY_RELEASE}") - endif() - message("** Add Boost ${Boost_LIBRARY_DIRS}") - set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ") - endif() - # Add CGAL compilation args - if(CGAL_HEADER_ONLY) - add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ") - else(CGAL_HEADER_ONLY) - add_gudhi_debug_info("CGAL version ${CGAL_VERSION}") - add_gudhi_cython_lib("${CGAL_LIBRARY}") - set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ") - message("** Add CGAL ${CGAL_LIBRARIES_DIR}") - # If CGAL is not header only, CGAL library may link with boost system, - if(CMAKE_BUILD_TYPE MATCHES Debug) - add_gudhi_cython_lib("${Boost_SYSTEM_LIBRARY_DEBUG}") - else() - add_gudhi_cython_lib("${Boost_SYSTEM_LIBRARY_RELEASE}") - endif() - set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ") - message("** Add Boost ${Boost_LIBRARY_DIRS}") - endif(CGAL_HEADER_ONLY) - # GMP and GMPXX are not required, but if present, CGAL will link with them. 
- if(GMP_FOUND) - add_gudhi_debug_info("GMP_LIBRARIES = ${GMP_LIBRARIES}") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ") - add_gudhi_cython_lib("${GMP_LIBRARIES}") - set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ") - message("** Add gmp ${GMP_LIBRARIES_DIR}") - if(GMPXX_FOUND) - add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ") - add_gudhi_cython_lib("${GMPXX_LIBRARIES}") - set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ") - message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}") - endif(GMPXX_FOUND) - endif(GMP_FOUND) - endif(CGAL_FOUND) - - # Specific for Mac - if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-mmacosx-version-min=10.12', ") - set(GUDHI_CYTHON_EXTRA_LINK_ARGS "${GUDHI_CYTHON_EXTRA_LINK_ARGS}'-mmacosx-version-min=10.12', ") - endif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") - - # Loop on INCLUDE_DIRECTORIES PROPERTY - get_property(GUDHI_INCLUDE_DIRECTORIES DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES) - foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES}) - set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${GUDHI_INCLUDE_DIRECTORY}', ") - endforeach() - set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${CMAKE_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/include', ") - - if (TBB_FOUND AND WITH_GUDHI_USE_TBB) - add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used") - set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ") - if(CMAKE_BUILD_TYPE MATCHES Debug) - add_gudhi_cython_lib("${TBB_DEBUG_LIBRARY}") - add_gudhi_cython_lib("${TBB_MALLOC_DEBUG_LIBRARY}") - else() - add_gudhi_cython_lib("${TBB_RELEASE_LIBRARY}") - add_gudhi_cython_lib("${TBB_MALLOC_RELEASE_LIBRARY}") - endif() - set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ") - message("** Add tbb ${TBB_LIBRARY_DIRS}") - set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ") - endif() - - if(UNIX AND WITH_GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS) - set( GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}") - endif(UNIX AND WITH_GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS) - - # Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention - configure_file(setup.py.in "${CMAKE_CURRENT_BINARY_DIR}/setup.py" @ONLY) - - # Generate gudhi/__init__.py - file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi") - configure_file("gudhi/__init__.py.in" "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY) - - # Other .py files - file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") - - add_custom_command( - OUTPUT gudhi.so - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/setup.py" "build_ext" "--inplace") - - add_custom_target(cython ALL DEPENDS gudhi.so - COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests") - - install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py install)") - - # Test examples - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - # Bottleneck and Alpha - add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test - WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py" - -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3) - - if(MATPLOTLIB_FOUND AND NUMPY_FOUND) - # Tangential - add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py" - --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) - - add_gudhi_py_test(test_tangential_complex) - - # Witness complex AND Subsampling - add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py" - --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2) - - add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py" - --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2) - endif() - - # Subsampling - add_gudhi_py_test(test_subsampling) - - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - if (NOT CGAL_VERSION VERSION_LESS 4.11.0) - # Bottleneck - add_test(NAME bottleneck_basic_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py") - - add_gudhi_py_test(test_bottleneck_distance) - - # Cover complex - file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - add_test(NAME cover_complex_nerve_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py" - -f human.off -c 2 -r 10 -g 0.3) - - add_test(NAME cover_complex_coordinate_gic_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py" - -f human.off -c 0 -v) - - add_test(NAME cover_complex_functional_gic_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py" - -o lucky_cat.off - -f lucky_cat_PCA1 -v) - - add_test(NAME cover_complex_voronoi_gic_example_py_test - WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py" - -f human.off -n 700 -v) - - add_gudhi_py_test(test_cover_complex) - endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) - - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - # Alpha - add_test(NAME alpha_complex_from_points_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py") - - if(MATPLOTLIB_FOUND AND NUMPY_FOUND) - add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py" - --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6) - endif() - - add_gudhi_py_test(test_alpha_complex) - - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - - if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - # Euclidean witness - add_gudhi_py_test(test_euclidean_witness_complex) - - endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - - # Cubical - add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py" - --no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt) - - if(NUMPY_FOUND) - add_test(NAME random_cubical_complex_persistence_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py" - 10 10 10) - endif() - - add_gudhi_py_test(test_cubical_complex) - - # Rips - if(MATPLOTLIB_FOUND AND NUMPY_FOUND) - add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py" - --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3) - - add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py - --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3) - endif() - - add_test(NAME rips_complex_from_points_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_from_points_example.py) - - add_gudhi_py_test(test_rips_complex) - - # Simplex tree - add_test(NAME simplex_tree_example_py_test - WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/simplex_tree_example.py) - - add_gudhi_py_test(test_simplex_tree) - - # Witness - add_test(NAME witness_complex_from_nearest_landmark_table_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/witness_complex_from_nearest_landmark_table.py) - - add_gudhi_py_test(test_witness_complex) - - # Reader utils - add_gudhi_py_test(test_reader_utils) - - # Documentation generation is available through sphinx - requires all modules - if(SPHINX_PATH) - if(MATPLOTLIB_FOUND) - if(NUMPY_FOUND) - if(SCIPY_FOUND) - if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/") - # User warning - Sphinx is a static pages generator, and configured to work fine with user_version - # Images and biblio warnings because not found on developper version - if (GUDHI_CYTHON_PATH STREQUAL "src/cython") - set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for user version, you run it on developper version. Images and biblio will miss") - endif() - # sphinx target requires gudhi.so, because conf.py reads gudhi version from it - add_custom_target(sphinx - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx - DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so" - COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM) - - add_test(NAME sphinx_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" - ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest) - - # Set missing or not modules - set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES") - else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) - else(SCIPY_FOUND) - message("++ Python documentation module will not be compiled because scipy was not found") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(SCIPY_FOUND) - else(NUMPY_FOUND) - message("++ Python documentation module will not be compiled because numpy was not found") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(NUMPY_FOUND) - else(MATPLOTLIB_FOUND) - message("++ Python documentation module will not be compiled because matplotlib was not found") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(MATPLOTLIB_FOUND) - else(SPHINX_PATH) - message("++ Python documentation module will not be compiled because sphinx and sphinxcontrib-bibtex were not found") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") - 
endif(SPHINX_PATH) - - - # Set missing or not modules - set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") - else(CYTHON_FOUND) - message("++ Python module will not be compiled because cython was not found") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(CYTHON_FOUND) -else(PYTHONINTERP_FOUND) - message("++ Python module will not be compiled because no Python interpreter was found") - set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") -endif(PYTHONINTERP_FOUND) diff --git a/src/cython/CONVENTIONS b/src/cython/CONVENTIONS deleted file mode 100644 index 804e97f3..00000000 --- a/src/cython/CONVENTIONS +++ /dev/null @@ -1,9 +0,0 @@ -Gudhi is following PEP8 conventions. - -Please refer to: -https://www.python.org/dev/peps/pep-0008/ - -A summary: - - modules (filenames) should have short, all-lowercase names, and they can contain underscores. - - packages (directories) should have short, all-lowercase names, preferably without underscores. - - classes should use the CapWords convention. \ No newline at end of file diff --git a/src/cython/README b/src/cython/README deleted file mode 100644 index 7d2c4491..00000000 --- a/src/cython/README +++ /dev/null @@ -1,3 +0,0 @@ - -If you do not want to install the package, just launch the following command to help Python to find the compiled package : -$> export PYTHONPATH=`pwd`:$PYTHONPATH diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html deleted file mode 100644 index bc0e9658..00000000 --- a/src/cython/doc/_templates/layout.html +++ /dev/null @@ -1,275 +0,0 @@ -{# - basic/layout.html - ~~~~~~~~~~~~~~~~~ - - Master layout template for Sphinx themes. - - :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -#} -{%- block doctype -%} - -{%- endblock %} -{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} -{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} -{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and - (sidebars != []) %} -{%- set url_root = pathto('', 1) %} -{# XXX necessary? 
[deleted layout.html template body: Jinja2 macros and blocks (relbar, sidebar, script, css, htmltitle, linktags, header, relbar1, content); the surrounding HTML markup is not reproduced here]
-{%- endblock %} - -{%- block relbar2 %}{% endblock %} - -{%- block footer %} - -{%- endblock %} - - - diff --git a/src/cython/doc/alpha_complex_ref.rst b/src/cython/doc/alpha_complex_ref.rst deleted file mode 100644 index 7da79543..00000000 --- a/src/cython/doc/alpha_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -============================== -Alpha complex reference manual -============================== - -.. autoclass:: gudhi.AlphaComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.AlphaComplex.__init__ diff --git a/src/cython/doc/alpha_complex_sum.inc b/src/cython/doc/alpha_complex_sum.inc deleted file mode 100644 index 9049e654..00000000 --- a/src/cython/doc/alpha_complex_sum.inc +++ /dev/null @@ -1,20 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ - | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | - | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | | - | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | - | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 `_) | - | | is then said to be Gabriel), and as the minimum of the filtration | | - | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 | - | | otherwise. All simplices that have a filtration value strictly | | - | | greater than a given alpha squared value are not inserted into the | | - | | complex. | | - | | | | - | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | - | | advised for better performance). | | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ - | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/alpha_complex_user.rst b/src/cython/doc/alpha_complex_user.rst deleted file mode 100644 index d1e9c7cd..00000000 --- a/src/cython/doc/alpha_complex_user.rst +++ /dev/null @@ -1,210 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Alpha complex user manual -========================= -Definition ----------- - -.. include:: alpha_complex_sum.inc - -Alpha_complex is constructing a :doc:`Simplex_tree ` using -`Delaunay Triangulation `_ -:cite:`cgal:hdj-t-15b` from `CGAL `_ (the Computational Geometry Algorithms Library -:cite:`cgal:eb-15b`). - -Remarks -^^^^^^^ -When Alpha_complex is constructed with an infinite value of :math:`\alpha`, the complex is a Delaunay complex. 
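As a quick illustration of that remark (a minimal sketch, not part of this patch, assuming the compiled gudhi Python module is importable): omitting max_alpha_square keeps the default infinite threshold, so the simplex tree returned by create_simplex_tree() holds the whole Delaunay complex::

    import gudhi

    alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6]])
    # No max_alpha_square argument: the default is infinite, so no simplex is
    # pruned and the result is the full Delaunay complex of the input points.
    simplex_tree = alpha_complex.create_simplex_tree()
    print('Delaunay complex: dimension', simplex_tree.dimension(),
          '-', simplex_tree.num_simplices(), 'simplices')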
- -Example from points -------------------- - -This example builds the Delaunay triangulation from the given points, and initializes the alpha complex with it: - -.. testcode:: - - import gudhi - alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]]) - - simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0) - result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -The output is: - -.. testoutput:: - - Alpha complex is of dimension 2 - 25 simplices - 7 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [5] -> 0.00 - [6] -> 0.00 - [2, 3] -> 6.25 - [4, 5] -> 7.25 - [0, 2] -> 8.50 - [0, 1] -> 9.25 - [1, 3] -> 10.00 - [1, 2] -> 11.25 - [1, 2, 3] -> 12.50 - [0, 1, 2] -> 13.00 - [5, 6] -> 13.25 - [2, 4] -> 20.00 - [4, 6] -> 22.74 - [4, 5, 6] -> 22.74 - [3, 6] -> 30.25 - [2, 6] -> 36.50 - [2, 3, 6] -> 36.50 - [2, 4, 6] -> 37.24 - [0, 4] -> 59.71 - [0, 2, 4] -> 59.71 - - -Algorithm ---------- - -Data structure -^^^^^^^^^^^^^^ - -In order to build the alpha complex, first, a Simplex tree is built from the cells of a Delaunay Triangulation. -(The filtration value is set to NaN, which stands for unknown value): - -.. figure:: - ../../doc/Alpha_complex/alpha_complex_doc.png - :figclass: align-center - :alt: Simplex tree structure construction example - - Simplex tree structure construction example - -Filtration value computation algorithm -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - **for** i : dimension :math:`\rightarrow` 0 **do** - **for all** :math:`\sigma` of dimension i - **if** filtration(:math:`\sigma`) is NaN **then** - filtration(:math:`\sigma`) = :math:`\alpha^2(\sigma)` - **end if** - - *//propagate alpha filtration value* - - **for all** :math:`\tau` face of :math:`\sigma` - **if** filtration(:math:`\tau`) is not NaN **then** - filtration(:math:`\tau`) = filtration(:math:`\sigma`) - **end if** - **end for** - **end for** - **end for** - - make_filtration_non_decreasing() - - prune_above_filtration() - -Dimension 2 -^^^^^^^^^^^ - -From the example above, it means the algorithm looks into each triangle ([0,1,2], [0,2,4], [1,2,3], ...), -computes the filtration value of the triangle, and then propagates the filtration value as described -here: - -.. figure:: - ../../doc/Alpha_complex/alpha_complex_doc_420.png - :figclass: align-center - :alt: Filtration value propagation example - - Filtration value propagation example - -Dimension 1 -^^^^^^^^^^^ - -Then, the algorithm looks into each edge ([0,1], [0,2], [1,2], ...), -computes the filtration value of the edge (in this case, propagation will have no effect). - -Dimension 0 -^^^^^^^^^^^ - -Finally, the algorithm looks into each vertex ([0], [1], [2], [3], [4], [5] and [6]) and -sets the filtration value (0 in case of a vertex - propagation will have no effect). - -Non decreasing filtration values -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not -quite define a proper filtration (i.e. non-decreasing with respect to inclusion). -We fix that up by calling `Simplex_tree::make_filtration_non_decreasing()` (cf. -`C++ version `_). 
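One way to spot-check that property from Python (again a sketch, not part of this patch; it reuses the simplex_tree built in the example above and assumes the SimplexTree methods get_filtration() and filtration() behave as documented) is to verify that every proper face carries a value no larger than its coface::

    from itertools import combinations

    for simplex, value in simplex_tree.get_filtration():
        for face in combinations(simplex, len(simplex) - 1):
            if face:  # vertices have no proper face to check
                # A valid filtration is non-decreasing with respect to inclusion.
                assert simplex_tree.filtration(list(face)) <= value + 1e-12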
- -Prune above given filtration value -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()` -in the `C++ version `_). -In the following example, the value is given by the user as argument of the program. - - -Example from OFF file -^^^^^^^^^^^^^^^^^^^^^ - -This example builds the Delaunay triangulation from the points given by an OFF file, and initializes the alpha complex -with it. - - -Then, it is asked to display information about the alpha complex: - -.. testcode:: - - import gudhi - alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \ - '/data/points/alphacomplexdoc.off') - simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0) - result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -the program output is: - -.. testoutput:: - - Alpha complex is of dimension 2 - 23 simplices - 7 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [5] -> 0.00 - [6] -> 0.00 - [2, 3] -> 6.25 - [4, 5] -> 7.25 - [0, 2] -> 8.50 - [0, 1] -> 9.25 - [1, 3] -> 10.00 - [1, 2] -> 11.25 - [1, 2, 3] -> 12.50 - [0, 1, 2] -> 13.00 - [5, 6] -> 13.25 - [2, 4] -> 20.00 - [4, 6] -> 22.74 - [4, 5, 6] -> 22.74 - [3, 6] -> 30.25 - [2, 6] -> 36.50 - [2, 3, 6] -> 36.50 - [2, 4, 6] -> 37.24 - -CGAL citations -============== - -.. bibliography:: ../../biblio/how_to_cite_cgal.bib - :filter: docnames - :style: unsrt diff --git a/src/cython/doc/bottleneck_distance_sum.inc b/src/cython/doc/bottleneck_distance_sum.inc deleted file mode 100644 index 6eb0ac19..00000000 --- a/src/cython/doc/bottleneck_distance_sum.inc +++ /dev/null @@ -1,14 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+ - | .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi | - | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | | - | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 | - | | diagonal points) such that any couple of matched points are at | | - | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: MIT (`GPL v3 `_) | - | the longest edge | norm in :math:`\mathbb{R}^2`. 
| | - | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | - +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+ - | * :doc:`bottleneck_distance_user` | | - +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst deleted file mode 100644 index 9435c7f1..00000000 --- a/src/cython/doc/bottleneck_distance_user.rst +++ /dev/null @@ -1,67 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Bottleneck distance user manual -=============================== -Definition ----------- - -.. include:: bottleneck_distance_sum.inc - -This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" -:cite:`DBLP:journals/algorithmica/EfratIK01`. Another relevant publication, although it was not used is -"Geometry Helps to Compare Persistence Diagrams" :cite:`Kerber:2017:GHC:3047249.3064175`. - -Function --------- -.. autofunction:: gudhi.bottleneck_distance - -Distance computation --------------------- - -The following example explains how the distance is computed: - -.. testcode:: - - import gudhi - - message = "Bottleneck distance = " + '%.1f' % gudhi.bottleneck_distance([[0., 0.]], [[0., 13.]]) - print(message) - -.. testoutput:: - - Bottleneck distance = 6.5 - -.. figure:: - ../../doc/Bottleneck_distance/bottleneck_distance_example.png - :figclass: align-center - - The point (0, 13) is at distance 6.5 from the diagonal and more - specifically from the point (6.5, 6.5) - - -Basic example -------------- - -This other example computes the bottleneck distance from 2 persistence diagrams: - -.. testcode:: - - import gudhi - - diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]] - diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]] - - message = "Bottleneck distance approximation = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1) - print(message) - - message = "Bottleneck distance value = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2) - print(message) - -The output is: - -.. testoutput:: - - Bottleneck distance approximation = 0.81 - Bottleneck distance value = 0.75 diff --git a/src/cython/doc/citation.rst b/src/cython/doc/citation.rst deleted file mode 100644 index 117eb9dd..00000000 --- a/src/cython/doc/citation.rst +++ /dev/null @@ -1,19 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Acknowledging the GUDHI library -############################### - -We kindly ask users to cite the GUDHI library as appropriately as possible in -their papers, and to mention the use of the GUDHI library on the web pages of -their projects using GUDHI and provide us with links to these web pages. Feel -free to contact us in case you have any question or remark on this topic. - -We provide GUDHI bibtex entries for the modules of the User and Reference -Manual, as well as for publications directly related to the GUDHI library. - -GUDHI bibtex -************ - -.. 
literalinclude:: ../../biblio/how_to_cite_gudhi.bib diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py deleted file mode 100755 index e4c718c3..00000000 --- a/src/cython/doc/conf.py +++ /dev/null @@ -1,203 +0,0 @@ -# -*- coding: utf-8 -*- -# -# GUDHI documentation build configuration file, created by -# sphinx-quickstart on Thu Jun 30 09:55:51 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# Path to Gudhi.so from source path -sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'matplotlib.sphinxext.plot_directive', - 'sphinx.ext.autodoc', - 'sphinx.ext.doctest', - 'sphinx.ext.todo', - 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', - 'sphinxcontrib.bibtex', -] - -todo_include_todos = True -# plot option : do not show hyperlinks (Source code, png, hires.png, pdf) -plot_html_show_source_link = False -plot_html_show_formats = False -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -import gudhi - -# General information about the project. -project = gudhi.__name__ -copyright = gudhi.__copyright__ + ' - MIT' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = gudhi.__version__ -# The full version, including alpha/beta/rc tags. -#release = '2.0.1-rc1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build', '*.inc'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. 
-#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'classic' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "sidebarbgcolor": "#A1ADCD", - "sidebartextcolor": "black", - "sidebarlinkcolor": "#334D5C", - "body_max_width": "100%", -} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {'installation': 'installation.html'} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'GUDHIdoc' - diff --git a/src/cython/doc/cubical_complex_ref.rst b/src/cython/doc/cubical_complex_ref.rst deleted file mode 100644 index 1fe9d5fb..00000000 --- a/src/cython/doc/cubical_complex_ref.rst +++ /dev/null @@ -1,13 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Cubical complex reference manual -################################ - -.. autoclass:: gudhi.CubicalComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.CubicalComplex.__init__ diff --git a/src/cython/doc/cubical_complex_sum.inc b/src/cython/doc/cubical_complex_sum.inc deleted file mode 100644 index f200e695..00000000 --- a/src/cython/doc/cubical_complex_sum.inc +++ /dev/null @@ -1,14 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ - | .. figure:: | The cubical complex is an example of a structured complex useful in | :Author: Pawel Dlotko | - | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | | - | :alt: Cubical complex representation | analysis. | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | | | - | | | :Copyright: MIT | - | | | | - +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ - | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | - | | * :doc:`periodic_cubical_complex_ref` | - +--------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/cubical_complex_user.rst b/src/cython/doc/cubical_complex_user.rst deleted file mode 100644 index 19120360..00000000 --- a/src/cython/doc/cubical_complex_user.rst +++ /dev/null @@ -1,168 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Cubical complex user manual -=========================== -Definition ----------- - -===================================== ===================================== ===================================== -:Author: Pawel Dlotko :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3 -===================================== ===================================== ===================================== - -+---------------------------------------------+----------------------------------------------------------------------+ -| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | -| | * :doc:`periodic_cubical_complex_ref` | -+---------------------------------------------+----------------------------------------------------------------------+ - -The cubical complex is an example of a structured complex useful in computational mathematics (specially rigorous -numerics) and image analysis. - -An *elementary interval* is an interval of a form :math:`[n,n+1]`, or :math:`[n,n]`, for :math:`n \in \mathcal{Z}`. -The first one is called *non-degenerate*, while the second one is a *degenerate* interval. A -*boundary of a elementary interval* is a chain :math:`\partial [n,n+1] = [n+1,n+1]-[n,n]` in case of -non-degenerated elementary interval and :math:`\partial [n,n] = 0` in case of degenerate elementary interval. 
An
-*elementary cube* :math:`C` is a product of elementary intervals, :math:`C=I_1 \times \ldots \times I_n`.
-The *embedding dimension* of a cube is :math:`n`, the number of elementary intervals (degenerate or not) in the product.
-The *dimension of a cube* :math:`C=I_1 \times \ldots \times I_n` is the number of non-degenerate elementary
-intervals in the product. The *boundary of a cube* :math:`C=I_1 \times \ldots \times I_n` is the chain obtained
-in the following way:
-
-.. math::
-
-    \partial C = (\partial I_1 \times \ldots \times I_n) + (I_1 \times \partial I_2 \times \ldots \times I_n) +
-    \ldots + (I_1 \times I_2 \times \ldots \times \partial I_n).
-
-A *cubical complex* :math:`\mathcal{K}` is a collection of cubes closed under the operation of taking boundaries
-(i.e. the boundary of every cube from the collection is in the collection). A cube :math:`C` in a cubical complex
-:math:`\mathcal{K}` is *maximal* if it is not in the boundary of any other cube in :math:`\mathcal{K}`. The
-*support* of a cube :math:`C` is the set in :math:`\mathbb{R}^n` occupied by :math:`C` (:math:`n` is the embedding
-dimension of :math:`C`).
-
-Cubes may be equipped with filtration values, in which case we have a filtered cubical complex. All the cubical
-complexes considered in this implementation are filtered cubical complexes (although the range of a filtration may
-be a set of two elements).
-
-For further details and the theory of cubical complexes, please consult :cite:`kaczynski2004computational` as well as
-the following paper :cite:`peikert2012topological`.
-
-Data structure.
----------------
-
-The implementation of the cubical complex provides a representation of complexes that occupy a rectangular region in
-:math:`\mathbb{R}^n`. This extra assumption allows for a memory-efficient way of storing cubical complexes in the form
-of so-called bitmaps. Let
-:math:`R = [b_1,e_1] \times \ldots \times [b_n,e_n]`, for :math:`b_1,...,b_n,e_1,...,e_n \in \mathbb{Z}`,
-:math:`b_i \leq e_i`, be the considered rectangular region and let :math:`\mathcal{K}` be a filtered
-cubical complex having the rectangle :math:`R` as its support. Note that the structure of the coordinate system gives
-a lexicographical ordering of the cells of :math:`\mathcal{K}`. This ordering is the basis of the presented
-bitmap-based implementation. In this implementation, the whole cubical complex is stored as a vector of the
-filtration values. This, together with the dimension of :math:`\mathcal{K}` and the sizes of :math:`\mathcal{K}` in all
-directions, allows one to determine the dimension, neighborhood, boundary and coboundary of every cube
-:math:`C \in \mathcal{K}`.
-
-.. figure::
-    ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png
-    :alt: Cubical complex.
-    :figclass: align-center
-
-    Cubical complex.
-
-Note that the cubical complex in the figure above is, in a natural way, a product of one-dimensional cubical
-complexes in :math:`\mathbb{R}`. The number of all cubes in each direction is equal to :math:`2n+1`, where :math:`n` is
-the number of maximal cubes in the considered direction. Let us consider a cube at position :math:`k` in the
-bitmap.
-Knowing the sizes of the bitmap, by a series of modulo operations we can determine which elementary intervals are
-present in the product that gives the cube :math:`C`. In a similar way, we can compute the boundary and the coboundary
-of each cube. Further details can be found in the literature.
-
-Input Format.
--------------
-
-In the current implementation, the filtration is given at the maximal cubes, and it is then extended by the lower star
-filtration to all cubes. There are a number of constructors that can be used to construct a cubical complex by users
-who want to use the code directly. They can be found in the :doc:`cubical_complex_ref`.
-Currently, one input from a text file is used. It uses a format inspired by the
-`Perseus software `_ by Vidit Nanda.
-
-.. note::
-    While Perseus assumes the filtration of all maximal cubes to be non-negative, here we do not enforce this and
-    we allow any filtration values. As a consequence, one cannot use ``-1``'s to indicate missing cubes. If you have
-    missing cubes in your complex, please set their filtration to :math:`+\infty` (aka. ``inf`` in the file).
-
-The file format is described in detail in the :ref:`Perseus file format` section.
-
-.. testcode::
-
-    import gudhi
-    cubical_complex = gudhi.CubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
-        '/data/bitmap/cubicalcomplexdoc.txt')
-    result_str = 'Cubical complex is of dimension ' + repr(cubical_complex.dimension()) + ' - ' + \
-        repr(cubical_complex.num_simplices()) + ' simplices.'
-    print(result_str)
-
-the program output is:
-
-.. testoutput::
-
-    Cubical complex is of dimension 2 - 49 simplices.
-
-Periodic boundary conditions.
------------------------------
-
-Often one would like to impose periodic boundary conditions on the cubical complex (cf.
-:doc:`periodic_cubical_complex_ref`).
-Let :math:`I_1\times ... \times I_n` be a box that is decomposed with a cubical complex :math:`\mathcal{K}`.
-Imposing periodic boundary conditions in direction i means that the left and the right side of the complex
-:math:`\mathcal{K}` are considered the same. In particular, if for a bitmap :math:`\mathcal{K}` periodic boundary
-conditions are imposed in all directions, then the complex :math:`\mathcal{K}` becomes an n-dimensional torus. One can
-use various constructors from the file Bitmap_cubical_complex_periodic_boundary_conditions_base.h to construct a
-cubical complex with periodic boundary conditions.
-
-One can also use Perseus-style input files (see :doc:`Perseus `) for the specific periodic case:
-
-.. testcode::
-
-    import gudhi
-    periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
-        '/data/bitmap/periodiccubicalcomplexdoc.txt')
-    result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
-        repr(periodic_cc.num_simplices()) + ' simplices.'
-    print(result_str)
-
-the program output is:
-
-.. testoutput::
-
-    Periodic cubical complex is of dimension 2 - 42 simplices.
-
-Or it can be defined as follows:
-
-.. testcode::
-
-    from gudhi import PeriodicCubicalComplex as pcc
-    periodic_cc = pcc(dimensions=[3,3],
-        top_dimensional_cells= [0, 0, 0, 0, 1, 0, 0, 0, 0],
-        periodic_dimensions=[True, False])
-    result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
-        repr(periodic_cc.num_simplices()) + ' simplices.'
-    print(result_str)
-
-the program output is:
-
-.. testoutput::
-
-    Periodic cubical complex is of dimension 2 - 42 simplices.
-
-Examples.
----------
-
-End-user programs are available in the cython/example/ folder.
-
-Bibliography
-============
-
-.. 
bibliography:: ../../biblio/bibliography.bib - :filter: docnames - :style: unsrt diff --git a/src/cython/doc/euclidean_strong_witness_complex_ref.rst b/src/cython/doc/euclidean_strong_witness_complex_ref.rst deleted file mode 100644 index 1a602cd5..00000000 --- a/src/cython/doc/euclidean_strong_witness_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -================================================= -Euclidean strong witness complex reference manual -================================================= - -.. autoclass:: gudhi.EuclideanStrongWitnessComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.EuclideanStrongWitnessComplex.__init__ diff --git a/src/cython/doc/euclidean_witness_complex_ref.rst b/src/cython/doc/euclidean_witness_complex_ref.rst deleted file mode 100644 index 28daf965..00000000 --- a/src/cython/doc/euclidean_witness_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -========================================== -Euclidean witness complex reference manual -========================================== - -.. autoclass:: gudhi.EuclideanWitnessComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.EuclideanWitnessComplex.__init__ diff --git a/src/cython/doc/examples.rst b/src/cython/doc/examples.rst deleted file mode 100644 index edbc2f72..00000000 --- a/src/cython/doc/examples.rst +++ /dev/null @@ -1,30 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Examples -######## - -.. only:: builder_html - - * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>` - * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` - * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>` - * :download:`alpha_rips_persistence_bottleneck_distance.py <../example/alpha_rips_persistence_bottleneck_distance.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` - * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>` - * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` - * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>` - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` - * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` - * 
:download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
-    * :download:`sparse_rips_persistence_diagram.py <../example/sparse_rips_persistence_diagram.py>`
-    * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
-    * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>`
-    * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>`
-    * :download:`voronoi_graph_induced_complex.py <../example/voronoi_graph_induced_complex.py>`
-    * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>`
diff --git a/src/cython/doc/fileformats.rst b/src/cython/doc/fileformats.rst
deleted file mode 100644
index 345dfdba..00000000
--- a/src/cython/doc/fileformats.rst
+++ /dev/null
@@ -1,127 +0,0 @@
-:orphan:
-
-.. To get rid of WARNING: document isn't included in any toctree
-
-File formats
-############
-
-OFF file format
-***************
-
-OFF files must conform to the format described here:
-http://www.geomview.org/docs/html/OFF.html
-
-OFF files are mainly used as point cloud inputs. Here is an example of 7 points
-in a 3-dimensional space. As edges and faces are not used for point sets, there
-is no need to specify them (just set their numbers to 0):
-
-.. literalinclude:: ../../data/points/alphacomplexdoc.off
-
-.. centered:: ../../data/points/alphacomplexdoc.off
-
-For dimensions higher than 3, the dimension can be set as follows::
-
-    # Dimension is no longer 3
-    nOFF
-    # dimension 4
-    7 vertices
-    0 face
-    0 edge
-    4 7 0 0
-    # Point set:
-    1.0 1.0 0.0 0.0
-    7.0 0.0 0.0 0.0
-    4.0 6.0 0.0 0.0
-    9.0 6.0 0.0 0.0
-    0.0 14.0 0.0 0.0
-    2.0 19.0 0.0 0.0
-    9.0 17.0 0.0 0.0
-
-Persistence Diagram
-*******************
-
-Such a file, whose extension is usually ``.pers``, contains a list of
-persistence intervals.
-
-Lines starting with ``#`` are ignored (comments).
-
-Other lines might contain 2, 3 or 4 values (the number of values on each line
-must be the same for all lines)::
-
-    [[field] dimension] birth death
-
-Here is a simple sample file::
-
-    # Persistence diagram example
-    2 2.7 3.7
-    2 9.6 14.
-    # Some comments
-    3 34.2 34.974
-    4 3. inf
-
-Other sample files can be found in the `data/persistence_diagram` folder.
-
-Such files can be generated with
-:meth:`gudhi.SimplexTree.write_persistence_diagram`, read with
-:meth:`gudhi.read_persistence_intervals_grouped_by_dimension`, or
-:meth:`gudhi.read_persistence_intervals_in_dimension` and displayed with
-:meth:`gudhi.plot_persistence_barcode` or
-:meth:`gudhi.plot_persistence_diagram`.
-
-Iso-cuboid
-**********
-
-Such a file describes an iso-oriented cuboid with diagonally opposite vertices
-(min_x, min_y, min_z,...) and (max_x, max_y, max_z, ...). The format is::
-
-    min_x min_y [min_z ...]
-    max_x max_y [max_z ...]
-
-Here is a simple sample file in the 3D case::
-
-    -1. -1. -1.
-    1. 1. 1.
-
-
-.. _Perseus file format:
-
-Perseus
-*******
-
-This file format is inspired by the
-`Perseus software `_ by Vidit Nanda.
-The first line contains a number d, which is the dimension of the bitmap (2 in the
-example below). The next d lines give the numbers of top dimensional cubes in each
-dimension (3 and 3 in the example below). Next, in lexicographical order, the
-filtration of the top dimensional cubes is given (1 4 6 8 20 4 7 6 5 in the
-example below).
-
-.. figure::
-    ../../doc/Bitmap_cubical_complex/exampleBitmap.png
-    :alt: Example of input data.
- :figclass: align-center - - Example of a input data. - -The input file for the following complex is: - -.. literalinclude:: ../../data/bitmap/cubicalcomplexdoc.txt - -.. centered:: ../../data/bitmap/cubicalcomplexdoc.txt - -To indicate periodic boundary conditions in a given direction, then number of -top dimensional cells in this direction have to be multiplied by -1. For -instance: - -.. literalinclude:: ../../data/bitmap/periodiccubicalcomplexdoc.txt - -.. centered:: ../../data/bitmap/periodiccubicalcomplexdoc.txt - - -Indicate that we have imposed periodic boundary conditions in the direction x, -but not in the direction y. - -Other sample files can be found in the `data/bitmap` folder. - -.. note:: - Unlike in Perseus format the filtration on the maximal cubes can be any - double precision number. Consequently one cannot mark the cubes that are - not present with ``-1``'s. To do that please set their filtration value to - :math:`+\infty` (aka. ``inf`` in the file). \ No newline at end of file diff --git a/src/cython/doc/img/graphical_tools_representation.png b/src/cython/doc/img/graphical_tools_representation.png deleted file mode 100644 index 9759f7ba..00000000 Binary files a/src/cython/doc/img/graphical_tools_representation.png and /dev/null differ diff --git a/src/cython/doc/index.rst b/src/cython/doc/index.rst deleted file mode 100644 index e379bc23..00000000 --- a/src/cython/doc/index.rst +++ /dev/null @@ -1,86 +0,0 @@ -GUDHI Python module documentation -################################# - -.. figure:: - ../../doc/common/Gudhi_banner.png - :alt: Gudhi banner - :figclass: align-center - -Complexes -********* - -Cubical complexes -================= - -.. include:: cubical_complex_sum.inc - -Simplicial complexes -==================== - -Alpha complex -------------- - -.. include:: alpha_complex_sum.inc - -Rips complex -------------- - -.. include:: rips_complex_sum.inc - -Witness complex ---------------- - -.. include:: witness_complex_sum.inc - -Cover complexes -=============== - -.. include:: nerve_gic_complex_sum.inc - -Data structures and basic operations -************************************ - -Data structures -=============== - -Simplex tree ------------- - -.. include:: simplex_tree_sum.inc - -Topological descriptors computation -*********************************** - -Persistence cohomology -====================== - -.. include:: persistent_cohomology_sum.inc - -Manifold reconstruction -*********************** - -Tangential complex -================== - -.. include:: tangential_complex_sum.inc - - -Topological descriptors tools -***************************** - -Bottleneck distance -=================== - -.. include:: bottleneck_distance_sum.inc - -Persistence graphical tools -=========================== - -.. include:: persistence_graphical_tools_sum.inc - -Bibliography -************ - -.. bibliography:: ../../biblio/bibliography.bib - :filter: docnames - :style: unsrt diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst deleted file mode 100644 index 02b889d0..00000000 --- a/src/cython/doc/installation.rst +++ /dev/null @@ -1,256 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Installation -############ - -Compiling -********* -The library uses c++11 and requires `Boost `_ ≥ 1.56.0, -`CMake `_ ≥ 3.1 to generate makefiles, and -`Cython `_ to compile the GUDHI Python module. -It is a multi-platform library and compiles on Linux, Mac OSX and Visual -Studio 2015. 
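-
-As a quick sanity check before building, one can verify from the target Python interpreter that a
-suitable Cython is visible (this snippet is only an illustration and is not part of the build
-process):
-
-.. code-block:: python
-
-    import sys
-    print(sys.version)         # the Python version the module will be built for
-    import Cython
-    print(Cython.__version__)  # Cython must be available to compile the GUDHI Python module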
- -On `Windows `_ , only Python -3.5 and 3.6 are available because of the required Visual Studio version. - -On other systems, if you have several Python/Cython installed, the version 2.X -will be used by default, but you can force it by adding -:code:`-DPython_ADDITIONAL_VERSIONS=3` to the cmake command. - -GUDHI Python module compilation -=============================== - -To build the GUDHI Python module, run the following commands in a terminal: - -.. code-block:: bash - - cd /path-to-gudhi/ - mkdir build - cd build/ - cmake .. - cd cython - make - -GUDHI Python module installation -================================ - -Once the compilation succeeds, one can add the GUDHI Python module path to the -PYTHONPATH: - -.. code-block:: bash - - # For windows, you have to set PYTHONPATH environment variable - export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/cython' - -Or install it definitely in your Python packages folder: - -.. code-block:: bash - - cd /path-to-gudhi/build/cython - # May require sudo or administrator privileges - make install - - -Test suites -=========== - -To test your build, `py.test `_ is optional. Run the -following command in a terminal: - -.. code-block:: bash - - cd /path-to-gudhi/build/cython - # For windows, you have to set PYTHONPATH environment variable - export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/cython' - make test - -Debugging issues -================ - -If tests fail, please check your PYTHONPATH and try to :code:`import gudhi` -and check the errors. -The problem can come from a third-party library bad link or installation. - -If :code:`import gudhi` succeeds, please have a look to debug information: - -.. code-block:: python - - import gudhi - print(gudhi.__debug_info__) - -You shall have something like: - -.. code-block:: none - - Python version 2.7.15 - Cython version 0.26.1 - Eigen3 version 3.1.1 - Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex; - persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;alpha_complex; - euclidean_witness_complex;euclidean_strong_witness_complex; - Missing modules are: bottleneck_distance;nerve_gic;subsampling;tangential_complex;persistence_graphical_tools; - CGAL version 4.7.1000 - GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so - GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so - TBB version 9107 found and used - -Here, you can see that bottleneck_distance, nerve_gic, subsampling and -tangential_complex are missing because of the CGAL version. -persistence_graphical_tools is not available as numpy and matplotlib are not -available. -Unitary tests cannot be run as pytest is missing. - -A complete configuration would be : - -.. 
code-block:: none - - Python version 3.6.5 - Cython version 0.28.2 - Pytest version 3.3.2 - Matplotlib version 2.2.2 - Numpy version 1.14.5 - Eigen3 version 3.3.4 - Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex; - persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;persistence_graphical_tools; - bottleneck_distance;nerve_gic;subsampling;tangential_complex;alpha_complex;euclidean_witness_complex; - euclidean_strong_witness_complex; - CGAL header only version 4.11.0 - GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so - GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so - TBB version 9107 found and used - -Documentation -============= - -To build the documentation, `sphinx-doc `_ and -`sphinxcontrib-bibtex `_ are -required. As the documentation is auto-tested, `CGAL`_, `Eigen3`_, -`Matplotlib`_, `NumPy`_ and `SciPy`_ are also mandatory to build the -documentation. - -Run the following commands in a terminal: - -.. code-block:: bash - - cd /path-to-gudhi/build/cython - make sphinx - -Optional third-party library -**************************** - -CGAL -==== - -The :doc:`Alpha complex `, -:doc:`Tangential complex ` and -:doc:`Witness complex ` data structures, and -:doc:`Bottleneck distance ` requires CGAL, which is a -C++ library which provides easy access to efficient and reliable geometric -algorithms. - -The procedure to install this library -according to your operating system is detailed -`here `_. - -The following examples requires CGAL version ≥ 4.11.0: - -.. only:: builder_html - - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` - * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` - -Eigen3 -====== - -The :doc:`Alpha complex `, -:doc:`Tangential complex ` and -:doc:`Witness complex ` data structures and few -examples requires `Eigen3 `_, a C++ template -library for linear algebra: matrices, vectors, numerical solvers, and related -algorithms. - -The following examples require the `Eigen3 `_: - -.. 
only:: builder_html - - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` - -Matplotlib -========== - -The :doc:`persistence graphical tools ` -module requires `Matplotlib `_, a Python 2D plotting -library which produces publication quality figures in a variety of hardcopy -formats and interactive environments across platforms. - -The following examples require the `Matplotlib `_: - -.. only:: builder_html - - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` - * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` - * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` - * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` - * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` - -NumPy -===== - -The :doc:`persistence graphical tools ` -module requires `NumPy `_, a fundamental package for -scientific computing with Python. - -The following examples require the `NumPy `_: - -.. 
only:: builder_html - - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` - * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` - * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` - * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` - * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` - -SciPy -===== - -The :doc:`persistence graphical tools ` -module requires `SciPy `_, a Python-based ecosystem of -open-source software for mathematics, science, and engineering. - -Threading Building Blocks -========================= - -`Intel® TBB `_ lets you easily write -parallel C++ programs that take full advantage of multicore performance, that -are portable and composable, and that have future-proof scalability. - -Having Intel® TBB installed is recommended to parallelize and accelerate some -GUDHI computations. - -Bug reports and contributions -***************************** - -Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to: - - Contact: gudhi-users@lists.gforge.inria.fr - -GUDHI is open to external contributions. If you want to join our development team, please contact us. diff --git a/src/cython/doc/nerve_gic_complex_ref.rst b/src/cython/doc/nerve_gic_complex_ref.rst deleted file mode 100644 index abde2e8c..00000000 --- a/src/cython/doc/nerve_gic_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -================================ -Cover complexes reference manual -================================ - -.. autoclass:: gudhi.CoverComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.CoverComplex.__init__ diff --git a/src/cython/doc/nerve_gic_complex_sum.inc b/src/cython/doc/nerve_gic_complex_sum.inc deleted file mode 100644 index d633c4ff..00000000 --- a/src/cython/doc/nerve_gic_complex_sum.inc +++ /dev/null @@ -1,16 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ - | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | - | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | - | :alt: Graph Induced Complex of a point cloud. | about the input data. 
They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | - | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | - | | the image of a scalar-valued function defined on the data. | :Copyright: MIT (`GPL v3 `_) | - | | | | - | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | - | | | | - | | | | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ - | * :doc:`nerve_gic_complex_user` | * :doc:`nerve_gic_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/nerve_gic_complex_user.rst b/src/cython/doc/nerve_gic_complex_user.rst deleted file mode 100644 index 9101f45d..00000000 --- a/src/cython/doc/nerve_gic_complex_user.rst +++ /dev/null @@ -1,315 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Cover complexes user manual -=========================== -Definition ----------- - -.. include:: nerve_gic_complex_sum.inc - -Visualizations of the simplicial complexes can be done with either -neato (from `graphviz `_), -`geomview `_, -`KeplerMapper `_. -Input point clouds are assumed to be OFF files (cf. :doc:`fileformats`). - -Covers ------- - -Nerves and Graph Induced Complexes require a cover C of the input point cloud P, -that is a set of subsets of P whose union is P itself. -Very often, this cover is obtained from the preimage of a family of intervals covering -the image of some scalar-valued function f defined on P. This family is parameterized -by its resolution, which can be either the number or the length of the intervals, -and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values). - -Nerves ------- - -Nerve definition -^^^^^^^^^^^^^^^^ - -Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover -is the simplicial complex that has one k-simplex per k-fold intersection of cover elements. -See also `Wikipedia `_. - -.. figure:: - ../../doc/Nerve_GIC/nerve.png - :figclass: align-center - :alt: Nerve of a double torus - - Nerve of a double torus - -Example -^^^^^^^ - -This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off). -The cover C comes from the preimages of intervals (10 intervals with gain 0.3) -covering the height function (coordinate 2), -which are then refined into their connected components using the triangulation of the .OFF file. - -.. 
testcode:: - - import gudhi - nerve_complex = gudhi.CoverComplex() - nerve_complex.set_verbose(True) - - if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ - '/data/points/human.off')): - nerve_complex.set_type('Nerve') - nerve_complex.set_color_from_coordinate(2) - nerve_complex.set_function_from_coordinate(2) - nerve_complex.set_graph_from_OFF() - nerve_complex.set_resolution_with_interval_number(10) - nerve_complex.set_gain(0.3) - nerve_complex.set_cover_from_function() - nerve_complex.find_simplices() - nerve_complex.write_info() - simplex_tree = nerve_complex.create_simplex_tree() - nerve_complex.compute_PD() - result_str = 'Nerve is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - for filtered_value in simplex_tree.get_filtration(): - print(filtered_value[0]) - -the program output is: - -.. code-block:: none - - Min function value = -0.979672 and Max function value = 0.816414 - Interval 0 = [-0.979672, -0.761576] - Interval 1 = [-0.838551, -0.581967] - Interval 2 = [-0.658942, -0.402359] - Interval 3 = [-0.479334, -0.22275] - Interval 4 = [-0.299725, -0.0431414] - Interval 5 = [-0.120117, 0.136467] - Interval 6 = [0.059492, 0.316076] - Interval 7 = [0.239101, 0.495684] - Interval 8 = [0.418709, 0.675293] - Interval 9 = [0.598318, 0.816414] - Computing preimages... - Computing connected components... - 5 interval(s) in dimension 0: - [-0.909111, 0.0081753] - [-0.171433, 0.367393] - [-0.171433, 0.367393] - [-0.909111, 0.745853] - 0 interval(s) in dimension 1: - -.. testoutput:: - - Nerve is of dimension 1 - 41 simplices - 21 vertices. - [0] - [1] - [4] - [1, 4] - [2] - [0, 2] - [8] - [2, 8] - [5] - [4, 5] - [9] - [8, 9] - [13] - [5, 13] - [14] - [9, 14] - [19] - [13, 19] - [25] - [32] - [20] - [20, 32] - [33] - [25, 33] - [26] - [14, 26] - [19, 26] - [42] - [26, 42] - [34] - [33, 34] - [27] - [20, 27] - [35] - [27, 35] - [34, 35] - [35, 42] - [44] - [35, 44] - [54] - [44, 54] - - -The program also writes a file ../../data/points/human.off_sc.txt. The first -three lines in this file are the location of the input point cloud and the -function used to compute the cover. -The fourth line contains the number of vertices nv and edges ne of the Nerve. -The next nv lines represent the vertices. Each line contains the vertex ID, -the number of data points it contains, and their average color function value. -Finally, the next ne lines represent the edges, characterized by the ID of -their vertices. - -Using KeplerMapper, one can obtain the following visualization: - -.. figure:: - ../../doc/Nerve_GIC/nervevisu.jpg - :figclass: align-center - :alt: Visualization with KeplerMapper - - Visualization with KeplerMapper - -Graph Induced Complexes (GIC) ------------------------------ - -GIC definition -^^^^^^^^^^^^^^ - -Again, assume you are given a cover C of your point cloud P. Moreover, assume -you are also given a graph G built on top of P. Then, for any clique in G -whose nodes all belong to different elements of C, the GIC includes a -corresponding simplex, whose dimension is the number of nodes in the clique -minus one. -See :cite:`Dey13` for more details. - -.. figure:: - ../../doc/Nerve_GIC/GIC.jpg - :figclass: align-center - :alt: GIC of a point cloud - - GIC of a point cloud - -Example with cover from Voronoï -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example builds the GIC of a point cloud sampled on a 3D human shape -(human.off). 
-We randomly subsampled 100 points in the point cloud, which act as seeds of -a geodesic Voronoï diagram. Each cell of the diagram is then an element of C. -The graph G (used to compute both the geodesics for Voronoï and the GIC) -comes from the triangulation of the human shape. Note that the resulting -simplicial complex is in dimension 3 in this example. - -.. testcode:: - - import gudhi - nerve_complex = gudhi.CoverComplex() - - if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ - '/data/points/human.off')): - nerve_complex.set_type('GIC') - nerve_complex.set_color_from_coordinate() - nerve_complex.set_graph_from_OFF() - nerve_complex.set_cover_from_Voronoi(700) - nerve_complex.find_simplices() - nerve_complex.plot_off() - -the program outputs SC.off. Using e.g. - -.. code-block:: none - - geomview ../../data/points/human.off_sc.off - -one can obtain the following visualization: - -.. figure:: - ../../doc/Nerve_GIC/gicvoronoivisu.jpg - :figclass: align-center - :alt: Visualization with Geomview - - Visualization with Geomview - -Functional GIC -^^^^^^^^^^^^^^ - -If one restricts to the cliques in G whose nodes all belong to preimages of -consecutive intervals (assuming the cover of the height function is minimal, -i.e. no more than two intervals can intersect at a time), the GIC is of -dimension one, i.e. a graph. -We call this graph the functional GIC. See :cite:`Carriere16` for more details. - -Example -^^^^^^^ - -Functional GIC comes with automatic selection of the Rips threshold, -the resolution and the gain of the function cover. See :cite:`Carriere17c` for -more details. In this example, we compute the functional GIC of a Klein bottle -embedded in R^5, where the graph G comes from a Rips complex with automatic -threshold, and the cover C comes from the preimages of intervals covering the -first coordinate, with automatic resolution and gain. Note that automatic -threshold, resolution and gain can be computed as well for the Nerve. - -.. testcode:: - - import gudhi - nerve_complex = gudhi.CoverComplex() - - if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ - '/data/points/KleinBottle5D.off')): - nerve_complex.set_type('GIC') - nerve_complex.set_color_from_coordinate(0) - nerve_complex.set_function_from_coordinate(0) - nerve_complex.set_graph_from_automatic_rips() - nerve_complex.set_automatic_resolution() - nerve_complex.set_gain() - nerve_complex.set_cover_from_function() - nerve_complex.find_simplices() - nerve_complex.plot_dot() - -the program outputs SC.dot. Using e.g. - -.. code-block:: none - - neato ../../data/points/KleinBottle5D.off_sc.dot -Tpdf -o ../../data/points/KleinBottle5D.off_sc.pdf - -one can obtain the following visualization: - -.. figure:: - ../../doc/Nerve_GIC/coordGICvisu2.jpg - :figclass: align-center - :alt: Visualization with neato - - Visualization with neato - -where nodes are colored by the filter function values and, for each node, the -first number is its ID and the second is the number of data points that its -contain. - -We also provide an example on a set of 72 pictures taken around the same object -(lucky_cat.off). -The function is now the first eigenfunction given by PCA, whose values are -written in a file (lucky_cat_PCA1). Threshold, resolution and gain are -automatically selected as before. - -.. 
testcode:: - - import gudhi - nerve_complex = gudhi.CoverComplex() - - if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ - '/data/points/COIL_database/lucky_cat.off')): - nerve_complex.set_type('GIC') - pca_file = gudhi.__root_source_dir__ + \ - '/data/points/COIL_database/lucky_cat_PCA1' - nerve_complex.set_color_from_file(pca_file) - nerve_complex.set_function_from_file(pca_file) - nerve_complex.set_graph_from_automatic_rips() - nerve_complex.set_automatic_resolution() - nerve_complex.set_gain() - nerve_complex.set_cover_from_function() - nerve_complex.find_simplices() - nerve_complex.plot_dot() - -the program outputs again SC.dot which gives the following visualization after using neato: - -.. figure:: - ../../doc/Nerve_GIC/funcGICvisu.jpg - :figclass: align-center - :alt: Visualization with neato - - Visualization with neato diff --git a/src/cython/doc/periodic_cubical_complex_ref.rst b/src/cython/doc/periodic_cubical_complex_ref.rst deleted file mode 100644 index 4b831647..00000000 --- a/src/cython/doc/periodic_cubical_complex_ref.rst +++ /dev/null @@ -1,13 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Periodic cubical complex reference manual -######################################### - -.. autoclass:: gudhi.PeriodicCubicalComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.PeriodicCubicalComplex.__init__ diff --git a/src/cython/doc/persistence_graphical_tools_ref.rst b/src/cython/doc/persistence_graphical_tools_ref.rst deleted file mode 100644 index 54aff4bc..00000000 --- a/src/cython/doc/persistence_graphical_tools_ref.rst +++ /dev/null @@ -1,12 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -============================================ -Persistence graphical tools reference manual -============================================ - -.. autofunction:: gudhi.__min_birth_max_death -.. autofunction:: gudhi.plot_persistence_barcode -.. autofunction:: gudhi.plot_persistence_diagram -.. autofunction:: gudhi.plot_persistence_density diff --git a/src/cython/doc/persistence_graphical_tools_sum.inc b/src/cython/doc/persistence_graphical_tools_sum.inc deleted file mode 100644 index 0cdf8072..00000000 --- a/src/cython/doc/persistence_graphical_tools_sum.inc +++ /dev/null @@ -1,14 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ - | .. figure:: | These graphical tools comes on top of persistence results and allows | :Author: Vincent Rouvreau | - | img/graphical_tools_representation.png | the user to build easily persistence barcode, diagram or density. 
| | - | | | :Introduced in: GUDHI 2.0.0 | - | | | | - | | | :Copyright: MIT | - | | | | - | | | :Requires: matplotlib, numpy and scipy | - +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ - | * :doc:`persistence_graphical_tools_user` | * :doc:`persistence_graphical_tools_ref` | - +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/persistence_graphical_tools_user.rst b/src/cython/doc/persistence_graphical_tools_user.rst deleted file mode 100644 index b2124fdd..00000000 --- a/src/cython/doc/persistence_graphical_tools_user.rst +++ /dev/null @@ -1,73 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Persistence graphical tools user manual -======================================= -Definition ----------- -.. include:: persistence_graphical_tools_sum.inc - - -Show persistence as a barcode ------------------------------ - -.. note:: - this function requires matplotlib and numpy to be available - -This function can display the persistence result as a barcode: - -.. plot:: - :include-source: - - import gudhi - - off_file = gudhi.__root_source_dir__ + '/data/points/tore3D_300.off' - point_cloud = gudhi.read_off(off_file=off_file) - - rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.7) - simplex_tree = rips_complex.create_simplex_tree(max_dimension=3) - diag = simplex_tree.persistence(min_persistence=0.4) - - plot = gudhi.plot_persistence_barcode(diag) - plot.show() - -Show persistence as a diagram ------------------------------ - -.. note:: - this function requires matplotlib and numpy to be available - -This function can display the persistence result as a diagram: - -.. plot:: - :include-source: - - import gudhi - - # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method - persistence_file=gudhi.__root_source_dir__ + \ - '/data/persistence_diagram/rips_on_tore3D_1307.pers' - plt = gudhi.plot_persistence_diagram(persistence_file=persistence_file, - legend=True) - plt.show() - -Persistence density -------------------- - -.. note:: - this function requires matplotlib, numpy and scipy to be available - -If you want more information on a specific dimension, for instance: - -.. plot:: - :include-source: - - import gudhi - - # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method - persistence_file=gudhi.__root_source_dir__ + \ - '/data/persistence_diagram/rips_on_tore3D_1307.pers' - plt = gudhi.plot_persistence_density(persistence_file=persistence_file, - max_intervals=0, dimension=1, legend=True) - plt.show() diff --git a/src/cython/doc/persistent_cohomology_sum.inc b/src/cython/doc/persistent_cohomology_sum.inc deleted file mode 100644 index 4d7b077e..00000000 --- a/src/cython/doc/persistent_cohomology_sum.inc +++ /dev/null @@ -1,26 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ - | .. 
figure:: | The theory of homology consists in attaching to a topological space | :Author: Clément Maria | - | ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | | - | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Introduced in: GUDHI 2.0.0 | - | | homology studies the evolution -- birth, life and death -- of these | | - | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: MIT | - | Torus | theory is essentially composed of three elements: topological spaces, | | - | | their homology groups and an evolution scheme. | | - | | | | - | | Computation of persistent cohomology using the algorithm of | | - | | :cite:`DBLP:journals/dcg/SilvaMV11` and | | - | | :cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed | | - | | Annotation Matrix implementation of | | - | | :cite:`DBLP:conf/esa/BoissonnatDM13`. | | - | | | | - +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ - | * :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | - | | feature for reference: | - | | | - | | * :doc:`simplex_tree_ref` | - | | * :doc:`cubical_complex_ref` | - | | * :doc:`periodic_cubical_complex_ref` | - +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/persistent_cohomology_user.rst b/src/cython/doc/persistent_cohomology_user.rst deleted file mode 100644 index de83cda1..00000000 --- a/src/cython/doc/persistent_cohomology_user.rst +++ /dev/null @@ -1,120 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Persistent cohomology user manual -================================= -Definition ----------- -===================================== ===================================== ===================================== -:Author: Clément Maria :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3 -===================================== ===================================== ===================================== - -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ -| :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | -| | feature for reference: | -| | | -| | * :doc:`simplex_tree_ref` | -| | * :doc:`cubical_complex_ref` | -| | * :doc:`periodic_cubical_complex_ref` | -+-----------------------------------------------------------------+-----------------------------------------------------------------------+ - - -Computation of persistent cohomology using the algorithm of :cite:`DBLP:journals/dcg/SilvaMV11` and -:cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed Annotation Matrix implementation of -:cite:`DBLP:conf/esa/BoissonnatDM13`. - -The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing global -topological features like connected components, holes, cavities, etc. Persistent homology studies the evolution -- -birth, life and death -- of these features when the topological space is changing. 
Consequently, the theory is -essentially composed of three elements: - -* topological spaces -* their homology groups -* an evolution scheme. - -Topological Spaces ------------------- - -Topological spaces are represented by simplicial complexes. -Let :math:`V = \{1, \cdots ,|V|\}` be a set of *vertices*. -A *simplex* :math:`\sigma` is a subset of vertices :math:`\sigma \subseteq V`. -A *simplicial complex* :math:`\mathbf{K}` on :math:`V` is a collection of simplices :math:`\{\sigma\}`, -:math:`\sigma \subseteq V`, such that :math:`\tau \subseteq \sigma \in \mathbf{K} \Rightarrow \tau \in \mathbf{K}`. -The dimension :math:`n=|\sigma|-1` of :math:`\sigma` is its number of elements minus 1. -A *filtration* of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying -:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`. - -Homology --------- - -For a ring :math:`\mathcal{R}`, the group of *n-chains*, denoted :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})`, of -:math:`\mathbf{K}` is the group of formal sums of n-simplices with :math:`\mathcal{R}` coefficients. The -*boundary operator* is a linear operator -:math:`\partial_n: \mathbf{C}_n(\mathbf{K},\mathcal{R}) \rightarrow \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R})` -such that :math:`\partial_n \sigma = \partial_n [v_0, \cdots , v_n] = \sum_{i=0}^n (-1)^{i}[v_0,\cdots ,\widehat{v_i}, \cdots,v_n]`, -where :math:`\widehat{v_i}` means :math:`v_i` is omitted from the list. The chain groups form a sequence: - -.. math:: - - \cdots \ \ \mathbf{C}_n(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_n\ } - \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R}) \xrightarrow{\partial_{n-1}} \cdots \xrightarrow{\ \partial_2 \ } - \mathbf{C}_1(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_1 \ } \mathbf{C}_0(\mathbf{K},\mathcal{R}) - -of finitely many groups :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})` and homomorphisms :math:`\partial_n`, indexed by -the dimension :math:`n \geq 0`. The boundary operators satisfy the property :math:`\partial_n \circ \partial_{n+1}=0` -for every :math:`n > 0` and we define the homology groups: - -.. math:: - - \mathbf{H}_n(\mathbf{K},\mathcal{R}) = \ker \partial_n / \mathrm{im} \ \partial_{n+1} - -We refer to :cite:`Munkres-elementsalgtop1984` for an introduction to homology -theory and to :cite:`DBLP:books/daglib/0025666` for an introduction to persistent homology. - -Indexing Scheme ---------------- - -"Changing" a simplicial complex consists in applying a simplicial map. An *indexing scheme* is a directed graph -together with a traversal order, such that two consecutive nodes in the graph are connected by an arrow (either forward -or backward). -The nodes represent simplicial complexes and the directed edges simplicial maps. - -From the computational point of view, there are two types of indexing schemes of interest in persistent homology: - -* linear ones - :math:`\bullet \longrightarrow \bullet \longrightarrow \cdots \longrightarrow \bullet \longrightarrow \bullet` - in persistent homology :cite:`DBLP:journals/dcg/ZomorodianC05`, -* zigzag ones - :math:`\bullet \longrightarrow \bullet \longleftarrow \cdots \longrightarrow \bullet \longleftarrow \bullet` - in zigzag persistent homology :cite:`DBLP:journals/focm/CarlssonS10`. - -These indexing schemes have a natural left-to-right traversal order, and we describe them with ranges and iterators. -In the current release of the Gudhi library, only the linear case is implemented. 
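-
-As a small illustration of this linear case, one can build a filtered simplicial complex with a
-simplex tree (cf. :doc:`simplex_tree_ref`) and compute its persistence; the simplices and
-filtration values below are arbitrary and only meant as an example:
-
-.. code-block:: python
-
-    import gudhi
-
-    st = gudhi.SimplexTree()
-    # Insert a filled triangle; every simplex appears after its faces.
-    st.insert([0], filtration=0.)
-    st.insert([1], filtration=0.)
-    st.insert([2], filtration=0.)
-    st.insert([0, 1], filtration=0.5)
-    st.insert([1, 2], filtration=0.8)
-    st.insert([0, 2], filtration=1.)
-    st.insert([0, 1, 2], filtration=1.)
-    # Persistent cohomology of the induced (linear) filtration.
-    print(st.persistence(homology_coeff_field=11, min_persistence=0))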
- -In the following, we consider the case where the indexing scheme is induced by a filtration. - -Ordering the simplices by increasing filtration values (breaking ties so as a simplex appears after its subsimplices of -same filtration value) provides an indexing scheme. - -Examples --------- - -We provide several example files: run these examples with -h for details on their use. - -.. only:: builder_html - - * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` - * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` - * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` - * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` - * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` - * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>` - * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` - -Bibliography -============ - -.. bibliography:: ../../biblio/bibliography.bib - :filter: docnames - :style: unsrt diff --git a/src/cython/doc/python3-sphinx-build.py b/src/cython/doc/python3-sphinx-build.py deleted file mode 100755 index 84d158cf..00000000 --- a/src/cython/doc/python3-sphinx-build.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python3 - -""" -Emulate sphinx-build for python3 -""" - -from sys import exit, argv -from sphinx import main - -if __name__ == '__main__': - exit(main(argv)) diff --git a/src/cython/doc/reader_utils_ref.rst b/src/cython/doc/reader_utils_ref.rst deleted file mode 100644 index f3ecebad..00000000 --- a/src/cython/doc/reader_utils_ref.rst +++ /dev/null @@ -1,15 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -============================= -Reader utils reference manual -============================= - -.. autofunction:: gudhi.read_off - -.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file - -.. autofunction:: gudhi.read_persistence_intervals_grouped_by_dimension - -.. autofunction:: gudhi.read_persistence_intervals_in_dimension diff --git a/src/cython/doc/rips_complex_ref.rst b/src/cython/doc/rips_complex_ref.rst deleted file mode 100644 index 22b5616c..00000000 --- a/src/cython/doc/rips_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -============================= -Rips complex reference manual -============================= - -.. autoclass:: gudhi.RipsComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.RipsComplex.__init__ diff --git a/src/cython/doc/rips_complex_sum.inc b/src/cython/doc/rips_complex_sum.inc deleted file mode 100644 index 857c6893..00000000 --- a/src/cython/doc/rips_complex_sum.inc +++ /dev/null @@ -1,16 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ - | .. 
figure:: | Rips complex is a simplicial complex constructed from a one skeleton | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse | - | ../../doc/Rips_complex/rips_complex_representation.png | graph. | | - | :figclass: align-center | | :Introduced in: GUDHI 2.0.0 | - | | The filtration value of each edge is computed from a user-given | | - | | distance function and is inserted until a user-given threshold | :Copyright: MIT | - | | value. | | - | | | | - | | This complex can be built from a point cloud and a distance function, | | - | | or from a distance matrix. | | - +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ - | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` | - +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/rips_complex_user.rst b/src/cython/doc/rips_complex_user.rst deleted file mode 100644 index 1d340dbe..00000000 --- a/src/cython/doc/rips_complex_user.rst +++ /dev/null @@ -1,345 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Rips complex user manual -========================= -Definition ----------- - -==================================================================== ================================ ====================== -:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 -==================================================================== ================================ ====================== - -+-------------------------------------------+----------------------------------------------------------------------+ -| :doc:`rips_complex_user` | :doc:`rips_complex_ref` | -+-------------------------------------------+----------------------------------------------------------------------+ - -The `Rips complex `_ is a simplicial complex that -generalizes proximity (:math:`\varepsilon`-ball) graphs to higher dimensions. The vertices correspond to the input -points, and a simplex is present if and only if its diameter is smaller than some parameter α. Considering all -parameters α defines a filtered simplicial complex, where the filtration value of a simplex is its diameter. -The filtration can be restricted to values α smaller than some threshold, to reduce its size. - -The input discrete metric space can be provided as a point cloud plus a distance function, or as a distance matrix. - -When creating a simplicial complex from the graph, :doc:`RipsComplex ` first builds the graph and -inserts it into the data structure. It then expands the simplicial complex (adds the simplices corresponding to cliques) -when required. The expansion can be stopped at dimension `max_dimension`, by default 1. - -A vertex name corresponds to the index of the point in the given range (aka. the point cloud). - -.. figure:: - ../../doc/Rips_complex/rips_complex_representation.png - :align: center - - Rips-complex one skeleton graph representation - -On this example, as edges (4,5), (4,6) and (5,6) are in the complex, simplex (4,5,6) is added with the filtration value -set with :math:`max(filtration(4,5), filtration(4,6), filtration(5,6))`. And so on for simplex (0,1,2,3). 
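A minimal sketch of this clique expansion, reusing the point set of the example below and assuming the standard `RipsComplex` and `SimplexTree` calls: with `max_dimension=3` the cliques (4,5,6) and (0,1,2,3) are inserted, and their filtration values are the maxima over their edges.

.. code-block:: python

    import gudhi

    points = [[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]]
    rips_complex = gudhi.RipsComplex(points=points, max_edge_length=12.0)
    # Expand the one-skeleton graph up to dimension 3 to add the cliques.
    simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)

    # The filtration value of a clique is the length of its longest edge.
    print(simplex_tree.filtration([4, 5, 6]))     # about 9.49, the edge (4, 6)
    print(simplex_tree.filtration([0, 1, 2, 3]))  # about 9.43, the edge (0, 3)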
- -If the `RipsComplex` interfaces are not detailed enough for your need, please refer to rips_persistence_step_by_step.cpp -C++ example, where the graph construction over the Simplex_tree is more detailed. - -A Rips complex can easily become huge, even if we limit the length of the edges -and the dimension of the simplices. One easy trick, before building a Rips -complex on a point cloud, is to call `sparsify_point_set` which removes points -that are too close to each other. This does not change its persistence diagram -by more than the length used to define "too close". - -A more general technique is to use a sparse approximation of the Rips -introduced by Don Sheehy :cite:`sheehy13linear`. We are using the version -described in :cite:`buchet16efficient` (except that we multiply all filtration -values by 2, to match the usual Rips complex). :cite:`cavanna15geometric` proves -a :math:`\frac{1}{1-\varepsilon}`-interleaving, although in practice the -error is usually smaller. A more intuitive presentation of the idea is -available in :cite:`cavanna15geometric`, and in a video -:cite:`cavanna15visualizing`. Passing an extra argument `sparse=0.3` at the -construction of a `RipsComplex` object asks it to build a sparse Rips with -parameter :math:`\varepsilon=0.3`, while the default `sparse=None` builds the -regular Rips complex. - - -Point cloud ------------ - -Example from a point cloud -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example builds the neighborhood graph from the given points, up to max_edge_length. -Then it creates a :doc:`Simplex_tree ` with it. - -Finally, it is asked to display information about the simplicial complex. - -.. testcode:: - - import gudhi - rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]], - max_edge_length=12.0) - - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -When launching (Rips maximal distance between 2 points is 12.0, is expanded -until dimension 1 - one skeleton graph in other words), the output is: - -.. testoutput:: - - Rips complex is of dimension 1 - 18 simplices - 7 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [5] -> 0.00 - [6] -> 0.00 - [2, 3] -> 5.00 - [4, 5] -> 5.39 - [0, 2] -> 5.83 - [0, 1] -> 6.08 - [1, 3] -> 6.32 - [1, 2] -> 6.71 - [5, 6] -> 7.28 - [2, 4] -> 8.94 - [0, 3] -> 9.43 - [4, 6] -> 9.49 - [3, 6] -> 11.00 - -Notice that if we use - -.. code-block:: python - - rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]], - max_edge_length=12.0, sparse=2) - -asking for a very sparse version (theory only gives some guarantee on the meaning of the output if `sparse<1`), -2 to 5 edges disappear, depending on the random vertex used to start the sparsification. - -Example from OFF file -^^^^^^^^^^^^^^^^^^^^^ - -This example builds the :doc:`RipsComplex ` from the given -points in an OFF file, and max_edge_length value. -Then it creates a :doc:`Simplex_tree ` with it. - -Finally, it is asked to display information about the Rips complex. - - -.. 
testcode:: - - import gudhi - point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off') - rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=12.0) - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -the program output is: - -.. testoutput:: - - Rips complex is of dimension 1 - 18 simplices - 7 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [5] -> 0.00 - [6] -> 0.00 - [2, 3] -> 5.00 - [4, 5] -> 5.39 - [0, 2] -> 5.83 - [0, 1] -> 6.08 - [1, 3] -> 6.32 - [1, 2] -> 6.71 - [5, 6] -> 7.28 - [2, 4] -> 8.94 - [0, 3] -> 9.43 - [4, 6] -> 9.49 - [3, 6] -> 11.00 - -Distance matrix ---------------- - -Example from a distance matrix -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This example builds the one skeleton graph from the given distance matrix, and max_edge_length value. -Then it creates a :doc:`Simplex_tree ` with it. - -Finally, it is asked to display information about the simplicial complex. - -.. testcode:: - - import gudhi - rips_complex = gudhi.RipsComplex(distance_matrix=[[], - [6.0827625303], - [5.8309518948, 6.7082039325], - [9.4339811321, 6.3245553203, 5], - [13.0384048104, 15.6524758425, 8.94427191, 12.0415945788], - [18.0277563773, 19.6468827044, 13.152946438, 14.7648230602, 5.3851648071], - [17.88854382, 17.1172427686, 12.0830459736, 11, 9.4868329805, 7.2801098893]], - max_edge_length=12.0) - - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -When launching (Rips maximal distance between 2 points is 12.0, is expanded -until dimension 1 - one skeleton graph in other words), the output is: - -.. testoutput:: - - Rips complex is of dimension 1 - 18 simplices - 7 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [5] -> 0.00 - [6] -> 0.00 - [2, 3] -> 5.00 - [4, 5] -> 5.39 - [0, 2] -> 5.83 - [0, 1] -> 6.08 - [1, 3] -> 6.32 - [1, 2] -> 6.71 - [5, 6] -> 7.28 - [2, 4] -> 8.94 - [0, 3] -> 9.43 - [4, 6] -> 9.49 - [3, 6] -> 11.00 - -Example from csv file -^^^^^^^^^^^^^^^^^^^^^ - -This example builds the :doc:`RipsComplex ` from the given -distance matrix in a csv file, and max_edge_length value. -Then it creates a :doc:`Simplex_tree ` with it. - -Finally, it is asked to display information about the Rips complex. - - -.. testcode:: - - import gudhi - distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=gudhi.__root_source_dir__ + \ - '/data/distance_matrix/full_square_distance_matrix.csv') - rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=12.0) - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' 
- print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -the program output is: - -.. testoutput:: - - Rips complex is of dimension 1 - 18 simplices - 7 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [5] -> 0.00 - [6] -> 0.00 - [2, 3] -> 5.00 - [4, 5] -> 5.39 - [0, 2] -> 5.83 - [0, 1] -> 6.08 - [1, 3] -> 6.32 - [1, 2] -> 6.71 - [5, 6] -> 7.28 - [2, 4] -> 8.94 - [0, 3] -> 9.43 - [4, 6] -> 9.49 - [3, 6] -> 11.00 - -Correlation matrix ------------------- - -Example from a correlation matrix -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Analogously to the case of distance matrix, Rips complexes can be also constructed based on correlation matrix. -Given a correlation matrix M, comportment-wise 1-M is a distance matrix. -This example builds the one skeleton graph from the given corelation matrix and threshold value. -Then it creates a :doc:`Simplex_tree ` with it. - -Finally, it is asked to display information about the simplicial complex. - -.. testcode:: - - import gudhi - import numpy as np - - # User defined correlation matrix is: - # |1 0.06 0.23 0.01 0.89| - # |0.06 1 0.74 0.01 0.61| - # |0.23 0.74 1 0.72 0.03| - # |0.01 0.01 0.72 1 0.7 | - # |0.89 0.61 0.03 0.7 1 | - correlation_matrix=np.array([[1., 0.06, 0.23, 0.01, 0.89], - [0.06, 1., 0.74, 0.01, 0.61], - [0.23, 0.74, 1., 0.72, 0.03], - [0.01, 0.01, 0.72, 1., 0.7], - [0.89, 0.61, 0.03, 0.7, 1.]], float) - - distance_matrix = np.ones((correlation_matrix.shape),float) - correlation_matrix - rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0) - - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ - repr(simplex_tree.num_simplices()) + ' simplices - ' + \ - repr(simplex_tree.num_vertices()) + ' vertices.' - print(result_str) - fmt = '%s -> %.2f' - for filtered_value in simplex_tree.get_filtration(): - print(fmt % tuple(filtered_value)) - -When launching (Rips maximal distance between 2 points is 12.0, is expanded -until dimension 1 - one skeleton graph in other words), the output is: - -.. testoutput:: - - Rips complex is of dimension 1 - 15 simplices - 5 vertices. - [0] -> 0.00 - [1] -> 0.00 - [2] -> 0.00 - [3] -> 0.00 - [4] -> 0.00 - [0, 4] -> 0.11 - [1, 2] -> 0.26 - [2, 3] -> 0.28 - [3, 4] -> 0.30 - [1, 4] -> 0.39 - [0, 2] -> 0.77 - [0, 1] -> 0.94 - [2, 4] -> 0.97 - [0, 3] -> 0.99 - [1, 3] -> 0.99 - -.. note:: - As persistence diagrams points will be under the diagonal, - bottleneck distance and persistence graphical tool will not work properly, - this is a known issue. diff --git a/src/cython/doc/simplex_tree_ref.rst b/src/cython/doc/simplex_tree_ref.rst deleted file mode 100644 index 9eb8c199..00000000 --- a/src/cython/doc/simplex_tree_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -============================= -Simplex tree reference manual -============================= - -.. autoclass:: gudhi.SimplexTree - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.SimplexTree.__init__ diff --git a/src/cython/doc/simplex_tree_sum.inc b/src/cython/doc/simplex_tree_sum.inc deleted file mode 100644 index 5ba58d2b..00000000 --- a/src/cython/doc/simplex_tree_sum.inc +++ /dev/null @@ -1,13 +0,0 @@ -.. 
table:: - :widths: 30 50 20 - - +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ - | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria | - | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | | - | :alt: Simplex tree representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: MIT | - | | | | - +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ - | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` | - +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/simplex_tree_user.rst b/src/cython/doc/simplex_tree_user.rst deleted file mode 100644 index aebeb29f..00000000 --- a/src/cython/doc/simplex_tree_user.rst +++ /dev/null @@ -1,72 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Simplex tree user manual -======================== -Definition ----------- - -.. include:: simplex_tree_sum.inc - -A simplicial complex :math:`\mathbf{K}` on a set of vertices :math:`V = \{1, \cdots ,|V|\}` is a collection of -simplices :math:`\{\sigma\}`, :math:`\sigma \subseteq V` such that -:math:`\tau \subseteq \sigma \in \mathbf{K} \rightarrow \tau \in \mathbf{K}`. The dimension :math:`n=|\sigma|-1` of -:math:`\sigma` is its number of elements minus `1`. - -A filtration of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying -:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`. Ordering the simplices by increasing filtration -values (breaking ties so as a simplex appears after its subsimplices of same filtration value) provides an indexing -scheme. - - -Implementation --------------- - -There are two implementation of complexes. The first on is the Simplex_tree data structure. -The simplex tree is an efficient and flexible data structure for representing general (filtered) simplicial complexes. -The data structure is described in :cite`boissonnatmariasimplextreealgorithmica`. - -The second one is the Hasse_complex. The Hasse complex is a data structure representing explicitly all co-dimension 1 -incidence relations in a complex. It is consequently faster when accessing the boundary of a simplex, but is less -compact and harder to construct from scratch. - -Example -------- - -.. testcode:: - - import gudhi - st = gudhi.SimplexTree() - if st.insert([0, 1]): - print("[0, 1] inserted") - if st.insert([0, 1, 2], filtration=4.0): - print("[0, 1, 2] inserted") - if st.find([0, 1]): - print("[0, 1] found") - result_str = 'num_vertices=' + repr(st.num_vertices()) - print(result_str) - result_str = 'num_simplices=' + repr(st.num_simplices()) - print(result_str) - print("skeleton(2) =") - for sk_value in st.get_skeleton(2): - print(sk_value) - - -The output is: - -.. 
testoutput:: - - [0, 1] inserted - [0, 1, 2] inserted - [0, 1] found - num_vertices=3 - num_simplices=7 - skeleton(2) = - ([0, 1, 2], 4.0) - ([0, 1], 0.0) - ([0, 2], 4.0) - ([0], 0.0) - ([1, 2], 4.0) - ([1], 0.0) - ([2], 4.0) diff --git a/src/cython/doc/strong_witness_complex_ref.rst b/src/cython/doc/strong_witness_complex_ref.rst deleted file mode 100644 index d624d711..00000000 --- a/src/cython/doc/strong_witness_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -======================================= -Strong witness complex reference manual -======================================= - -.. autoclass:: gudhi.StrongWitnessComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.StrongWitnessComplex.__init__ diff --git a/src/cython/doc/tangential_complex_ref.rst b/src/cython/doc/tangential_complex_ref.rst deleted file mode 100644 index cdfda082..00000000 --- a/src/cython/doc/tangential_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -=================================== -Tangential complex reference manual -=================================== - -.. autoclass:: gudhi.TangentialComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.TangentialComplex.__init__ diff --git a/src/cython/doc/tangential_complex_sum.inc b/src/cython/doc/tangential_complex_sum.inc deleted file mode 100644 index c8bc1177..00000000 --- a/src/cython/doc/tangential_complex_sum.inc +++ /dev/null @@ -1,14 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ - | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | - | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | - | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | - | | an unknown manifold. The running time depends only linearly on the | | - | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 `_) | - | | dimension :math:`k`. | | - | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | - +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ - | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | - +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/cython/doc/tangential_complex_user.rst b/src/cython/doc/tangential_complex_user.rst deleted file mode 100644 index ebfe1e29..00000000 --- a/src/cython/doc/tangential_complex_user.rst +++ /dev/null @@ -1,204 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Tangential complex user manual -============================== -.. 
include:: tangential_complex_sum.inc - -Definition ----------- - -A Tangential Delaunay complex is a simplicial complex designed to reconstruct a -:math:`k`-dimensional smooth manifold embedded in :math:`d`-dimensional -Euclidean space. The input is a point sample coming from an unknown manifold, -which means that the points lie close to a structure of "small" intrinsic -dimension. The running time depends only linearly on the extrinsic dimension -:math:`d` and exponentially on the intrinsic dimension :math:`k`. - -An extensive description of the Tangential complex can be found in -:cite:`tangentialcomplex2014`. - -What is a Tangential Complex? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Let us start with the description of the Tangential complex of a simple -example, with :math:`k = 1` and :math:`d = 2`. The point set -:math:`\mathscr P` is located on a closed curve embedded in 2D. -Only 4 points will be displayed (more are required for PCA) to simplify the -figures. - -.. figure:: ../../doc/Tangential_complex/tc_example_01.png - :alt: The input - :figclass: align-center - - The input - -For each point :math:`P`, estimate its tangent subspace :math:`T_P` using PCA. - -.. figure:: ../../doc/Tangential_complex/tc_example_02.png - :alt: The estimated normals - :figclass: align-center - - The estimated normals - - -Let us add the Voronoi diagram of the points in orange. For each point -:math:`P`, construct its star in the Delaunay triangulation of -:math:`\mathscr P` restricted to :math:`T_P`. - -.. figure:: ../../doc/Tangential_complex/tc_example_03.png - :alt: The Voronoi diagram - :figclass: align-center - - The Voronoi diagram - -The Tangential Delaunay complex is the union of those stars. - -In practice, neither the ambient Voronoi diagram nor the ambient Delaunay -triangulation is computed. Instead, local :math:`k`-dimensional regular -triangulations are computed with a limited number of points as we only need the -star of each point. More details can be found in :cite:`tangentialcomplex2014`. - -Inconsistencies -^^^^^^^^^^^^^^^ -Inconsistencies between the stars can occur. An inconsistency occurs when a -simplex is not in the star of all its vertices. - -Let us take the same example. - -.. figure:: ../../doc/Tangential_complex/tc_example_07_before.png - :alt: Before - :figclass: align-center - - Before - -Let us slightly move the tangent subspace :math:`T_Q` - -.. figure:: ../../doc/Tangential_complex/tc_example_07_after.png - :alt: After - :figclass: align-center - - After - -Now, the star of :math:`Q` contains :math:`QP`, but the star of :math:`P` does -not contain :math:`QP`. We have an inconsistency. - -.. figure:: ../../doc/Tangential_complex/tc_example_08.png - :alt: After - :figclass: align-center - - After - -One way to solve inconsistencies is to randomly perturb the positions of the -points involved in an inconsistency. In the current implementation, this -perturbation is done in the tangent subspace of each point. The maximum -perturbation radius is given as a parameter to the constructor. - -In most cases, we recommend to provide a point set where the minimum distance -between any two points is not too small. This can be achieved using the -functions provided by the Subsampling module. Then, a good value to start with -for the maximum perturbation radius would be around half the minimum distance -between any two points. The Example with perturbation below shows an example of -such a process. 
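As a rough sketch of that recipe (the point set is only illustrative, and the constructor and method calls are borrowed from the perturbation example further down), one can take half the minimum pairwise distance as the maximum perturbation radius:

.. code-block:: python

    import itertools
    import math
    import gudhi

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]

    def dist(p, q):
        return math.sqrt(sum((a - b) ** 2 for a, b in zip(p, q)))

    # Half the minimum distance between any two points is a reasonable
    # starting value for the maximum perturbation radius.
    max_perturb = 0.5 * min(dist(p, q)
                            for p, q in itertools.combinations(points, 2))

    tc = gudhi.TangentialComplex(intrisic_dim=1, points=points)
    tc.compute_tangential_complex()
    if tc.num_inconsistent_simplices() > 0:
        # Same argument order as in the example below: radius, then a time limit.
        tc.fix_inconsistencies_using_perturbation(max_perturb, 60)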
- -In most cases, this process is able to dramatically reduce the number of -inconsistencies, but is not guaranteed to succeed. - -Output -^^^^^^ -The result of the computation is exported as a Simplex_tree. It is the union of -the stars of all the input points. A vertex in the Simplex Tree is the index of -the point in the range provided by the user. The point corresponding to a -vertex can also be obtained through the Tangential_complex::get_point function. -Note that even if the positions of the points are perturbed, their original -positions are kept (e.g. Tangential_complex::get_point returns the original -position of the point). - -The result can be obtained after the computation of the Tangential complex -itself and/or after the perturbation process. - - -Simple example --------------- - -This example builds the Tangential complex of point set read in an OFF file. - -.. testcode:: - - import gudhi - tc = gudhi.TangentialComplex(intrisic_dim = 1, - off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off') - tc.compute_tangential_complex() - result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \ - ' simplices - ' + repr(tc.num_vertices()) + ' vertices.' - print(result_str) - - st = tc.create_simplex_tree() - result_str = 'Simplex tree is of dimension ' + repr(st.dimension()) + \ - ' - ' + repr(st.num_simplices()) + ' simplices - ' + \ - repr(st.num_vertices()) + ' vertices.' - print(result_str) - for filtered_value in st.get_filtration(): - print(filtered_value[0]) - -The output is: - -.. testoutput:: - - Tangential contains 12 simplices - 7 vertices. - Simplex tree is of dimension 1 - 15 simplices - 7 vertices. - [0] - [1] - [0, 1] - [2] - [0, 2] - [1, 2] - [3] - [1, 3] - [4] - [2, 4] - [5] - [4, 5] - [6] - [3, 6] - [5, 6] - - -Example with perturbation -------------------------- - -This example builds the Tangential complex of a point set, then tries to solve -inconsistencies by perturbing the positions of points involved in inconsistent -simplices. - -.. testcode:: - - import gudhi - tc = gudhi.TangentialComplex(intrisic_dim = 1, - points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]) - tc.compute_tangential_complex() - result_str = 'Tangential contains ' + repr(tc.num_vertices()) + ' vertices.' - print(result_str) - - if tc.num_inconsistent_simplices() > 0: - print('Tangential contains inconsistencies.') - - tc.fix_inconsistencies_using_perturbation(10, 60) - if tc.num_inconsistent_simplices() == 0: - print('Inconsistencies has been fixed.') - -The output is: - -.. testoutput:: - - Tangential contains 4 vertices. - Inconsistencies has been fixed. - - -Bibliography -============ - -.. bibliography:: ../../biblio/bibliography.bib - :filter: docnames - :style: unsrt diff --git a/src/cython/doc/todos.rst b/src/cython/doc/todos.rst deleted file mode 100644 index ca274ced..00000000 --- a/src/cython/doc/todos.rst +++ /dev/null @@ -1,9 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -========== -To be done -========== - -.. todolist:: diff --git a/src/cython/doc/witness_complex_ref.rst b/src/cython/doc/witness_complex_ref.rst deleted file mode 100644 index 9987d3fd..00000000 --- a/src/cython/doc/witness_complex_ref.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -================================ -Witness complex reference manual -================================ - -.. 
autoclass:: gudhi.WitnessComplex - :members: - :undoc-members: - :show-inheritance: - - .. automethod:: gudhi.WitnessComplex.__init__ diff --git a/src/cython/doc/witness_complex_sum.inc b/src/cython/doc/witness_complex_sum.inc deleted file mode 100644 index 2be8b220..00000000 --- a/src/cython/doc/witness_complex_sum.inc +++ /dev/null @@ -1,18 +0,0 @@ -.. table:: - :widths: 30 50 20 - - +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ - | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | - | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | | - | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | - | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | - | | | | - | | | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 for Euclidean versions only | - +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ - | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | - | | * :doc:`strong_witness_complex_ref` | - | | * :doc:`euclidean_witness_complex_ref` | - | | * :doc:`euclidean_strong_witness_complex_ref` | - +-------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ - diff --git a/src/cython/doc/witness_complex_user.rst b/src/cython/doc/witness_complex_user.rst deleted file mode 100644 index 40e94134..00000000 --- a/src/cython/doc/witness_complex_user.rst +++ /dev/null @@ -1,135 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -Witness complex user manual -=========================== - -.. include:: witness_complex_sum.inc - -Definitions ------------ - -Witness complex is a simplicial complex defined on two sets of points in :math:`\mathbb{R}^D`: - -- :math:`W` set of **witnesses** and -- :math:`L` set of **landmarks**. - -Even though often the set of landmarks :math:`L` is a subset of the set of witnesses :math:`W`, it is not a requirement -for the current implementation. - -Landmarks are the vertices of the simplicial complex and witnesses help to decide on which simplices are inserted via a -predicate "is witnessed". - -De Silva and Carlsson in their paper :cite:`de2004topological` differentiate **weak witnessing** and -**strong witnessing**: - -- *weak*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l) \leq d(w,l')` -- *strong*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l) \leq d(w,l')` - -where :math:`d(.,.)` is a distance function. 
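To make the two predicates concrete, here is a small self-contained sketch, in plain Python rather than through the library interface, that checks them literally against the definitions above (the helper names are ours):

.. code-block:: python

    import math

    def dist(w, l):
        return math.sqrt(sum((a - b) ** 2 for a, b in zip(w, l)))

    def is_weakly_witnessed(sigma, w, landmarks):
        # Every landmark of sigma is at least as close to w as every
        # landmark outside sigma.
        outside = [l for l in landmarks if l not in sigma]
        return all(dist(w, l) <= dist(w, lp) for l in sigma for lp in outside)

    def is_strongly_witnessed(sigma, w, landmarks):
        # Every landmark of sigma is at least as close to w as every landmark of L.
        return all(dist(w, l) <= dist(w, lp) for l in sigma for lp in landmarks)

    landmarks = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]
    sigma = [[0.0, 0.0], [1.0, 0.0]]
    w = [0.4, 0.1]
    print(is_weakly_witnessed(sigma, w, landmarks))    # True
    print(is_strongly_witnessed(sigma, w, landmarks))  # False

Since :math:`L \setminus \sigma \subseteq L`, a strongly witnessed simplex is always weakly witnessed, while the converse fails, as the printed example shows.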
- -Both definitions can be relaxed by a real value :math:`\alpha`: - -- *weak*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2` -- *strong*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2` - -which leads to definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and -**strong relaxed witness complex** respectively. - -.. figure:: ../../doc/Witness_complex/swit.svg - :alt: Strongly witnessed simplex - :figclass: align-center - - Strongly witnessed simplex - - -In particular case of 0-relaxation, weak complex corresponds to **witness complex** introduced in -:cite:`de2004topological`, whereas 0-relaxed strong witness complex consists of just vertices and is not very -interesting. Hence for small relaxation weak version is preferable. -However, to capture the homotopy type (for example using Gudhi::persistent_cohomology::Persistent_cohomology) it is -often necessary to work with higher filtration values. In this case strong relaxed witness complex is faster to compute -and offers similar results. - -Implementation --------------- - -The two complexes described above are implemented in the corresponding classes - -- :doc:`witness_complex_ref` -- :doc:`strong_witness_complex_ref` -- :doc:`euclidean_witness_complex_ref` -- :doc:`euclidean_strong_witness_complex_ref` - -The construction of the Euclidean versions of complexes follow the same scheme: - -1. Construct a search tree on landmarks. -2. Construct lists of nearest landmarks for each witness. -3. Construct the witness complex for nearest landmark lists. - -In the non-Euclidean classes, the lists of nearest landmarks are supposed to be given as input. - -The constructors take on the steps 1 and 2, while the function 'create_complex' executes the step 3. - -Constructing weak relaxed witness complex from an off file ----------------------------------------------------------- - -Let's start with a simple example, which reads an off point file and computes a weak witness complex. - -.. 
code-block:: python - - import gudhi - import argparse - - parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from ' - 'points read in a OFF file.', - epilog='Example: ' - 'example/witness_complex_diagram_persistence_from_off_file_example.py ' - '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2' - '- Constructs a alpha complex with the ' - 'points from the given OFF file.') - parser.add_argument("-f", "--file", type=str, required=True) - parser.add_argument("-a", "--max_alpha_square", type=float, required=True) - parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) - parser.add_argument("-d", "--limit_dimension", type=int, required=True) - - args = parser.parse_args() - - with open(args.file, 'r') as f: - first_line = f.readline() - if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): - print("#####################################################################") - print("EuclideanWitnessComplex creation from points read in a OFF file") - - witnesses = gudhi.read_off(off_file=args.file) - landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks) - - message = "EuclideanWitnessComplex with max_edge_length=" + repr(args.max_alpha_square) + \ - " - Number of landmarks=" + repr(args.number_of_landmarks) - print(message) - - witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks) - simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square, - limit_dimension=args.limit_dimension) - - message = "Number of simplices=" + repr(simplex_tree.num_simplices()) - print(message) - else: - print(args.file, "is not a valid OFF file") - - f.close() - - -Example2: Computing persistence using strong relaxed witness complex --------------------------------------------------------------------- - -Here is an example of constructing a strong witness complex filtration and computing persistence on it: - -* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` - -Bibliography -============ - -.. bibliography:: ../../biblio/bibliography.bib - :filter: docnames - :style: unsrt diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py deleted file mode 100755 index b8f283b3..00000000 --- a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="AlphaComplex creation from " "points read in a OFF file.", - epilog="Example: " - "example/alpha_complex_diagram_persistence_from_off_file_example.py " - "-f ../data/points/tore3D_300.off -a 0.6" - "- Constructs a alpha complex with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -with open(args.file, "r") as f: - first_line = f.readline() - if (first_line == "OFF\n") or (first_line == "nOFF\n"): - print("#####################################################################") - print("AlphaComplex creation from points read in a OFF file") - - message = "AlphaComplex with max_edge_length=" + repr(args.max_alpha_square) - print(message) - - alpha_complex = gudhi.AlphaComplex(off_file=args.file) - simplex_tree = alpha_complex.create_simplex_tree( - max_alpha_square=args.max_alpha_square - ) - - message = "Number of simplices=" + repr(simplex_tree.num_simplices()) - print(message) - - diag = simplex_tree.persistence() - - print("betti_numbers()=") - print(simplex_tree.betti_numbers()) - - if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(diag, band=args.band) - pplot.show() - else: - print(args.file, "is not a valid OFF file") - - f.close() diff --git a/src/cython/example/alpha_complex_from_points_example.py b/src/cython/example/alpha_complex_from_points_example.py deleted file mode 100755 index a746998c..00000000 --- a/src/cython/example/alpha_complex_from_points_example.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -from gudhi import AlphaComplex, SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("AlphaComplex creation from points") -alpha_complex = AlphaComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]]) -simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0) - -if simplex_tree.find([0, 1]): - print("[0, 1] Found !!") -else: - print("[0, 1] Not found...") - -if simplex_tree.find([4]): - print("[4] Found !!") -else: - print("[4] Not found...") - -if simplex_tree.insert([0, 1, 2], filtration=4.0): - print("[0, 1, 2] Inserted !!") -else: - print("[0, 1, 2] Not inserted...") - -if simplex_tree.insert([0, 1, 4], filtration=4.0): - print("[0, 1, 4] Inserted !!") -else: - print("[0, 1, 4] Not inserted...") - -if simplex_tree.find([4]): - print("[4] Found !!") -else: - print("[4] Not found...") - -print("dimension=", simplex_tree.dimension()) -print("filtrations=", simplex_tree.get_filtration()) -print("star([0])=", simplex_tree.get_star([0])) -print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1)) - -print("point[0]=", alpha_complex.get_point(0)) -print("point[5]=", alpha_complex.get_point(5)) diff --git a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py deleted file mode 100755 index 086307ee..00000000 --- a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse -import math - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="AlphaComplex and RipsComplex " - "persistence creation from points read in " - "a OFF file. 
Bottleneck distance computation" - " on each dimension", - epilog="Example: " - "example/alpha_rips_persistence_bottleneck_distance.py " - "-f ../data/points/tore3D_1307.off -t 0.15 -d 3", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-t", "--threshold", type=float, default=0.5) -parser.add_argument("-d", "--max_dimension", type=int, default=1) - -args = parser.parse_args() -with open(args.file, "r") as f: - first_line = f.readline() - if (first_line == "OFF\n") or (first_line == "nOFF\n"): - point_cloud = gudhi.read_off(off_file=args.file) - print("#####################################################################") - print("RipsComplex creation from points read in a OFF file") - - message = "RipsComplex with max_edge_length=" + repr(args.threshold) - print(message) - - rips_complex = gudhi.RipsComplex( - points=point_cloud, max_edge_length=args.threshold - ) - - rips_stree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) - - message = "Number of simplices=" + repr(rips_stree.num_simplices()) - print(message) - - rips_diag = rips_stree.persistence() - - print("#####################################################################") - print("AlphaComplex creation from points read in a OFF file") - - message = "AlphaComplex with max_edge_length=" + repr(args.threshold) - print(message) - - alpha_complex = gudhi.AlphaComplex(points=point_cloud) - alpha_stree = alpha_complex.create_simplex_tree( - max_alpha_square=(args.threshold * args.threshold) - ) - - message = "Number of simplices=" + repr(alpha_stree.num_simplices()) - print(message) - - alpha_diag = alpha_stree.persistence() - - max_b_distance = 0.0 - for dim in range(args.max_dimension): - # Alpha persistence values needs to be transform because filtration - # values are alpha square values - funcs = [math.sqrt, math.sqrt] - alpha_intervals = [] - for interval in alpha_stree.persistence_intervals_in_dimension(dim): - alpha_intervals.append( - map(lambda func, value: func(value), funcs, interval) - ) - - rips_intervals = rips_stree.persistence_intervals_in_dimension(dim) - bottleneck_distance = gudhi.bottleneck_distance( - rips_intervals, alpha_intervals - ) - message = ( - "In dimension " - + repr(dim) - + ", bottleneck distance = " - + repr(bottleneck_distance) - ) - print(message) - max_b_distance = max(bottleneck_distance, max_b_distance) - - print( - "================================================================================" - ) - message = "Bottleneck distance is " + repr(max_b_distance) - print(message) - - else: - print(args.file, "is not a valid OFF file") - - f.close() diff --git a/src/cython/example/bottleneck_basic_example.py b/src/cython/example/bottleneck_basic_example.py deleted file mode 100755 index 392d2a6e..00000000 --- a/src/cython/example/bottleneck_basic_example.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Francois Godi, Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] - -diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] - -message = "diag1=" + repr(diag1) -print(message) - -message = "diag2=" + repr(diag2) -print(message) - -message = "Bottleneck distance approximation=" + repr( - gudhi.bottleneck_distance(diag1, diag2, 0.1) -) -print(message) - -message = "Bottleneck distance exact value=" + repr( - gudhi.bottleneck_distance(diag1, diag2) -) -print(message) diff --git a/src/cython/example/coordinate_graph_induced_complex.py b/src/cython/example/coordinate_graph_induced_complex.py deleted file mode 100755 index e32141b4..00000000 --- a/src/cython/example/coordinate_graph_induced_complex.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="Coordinate GIC " "from points read in a OFF file.", - epilog="Example: " - "example/coordinate_graph_induced_complex.py " - "-f ../data/points/KleinBottle5D.off -c 0 -v" - "- Constructs the coordinate GIC with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-c", "--coordinate", type=int, default=0) -parser.add_argument( - "-v", - "--verbose", - default=False, - action="store_true", - help="Flag for program verbosity", -) - -args = parser.parse_args() - -nerve_complex = gudhi.CoverComplex() -nerve_complex.set_verbose(args.verbose) - -if nerve_complex.read_point_cloud(args.file): - nerve_complex.set_type("GIC") - nerve_complex.set_color_from_coordinate(args.coordinate) - nerve_complex.set_function_from_coordinate(args.coordinate) - nerve_complex.set_graph_from_automatic_rips() - nerve_complex.set_automatic_resolution() - nerve_complex.set_gain() - nerve_complex.set_cover_from_function() - nerve_complex.find_simplices() - nerve_complex.plot_dot() - simplex_tree = nerve_complex.create_simplex_tree() - nerve_complex.compute_PD() - if args.verbose: - print("Iterator on coordinate GIC simplices") - result_str = ( - "Coordinate GIC is of dimension " - + repr(simplex_tree.dimension()) - + " - " - + repr(simplex_tree.num_simplices()) - + " simplices - " - + repr(simplex_tree.num_vertices()) - + " vertices." 
- ) - print(result_str) - for filtered_value in simplex_tree.get_filtration(): - print(filtered_value[0]) diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py deleted file mode 100755 index 610ba44f..00000000 --- a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="EuclideanStrongWitnessComplex creation from " - "points read in a OFF file.", - epilog="Example: " - "example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py " - "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2" - "- Constructs a strong witness complex with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-a", "--max_alpha_square", type=float, required=True) -parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) -parser.add_argument("-d", "--limit_dimension", type=int, required=True) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -with open(args.file, "r") as f: - first_line = f.readline() - if (first_line == "OFF\n") or (first_line == "nOFF\n"): - print("#####################################################################") - print("EuclideanStrongWitnessComplex creation from points read in a OFF file") - - witnesses = gudhi.read_off(off_file=args.file) - landmarks = gudhi.pick_n_random_points( - points=witnesses, nb_points=args.number_of_landmarks - ) - - message = ( - "EuclideanStrongWitnessComplex with max_edge_length=" - + repr(args.max_alpha_square) - + " - Number of landmarks=" - + repr(args.number_of_landmarks) - ) - print(message) - - witness_complex = gudhi.EuclideanStrongWitnessComplex( - witnesses=witnesses, landmarks=landmarks - ) - simplex_tree = witness_complex.create_simplex_tree( - max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension - ) - - message = "Number of simplices=" + repr(simplex_tree.num_simplices()) - print(message) - - diag = simplex_tree.persistence() - - print("betti_numbers()=") - print(simplex_tree.betti_numbers()) - - if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(diag, band=args.band) - pplot.show() - else: - print(args.file, "is not a valid OFF file") - - f.close() diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py deleted file mode 100755 index 7587b732..00000000 --- a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse 
- -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="EuclideanWitnessComplex creation from " "points read in a OFF file.", - epilog="Example: " - "example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py " - "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2" - "- Constructs a weak witness complex with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-a", "--max_alpha_square", type=float, required=True) -parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) -parser.add_argument("-d", "--limit_dimension", type=int, required=True) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -with open(args.file, "r") as f: - first_line = f.readline() - if (first_line == "OFF\n") or (first_line == "nOFF\n"): - print("#####################################################################") - print("EuclideanWitnessComplex creation from points read in a OFF file") - - witnesses = gudhi.read_off(off_file=args.file) - landmarks = gudhi.pick_n_random_points( - points=witnesses, nb_points=args.number_of_landmarks - ) - - message = ( - "EuclideanWitnessComplex with max_edge_length=" - + repr(args.max_alpha_square) - + " - Number of landmarks=" - + repr(args.number_of_landmarks) - ) - print(message) - - witness_complex = gudhi.EuclideanWitnessComplex( - witnesses=witnesses, landmarks=landmarks - ) - simplex_tree = witness_complex.create_simplex_tree( - max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension - ) - - message = "Number of simplices=" + repr(simplex_tree.num_simplices()) - print(message) - - diag = simplex_tree.persistence() - - print("betti_numbers()=") - print(simplex_tree.betti_numbers()) - - if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(diag, band=args.band) - pplot.show() - else: - print(args.file, "is not a valid OFF file") - - f.close() diff --git a/src/cython/example/functional_graph_induced_complex.py b/src/cython/example/functional_graph_induced_complex.py deleted file mode 100755 index 8b645040..00000000 --- a/src/cython/example/functional_graph_induced_complex.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="Functional GIC " "from points read in a OFF file.", - epilog="Example: " - "example/functional_graph_induced_complex.py " - "-o ../data/points/COIL_database/lucky_cat.off " - "-f ../data/points/COIL_database/lucky_cat_PCA1" - "- Constructs the functional GIC with the " - "points from the given OFF and function files.", -) -parser.add_argument("-o", "--off-file", type=str, required=True) -parser.add_argument("-f", "--function-file", type=str, required=True) -parser.add_argument( - "-v", - "--verbose", - default=False, - action="store_true", - help="Flag for program verbosity", -) - -args = parser.parse_args() - -nerve_complex = gudhi.CoverComplex() -nerve_complex.set_verbose(args.verbose) - -if nerve_complex.read_point_cloud(args.off_file): - nerve_complex.set_type("GIC") - nerve_complex.set_color_from_file(args.function_file) - nerve_complex.set_function_from_file(args.function_file) - nerve_complex.set_graph_from_automatic_rips() - nerve_complex.set_automatic_resolution() - nerve_complex.set_gain() - nerve_complex.set_cover_from_function() - nerve_complex.find_simplices() - nerve_complex.plot_dot() - simplex_tree = nerve_complex.create_simplex_tree() - nerve_complex.compute_PD() - if args.verbose: - print("Iterator on functional GIC simplices") - result_str = ( - "Functional GIC is of dimension " - + repr(simplex_tree.dimension()) - + " - " - + repr(simplex_tree.num_simplices()) - + " simplices - " - + repr(simplex_tree.num_vertices()) - + " vertices." - ) - print(result_str) - for filtered_value in simplex_tree.get_filtration(): - print(filtered_value[0]) diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py deleted file mode 100755 index 3b0ca54d..00000000 --- a/src/cython/example/gudhi_graphical_tools_example.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python - -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("Show barcode persistence example") - -persistence = [ - (2, (1.0, float("inf"))), - (1, (1.4142135623730951, float("inf"))), - (1, (1.4142135623730951, float("inf"))), - (0, (0.0, float("inf"))), - (0, (0.0, 1.0)), - (0, (0.0, 1.0)), - (0, (0.0, 1.0)), -] -gudhi.plot_persistence_barcode(persistence) - -print("#####################################################################") -print("Show diagram persistence example") - -pplot = gudhi.plot_persistence_diagram(persistence) -pplot.show() - -print("#####################################################################") -print("Show diagram persistence example with a confidence band") - -pplot = gudhi.plot_persistence_diagram(persistence, band=0.2) -pplot.show() diff --git a/src/cython/example/nerve_of_a_covering.py b/src/cython/example/nerve_of_a_covering.py deleted file mode 100755 index 3c8e0f90..00000000 --- a/src/cython/example/nerve_of_a_covering.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="Nerve of a covering creation " "from points read in a OFF file.", - epilog="Example: " - "example/nerve_of_a_covering.py " - "-f ../data/points/human.off -c 2 -r 10 -g 0.3" - "- Constructs Nerve of a covering with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-c", "--coordinate", type=int, default=0) -parser.add_argument("-r", "--resolution", type=int, default=10) -parser.add_argument("-g", "--gain", type=float, default=0.3) -parser.add_argument( - "-v", - "--verbose", - default=False, - action="store_true", - help="Flag for program verbosity", -) - -args = parser.parse_args() - -nerve_complex = gudhi.CoverComplex() -nerve_complex.set_verbose(args.verbose) - -if nerve_complex.read_point_cloud(args.file): - nerve_complex.set_type("Nerve") - nerve_complex.set_color_from_coordinate(args.coordinate) - nerve_complex.set_function_from_coordinate(args.coordinate) - nerve_complex.set_graph_from_OFF() - nerve_complex.set_resolution_with_interval_number(args.resolution) - nerve_complex.set_gain(args.gain) - nerve_complex.set_cover_from_function() - nerve_complex.find_simplices() - nerve_complex.write_info() - simplex_tree = nerve_complex.create_simplex_tree() - nerve_complex.compute_PD() - if args.verbose: - print("Iterator on graph induced complex simplices") - result_str = ( - "Nerve is of dimension " - + repr(simplex_tree.dimension()) - + " - " - + repr(simplex_tree.num_simplices()) - + " simplices - " - + repr(simplex_tree.num_vertices()) - + " vertices." 
- ) - print(result_str) - for filtered_value in simplex_tree.get_filtration(): - print(filtered_value[0]) diff --git a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py deleted file mode 100755 index 9cb855cd..00000000 --- a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def is_file_perseus(file): - num_lines = open(file).read().count("\n") - try: - f = open(file) - num_dim = int(f.readline()) - coeff = 1 - for dim in range(0, num_dim): - try: - line = int(f.readline()) - coeff *= abs(line) - except ValueError: - return False - if num_lines == (1 + num_dim + coeff): - return True - else: - return False - except ValueError: - return False - - -parser = argparse.ArgumentParser( - description="Periodic cubical complex from a " "Perseus-style file name.", - epilog="Example: " - "./periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py" - " -f ../data/bitmap/CubicalTwoSphere.txt", -) - -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument( - "--no-barcode", - default=False, - action="store_true", - help="Flag for not to display the barcodes", -) - -args = parser.parse_args() - -if is_file_perseus(args.file): - print("#####################################################################") - print("PeriodicCubicalComplex creation") - periodic_cubical_complex = gudhi.PeriodicCubicalComplex(perseus_file=args.file) - - print("persistence(homology_coeff_field=3, min_persistence=0)=") - diag = periodic_cubical_complex.persistence( - homology_coeff_field=3, min_persistence=0 - ) - print(diag) - - print("betti_numbers()=") - print(periodic_cubical_complex.betti_numbers()) - if args.no_barcode == False: - gudhi.plot_persistence_barcode(diag) -else: - print(args.file, "is not a valid perseus style file") diff --git a/src/cython/example/random_cubical_complex_persistence_example.py b/src/cython/example/random_cubical_complex_persistence_example.py deleted file mode 100755 index da0eb177..00000000 --- a/src/cython/example/random_cubical_complex_persistence_example.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import numpy -from functools import reduce -import argparse -import operator - - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="Random cubical complex.", - epilog="Example: " - "./random_cubical_complex_persistence_example.py" - " 10 10 10 - Constructs a random cubical " - "complex in a dimension [10, 10, 10] (aka. 
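The is_file_perseus() helper above only checks the file's line budget: one line for the number of dimensions d, then d lines of grid sizes, then exactly |size_1| * ... * |size_d| top-dimensional cell values. A hand-written toy file satisfying that rule, fed back to PeriodicCubicalComplex (the file name and values are made up; if memory serves, the Perseus convention uses a negative size to flag a periodic direction, which is why the helper takes absolute values):

import gudhi

# 2 dimensions, a 3 x 2 grid, then 3 * 2 = 6 cell values:
# 1 + num_dim + product-of-sizes lines in total.
perseus_content = "2\n3\n2\n1\n4\n6\n8\n20\n4\n"
with open("tiny_perseus.txt", "w") as f:
    f.write(perseus_content)

periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file="tiny_perseus.txt")
print(periodic_cc.persistence(homology_coeff_field=3, min_persistence=0))
print("betti_numbers()=", periodic_cc.betti_numbers())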
" - "1000 random top dimensional cells).", -) -parser.add_argument("dimension", type=int, nargs="*", help="Cubical complex dimensions") - -args = parser.parse_args() -dimension_multiplication = reduce(operator.mul, args.dimension, 1) - -if dimension_multiplication > 1: - print("#####################################################################") - print("CubicalComplex creation") - cubical_complex = gudhi.CubicalComplex( - dimensions=args.dimension, - top_dimensional_cells=numpy.random.rand(dimension_multiplication), - ) - - print("persistence(homology_coeff_field=2, min_persistence=0)=") - print(cubical_complex.persistence(homology_coeff_field=2, min_persistence=0)) - - print("betti_numbers()=") - print(cubical_complex.betti_numbers()) diff --git a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py deleted file mode 100755 index 3571580b..00000000 --- a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import sys -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2017 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2017 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="RipsComplex creation from " "a correlation matrix read in a csv file.", - epilog="Example: " - "example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py " - "-f ../data/correlation_matrix/lower_triangular_correlation_matrix.csv -e 12.0 -d 3" - "- Constructs a Rips complex with the " - "correlation matrix from the given csv file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-c", "--min_edge_correlation", type=float, default=0.5) -parser.add_argument("-d", "--max_dimension", type=int, default=1) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -if not (-1.0 < args.min_edge_correlation < 1.0): - print("Wrong value of the treshold corelation (should be between -1 and 1).") - sys.exit(1) - -print("#####################################################################") -print("Caution: as persistence diagrams points will be under the diagonal,") -print("bottleneck distance and persistence graphical tool will not work") -print("properly, this is a known issue.") - -print("#####################################################################") -print("RipsComplex creation from correlation matrix read in a csv file") - -message = "RipsComplex with min_edge_correlation=" + repr(args.min_edge_correlation) -print(message) - -correlation_matrix = gudhi.read_lower_triangular_matrix_from_csv_file( - csv_file=args.file -) -# Given a correlation matrix M, we compute component-wise M'[i,j] = 1-M[i,j] to get a distance matrix: -distance_matrix = [ - [1.0 - correlation_matrix[i][j] for j in range(len(correlation_matrix[i]))] - for i in range(len(correlation_matrix)) -] - -rips_complex = gudhi.RipsComplex( - 
distance_matrix=distance_matrix, max_edge_length=1.0 - args.min_edge_correlation -) -simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) - -message = "Number of simplices=" + repr(simplex_tree.num_simplices()) -print(message) - -diag = simplex_tree.persistence() - -print("betti_numbers()=") -print(simplex_tree.betti_numbers()) - -# invert the persistence diagram -invert_diag = [ - (diag[pers][0], (1.0 - diag[pers][1][0], 1.0 - diag[pers][1][1])) - for pers in range(len(diag)) -] - -if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(invert_diag, band=args.band) - pplot.show() diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py deleted file mode 100755 index 0b9a9ba9..00000000 --- a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="RipsComplex creation from " "a distance matrix read in a csv file.", - epilog="Example: " - "example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py " - "-f ../data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3" - "- Constructs a Rips complex with the " - "distance matrix from the given csv file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-e", "--max_edge_length", type=float, default=0.5) -parser.add_argument("-d", "--max_dimension", type=int, default=1) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -print("#####################################################################") -print("RipsComplex creation from distance matrix read in a csv file") - -message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length) -print(message) - -distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=args.file) -rips_complex = gudhi.RipsComplex( - distance_matrix=distance_matrix, max_edge_length=args.max_edge_length -) -simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) - -message = "Number of simplices=" + repr(simplex_tree.num_simplices()) -print(message) - -diag = simplex_tree.persistence() - -print("betti_numbers()=") -print(simplex_tree.betti_numbers()) - -if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(diag, band=args.band) - pplot.show() diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py deleted file mode 100755 index 2b335bba..00000000 --- a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of 
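Condensed, the trick used in the correlation-matrix example above is: correlations in [-1, 1] become dissimilarities via 1 - correlation, the Rips complex is built on those, and the resulting births and deaths are mapped back to the correlation scale afterwards. A toy sketch with a made-up lower-triangular correlation matrix:

import gudhi

# Lower-triangular correlation matrix (no diagonal), in the shape
# read_lower_triangular_matrix_from_csv_file would return it.
correlation_matrix = [[], [0.9], [0.2, 0.5]]
# Component-wise 1 - correlation turns similarities into distances.
distance_matrix = [[1.0 - c for c in row] for row in correlation_matrix]

rips = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=0.5)
simplex_tree = rips.create_simplex_tree(max_dimension=2)
diag = simplex_tree.persistence()

# Map births/deaths back to the correlation scale (infinite deaths become
# -inf, exactly as in the example above).
invert_diag = [(dim, (1.0 - birth, 1.0 - death)) for dim, (birth, death) in diag]
print(invert_diag)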
the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="RipsComplex creation from " "points read in a OFF file.", - epilog="Example: " - "example/rips_complex_diagram_persistence_from_off_file_example.py " - "-f ../data/points/tore3D_300.off -a 0.6" - "- Constructs a Rips complex with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-e", "--max_edge_length", type=float, default=0.5) -parser.add_argument("-d", "--max_dimension", type=int, default=1) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -with open(args.file, "r") as f: - first_line = f.readline() - if (first_line == "OFF\n") or (first_line == "nOFF\n"): - print("#####################################################################") - print("RipsComplex creation from points read in a OFF file") - - message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length) - print(message) - - point_cloud = gudhi.read_off(off_file=args.file) - rips_complex = gudhi.RipsComplex( - points=point_cloud, max_edge_length=args.max_edge_length - ) - simplex_tree = rips_complex.create_simplex_tree( - max_dimension=args.max_dimension - ) - - message = "Number of simplices=" + repr(simplex_tree.num_simplices()) - print(message) - - diag = simplex_tree.persistence() - - print("betti_numbers()=") - print(simplex_tree.betti_numbers()) - - if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(diag, band=args.band) - pplot.show() - else: - print(args.file, "is not a valid OFF file") - - f.close() diff --git a/src/cython/example/rips_complex_from_points_example.py b/src/cython/example/rips_complex_from_points_example.py deleted file mode 100755 index 59d8a261..00000000 --- a/src/cython/example/rips_complex_from_points_example.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python - -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("RipsComplex creation from points") -rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42) - -simplex_tree = rips.create_simplex_tree(max_dimension=1) - -print("filtrations=", simplex_tree.get_filtration()) -print("star([0])=", simplex_tree.get_star([0])) -print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1)) diff --git a/src/cython/example/rips_persistence_diagram.py b/src/cython/example/rips_persistence_diagram.py deleted file mode 100755 index f5897d7b..00000000 --- a/src/cython/example/rips_persistence_diagram.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python - -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Marc Glisse" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("RipsComplex creation from points") -rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42) - -simplex_tree = rips.create_simplex_tree(max_dimension=1) - - -diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0) -print("diag=", diag) - -pplot = gudhi.plot_persistence_diagram(diag) -pplot.show() diff --git a/src/cython/example/simplex_tree_example.py b/src/cython/example/simplex_tree_example.py deleted file mode 100755 index 30de00da..00000000 --- a/src/cython/example/simplex_tree_example.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python - -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("SimplexTree creation from insertion") - -st = gudhi.SimplexTree() - -if st.insert([0, 1]): - print("Inserted !!") -else: - print("Not inserted...") - -if st.find([0, 1]): - print("Found !!") -else: - print("Not found...") - -if st.insert([0, 1, 2], filtration=4.0): - print("Inserted !!") -else: - print("Not inserted...") - -print("dimension=", st.dimension()) - -st.initialize_filtration() -print("filtration=", st.get_filtration()) -print("filtration[1, 2]=", st.filtration([1, 2])) -print("filtration[4, 2]=", st.filtration([4, 2])) - -print("num_simplices=", st.num_simplices()) -print("num_vertices=", st.num_vertices()) - -print("skeleton[2]=", st.get_skeleton(2)) -print("skeleton[1]=", st.get_skeleton(1)) -print("skeleton[0]=", st.get_skeleton(0)) diff --git a/src/cython/example/sparse_rips_persistence_diagram.py b/src/cython/example/sparse_rips_persistence_diagram.py deleted file mode 100755 index 671d5e34..00000000 --- a/src/cython/example/sparse_rips_persistence_diagram.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Marc Glisse" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("Sparse RipsComplex creation from points") -rips = gudhi.RipsComplex( - points=[[0, 0], [0, 0.1], [1, 0], [0, 1], [1, 1]], max_edge_length=42, sparse=0.5 -) - -simplex_tree = rips.create_simplex_tree(max_dimension=2) - - -diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0) -print("diag=", diag) - -pplot = gudhi.plot_persistence_diagram(diag) -pplot.show() diff --git a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py deleted file mode 100755 index 456bc9eb..00000000 --- a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
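Two behaviours the simplex_tree_example above relies on without spelling them out: insert() reports whether the simplex was actually new, and inserting a simplex creates its missing faces at the same filtration value while faces that already exist keep their earlier (smaller) values. A small sketch with arbitrary simplices and values:

import gudhi

st = gudhi.SimplexTree()
# insert() returns True only when the simplex was not already present.
print(st.insert([0, 1], filtration=0.5))   # True
print(st.insert([0, 1], filtration=0.5))   # False: already in the tree
# Inserting the triangle creates its missing faces at filtration 4.0 ...
st.insert([0, 1, 2], filtration=4.0)
# ... but the pre-existing edge keeps its earlier value.
print(st.filtration([0, 1]))   # 0.5
print(st.filtration([1, 2]))   # 4.0
print(st.find([0, 2]), st.find([0, 3]))   # True False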
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="TangentialComplex creation from " "points read in a OFF file.", - epilog="Example: " - "example/tangential_complex_plain_homology_from_off_file_example.py " - "-f ../data/points/tore3D_300.off -i 3" - "- Constructs a tangential complex with the " - "points from the given OFF file", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-i", "--intrisic_dim", type=int, required=True) -parser.add_argument("-b", "--band", type=float, default=0.0) -parser.add_argument( - "--no-diagram", - default=False, - action="store_true", - help="Flag for not to display the diagrams", -) - -args = parser.parse_args() - -with open(args.file, "r") as f: - first_line = f.readline() - if (first_line == "OFF\n") or (first_line == "nOFF\n"): - print("#####################################################################") - print("TangentialComplex creation from points read in a OFF file") - - tc = gudhi.TangentialComplex(intrisic_dim=args.intrisic_dim, off_file=args.file) - tc.compute_tangential_complex() - st = tc.create_simplex_tree() - - message = "Number of simplices=" + repr(st.num_simplices()) - print(message) - - diag = st.persistence(persistence_dim_max=True) - - print("betti_numbers()=") - print(st.betti_numbers()) - - if args.no_diagram == False: - pplot = gudhi.plot_persistence_diagram(diag, band=args.band) - pplot.show() - else: - print(args.file, "is not a valid OFF file") - - f.close() diff --git a/src/cython/example/voronoi_graph_induced_complex.py b/src/cython/example/voronoi_graph_induced_complex.py deleted file mode 100755 index 38be6c92..00000000 --- a/src/cython/example/voronoi_graph_induced_complex.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python - -import gudhi -import argparse - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" - -parser = argparse.ArgumentParser( - description="Voronoi GIC " "from points read in a OFF file.", - epilog="Example: " - "example/voronoi_graph_induced_complex.py " - "-f ../data/points/human.off -n 700 -v" - "- Constructs the Voronoi GIC with the " - "points from the given OFF file.", -) -parser.add_argument("-f", "--file", type=str, required=True) -parser.add_argument("-n", "--subsample-nb-points", type=int, default=100) -parser.add_argument( - "-v", - "--verbose", - default=False, - action="store_true", - help="Flag for program verbosity", -) - -args = parser.parse_args() - -nerve_complex = gudhi.CoverComplex() -nerve_complex.set_verbose(args.verbose) - -if nerve_complex.read_point_cloud(args.file): - nerve_complex.set_type("GIC") - nerve_complex.set_color_from_coordinate() - nerve_complex.set_graph_from_OFF() - nerve_complex.set_cover_from_Voronoi(args.subsample_nb_points) - nerve_complex.find_simplices() - nerve_complex.plot_off() - simplex_tree = nerve_complex.create_simplex_tree() - nerve_complex.compute_PD() - if args.verbose: - print("Iterator on graph induced complex simplices") - result_str = ( - "Graph induced complex is of dimension " - + repr(simplex_tree.dimension()) - + " - " - + repr(simplex_tree.num_simplices()) - + " simplices - " - + repr(simplex_tree.num_vertices()) - + " vertices." - ) - print(result_str) - for filtered_value in simplex_tree.get_filtration(): - print(filtered_value[0]) diff --git a/src/cython/example/witness_complex_from_nearest_landmark_table.py b/src/cython/example/witness_complex_from_nearest_landmark_table.py deleted file mode 100755 index c04a82b2..00000000 --- a/src/cython/example/witness_complex_from_nearest_landmark_table.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python - -from gudhi import StrongWitnessComplex, SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -print("#####################################################################") -print("WitnessComplex creation from nearest landmark table") -nearest_landmark_table = [ - [[0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]], - [[1, 0.0], [2, 0.1], [3, 0.2], [4, 0.3], [0, 0.4]], - [[2, 0.0], [3, 0.1], [4, 0.2], [0, 0.3], [1, 0.4]], - [[3, 0.0], [4, 0.1], [0, 0.2], [1, 0.3], [2, 0.4]], - [[4, 0.0], [0, 0.1], [1, 0.2], [2, 0.3], [3, 0.4]], -] - -witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table) -simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=0.41) - -message = "Number of simplices: " + repr(simplex_tree.num_simplices()) -print(message) - -diag = simplex_tree.persistence(min_persistence=-0.1, homology_coeff_field=11) -print(diag) diff --git a/src/cython/gudhi/__init__.py b/src/cython/gudhi/__init__.py deleted file mode 100644 index fde749eb..00000000 --- a/src/cython/gudhi/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Fake empty __init__.py for cython to accept this directory as a Python package diff --git a/src/cython/gudhi/__init__.py.in b/src/cython/gudhi/__init__.py.in deleted file mode 100644 index b2d2d3d7..00000000 --- a/src/cython/gudhi/__init__.py.in +++ /dev/null @@ -1,40 +0,0 @@ -from importlib import import_module - -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "GUDHI Editorial Board" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "https://gudhi.inria.fr/licensing/" -__version__ = "@GUDHI_VERSION@" -# This variable is used by doctest to find files -__root_source_dir__ = "@CMAKE_SOURCE_DIR@" -__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@ - -from sys import exc_info -from importlib import import_module - -__all__ = [@GUDHI_CYTHON_MODULES@] - -__available_modules__ = '' -__missing_modules__ = '' - -# try to import * from gudhi.__module_name__ -for __module_name__ in __all__: - try: - __module__ = import_module('gudhi.' + __module_name__) - try: - __to_import__ = __module__.__all__ - except AttributeError: - __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')] - globals().update({name: __module__.__dict__[name] for name in __to_import__}) - __available_modules__ += __module_name__ + ";" - except: - __missing_modules__ += __module_name__ + ";" diff --git a/src/cython/gudhi/alpha_complex.pyx b/src/cython/gudhi/alpha_complex.pyx deleted file mode 100644 index 6d6309db..00000000 --- a/src/cython/gudhi/alpha_complex.pyx +++ /dev/null @@ -1,116 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -from libc.stdint cimport intptr_t -import os - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
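The __init__.py.in template above implements a tolerant "import whatever was built" pattern: each optional Cython module is imported in turn, its public names are re-exported at package level, and failures are only recorded. A stripped-down generic sketch of that pattern (the module list is a placeholder for what CMake injects via @GUDHI_CYTHON_MODULES@, and unlike the template it catches ImportError rather than every exception):

from importlib import import_module

# Placeholder module list; the real template gets it from CMake.
_optional_modules = ["simplex_tree", "alpha_complex", "bottleneck"]

__available_modules__ = ""
__missing_modules__ = ""

for _name in _optional_modules:
    try:
        _mod = import_module("gudhi." + _name)
        # Re-export the module's public names at package level.
        _public = getattr(_mod, "__all__",
                          [n for n in _mod.__dict__ if not n.startswith("_")])
        globals().update({n: _mod.__dict__[n] for n in _public})
        __available_modules__ += _name + ";"
    except ImportError:
        __missing_modules__ += _name + ";"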
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": - cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": - Alpha_complex_interface(vector[vector[double]] points) - # bool from_file is a workaround for cython to find the correct signature - Alpha_complex_interface(string off_file, bool from_file) - vector[double] get_point(int vertex) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - -# AlphaComplex python interface -cdef class AlphaComplex: - """AlphaComplex is a simplicial complex constructed from the finite cells - of a Delaunay Triangulation. - - The filtration value of each simplex is computed as the square of the - circumradius of the simplex if the circumsphere is empty (the simplex is - then said to be Gabriel), and as the minimum of the filtration values of - the codimension 1 cofaces that make it not Gabriel otherwise. - - All simplices that have a filtration value strictly greater than a given - alpha squared value are not inserted into the complex. - - .. note:: - - When Alpha_complex is constructed with an infinite value of alpha, the - complex is a Delaunay complex. - - """ - - cdef Alpha_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, points=None, off_file=''): - """AlphaComplex constructor. - - :param points: A list of points in d-Dimension. - :type points: list of list of double - - Or - - :param off_file: An OFF file style name. - :type off_file: string - """ - - # The real cython constructor - def __cinit__(self, points=None, off_file=''): - if off_file is not '': - if os.path.isfile(off_file): - self.thisptr = new Alpha_complex_interface(str.encode(off_file), True) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty Alpha construction - points=[] - self.thisptr = new Alpha_complex_interface(points) - - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if AlphaComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int - :rtype: list of float - :returns: the point. - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - - def create_simplex_tree(self, max_alpha_square=float('inf')): - """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity, and - there is very little point using anything else since it does - not save time. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. 
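A minimal usage sketch of the AlphaComplex wrapper documented above, with a hand-picked 2D point set and an arbitrary alpha-square threshold:

import gudhi

points = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0], [0.0, 14.0]]
alpha_complex = gudhi.AlphaComplex(points=points)
simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0)

print("Point of vertex 0:", alpha_complex.get_point(0))
print("Number of simplices=", simplex_tree.num_simplices())
print(simplex_tree.persistence())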
- :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - self.thisptr.create_simplex_tree(stree_int_ptr, max_alpha_square) - return stree diff --git a/src/cython/gudhi/bottleneck.pyx b/src/cython/gudhi/bottleneck.pyx deleted file mode 100644 index 4b378cbc..00000000 --- a/src/cython/gudhi/bottleneck.pyx +++ /dev/null @@ -1,49 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": - double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) - double bottleneck(vector[pair[double, double]], vector[pair[double, double]]) - -def bottleneck_distance(diagram_1, diagram_2, e=None): - """This function returns the point corresponding to a given vertex. - - :param diagram_1: The first diagram. - :type diagram_1: vector[pair[double, double]] - :param diagram_2: The second diagram. - :type diagram_2: vector[pair[double, double]] - :param e: If `e` is 0, this uses an expensive algorithm to compute the - exact distance. - If `e` is not 0, it asks for an additive `e`-approximation, and - currently also allows a small multiplicative error (the last 2 or 3 - bits of the mantissa may be wrong). This version of the algorithm takes - advantage of the limited precision of `double` and is usually a lot - faster to compute, whatever the value of `e`. - - Thus, by default, `e` is the smallest positive double. - :type e: float - :rtype: float - :returns: the bottleneck distance. - """ - if e is None: - # Default value is the smallest double value (not 0, 0 is for exact version) - return bottleneck(diagram_1, diagram_2) - else: - # Can be 0 for exact version - return bottleneck(diagram_1, diagram_2, e) diff --git a/src/cython/gudhi/cubical_complex.pyx b/src/cython/gudhi/cubical_complex.pyx deleted file mode 100644 index 0dc133d1..00000000 --- a/src/cython/gudhi/cubical_complex.pyx +++ /dev/null @@ -1,188 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
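A usage sketch of bottleneck_distance as documented above, on two made-up diagrams, comparing the exact computation (e = 0) with the default approximate one:

import gudhi

diag1 = [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974)]
diag2 = [(2.8, 4.45), (9.5, 14.1)]

print(gudhi.bottleneck_distance(diag1, diag2, 0))   # exact
print(gudhi.bottleneck_distance(diag1, diag2))      # approximate (default e)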
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": - cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>": - Bitmap_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells) - Bitmap_cubical_complex_base_interface(string perseus_file) - int num_simplices() - int dimension() - -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>": - Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - -# CubicalComplex python interface -cdef class CubicalComplex: - """The CubicalComplex is an example of a structured complex useful in - computational mathematics (specially rigorous numerics) and image - analysis. - """ - cdef Bitmap_cubical_complex_base_interface * thisptr - - cdef Cubical_complex_persistence_interface * pcohptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, dimensions=None, top_dimensional_cells=None, - perseus_file=''): - """CubicalComplex constructor from dimensions and - top_dimensional_cells or from a Perseus-style file name. - - :param dimensions: A list of number of top dimensional cells. - :type dimensions: list of int - :param top_dimensional_cells: A list of cells filtration values. - :type top_dimensional_cells: list of double - - Or - - :param perseus_file: A Perseus-style file name. - :type perseus_file: string - """ - - # The real cython constructor - def __cinit__(self, dimensions=None, top_dimensional_cells=None, - perseus_file=''): - if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file is ''): - self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells) - elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file is not ''): - if os.path.isfile(perseus_file): - self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file)) - else: - print("file " + perseus_file + " not found.") - else: - print("CubicalComplex can be constructed from dimensions and " - "top_dimensional_cells or from a Perseus-style file name.") - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - if self.pcohptr != NULL: - del self.pcohptr - - def __is_defined(self): - """Returns true if CubicalComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def __is_persistence_defined(self): - """Returns true if Persistence pointer is not NULL. - """ - return self.pcohptr != NULL - - def num_simplices(self): - """This function returns the number of all cubes in the complex. - - :returns: int -- the number of all cubes in the complex. - """ - return self.thisptr.num_simplices() - - def dimension(self): - """This function returns the dimension of the complex. - - :returns: int -- the complex dimension. 
- """ - return self.thisptr.dimension() - - def persistence(self, homology_coeff_field=11, min_persistence=0): - """This function returns the persistence of the complex. - - :param homology_coeff_field: The homology coefficient field. Must be a - prime number - :type homology_coeff_field: int. - :param min_persistence: The minimum persistence value to take into - account (strictly greater than min_persistence). Default value is - 0.0. - Sets min_persistence to -1.0 to see all values. - :type min_persistence: float. - :returns: list of pairs(dimension, pair(birth, death)) -- the - persistence of the complex. - """ - if self.pcohptr != NULL: - del self.pcohptr - if self.thisptr != NULL: - self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True) - cdef vector[pair[int, pair[double, double]]] persistence_result - if self.pcohptr != NULL: - persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) - return persistence_result - - def betti_numbers(self): - """This function returns the Betti numbers of the complex. - - :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). - - :note: betti_numbers function requires persistence function to be - launched first. - - :note: betti_numbers function always returns [1, 0, 0, ...] as infinity - filtration cubes are not removed from the complex. - """ - cdef vector[int] bn_result - if self.pcohptr != NULL: - bn_result = self.pcohptr.betti_numbers() - return bn_result - - def persistent_betti_numbers(self, from_value, to_value): - """This function returns the persistent Betti numbers of the complex. - - :param from_value: The persistence birth limit to be added in the - numbers (persistent birth <= from_value). - :type from_value: float. - :param to_value: The persistence death limit to be added in the - numbers (persistent death > to_value). - :type to_value: float. - - :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., - Bn]). - - :note: persistent_betti_numbers function requires persistence - function to be launched first. - """ - cdef vector[int] pbn_result - if self.pcohptr != NULL: - pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) - return pbn_result - - def persistence_intervals_in_dimension(self, dimension): - """This function returns the persistence intervals of the complex in a - specific dimension. - - :param dimension: The specific dimension. - :type dimension: int. - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - - :note: intervals_in_dim function requires persistence function to be - launched first. - """ - cdef vector[pair[double,double]] intervals_result - if self.pcohptr != NULL: - intervals_result = self.pcohptr.intervals_in_dimension(dimension) - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") - return np_array(intervals_result) diff --git a/src/cython/gudhi/euclidean_strong_witness_complex.pyx b/src/cython/gudhi/euclidean_strong_witness_complex.pyx deleted file mode 100644 index 5d6e4fb9..00000000 --- a/src/cython/gudhi/euclidean_strong_witness_complex.pyx +++ /dev/null @@ -1,92 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libc.stdint cimport intptr_t - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface": - Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - vector[double] get_point(unsigned vertex) - -# EuclideanStrongWitnessComplex python interface -cdef class EuclideanStrongWitnessComplex: - """Constructs strong witness complex for given sets of witnesses and - landmarks in Euclidean space. - """ - - cdef Euclidean_strong_witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, landmarks=None, witnesses=None): - """WitnessComplex constructor. - - :param landmarks: A list of landmarks (in the point cloud). - :type landmarks: list of list of double - - :param witnesses: The point cloud. - :type witnesses: list of list of double - """ - - # The real cython constructor - def __cinit__(self, landmarks=None, witnesses=None): - if landmarks is not None and witnesses is not None: - self.thisptr = new Euclidean_strong_witness_complex_interface(landmarks, witnesses) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if WitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): - """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square) - return stree - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int. - :returns: The point. - :rtype: list of float - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - diff --git a/src/cython/gudhi/euclidean_witness_complex.pyx b/src/cython/gudhi/euclidean_witness_complex.pyx deleted file mode 100644 index 2531919b..00000000 --- a/src/cython/gudhi/euclidean_witness_complex.pyx +++ /dev/null @@ -1,92 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libc.stdint cimport intptr_t - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface": - Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - vector[double] get_point(unsigned vertex) - -# EuclideanWitnessComplex python interface -cdef class EuclideanWitnessComplex: - """Constructs (weak) witness complex for given sets of witnesses and - landmarks in Euclidean space. - """ - - cdef Euclidean_witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, landmarks=None, witnesses=None): - """WitnessComplex constructor. - - :param landmarks: A list of landmarks (in the point cloud). - :type landmarks: list of list of double - - :param witnesses: The point cloud. - :type witnesses: list of list of double - """ - - # The real cython constructor - def __cinit__(self, landmarks=None, witnesses=None): - if landmarks is not None and witnesses is not None: - self.thisptr = new Euclidean_witness_complex_interface(landmarks, witnesses) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if WitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): - """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square) - return stree - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int. - :returns: The point. - :rtype: list of float - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - diff --git a/src/cython/gudhi/nerve_gic.pyx b/src/cython/gudhi/nerve_gic.pyx deleted file mode 100644 index 2b230b8c..00000000 --- a/src/cython/gudhi/nerve_gic.pyx +++ /dev/null @@ -1,412 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os -from libc.stdint cimport intptr_t - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "GPL v3" - -cdef extern from "Nerve_gic_interface.h" namespace "Gudhi": - cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface": - Nerve_gic_interface() - double compute_confidence_level_from_distance(double distance) - double compute_distance_from_confidence_level(double alpha) - void compute_distribution(int N) - double compute_p_value() - vector[pair[double, double]] compute_PD() - void find_simplices() - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) - bool read_point_cloud(string off_file_name) - double set_automatic_resolution() - void set_color_from_coordinate(int k) - void set_color_from_file(string color_file_name) - void set_color_from_range(vector[double] color) - void set_cover_from_file(string cover_file_name) - void set_cover_from_function() - void set_cover_from_Euclidean_Voronoi(int m) - void set_function_from_coordinate(int k) - void set_function_from_file(string func_file_name) - void set_function_from_range(vector[double] function) - void set_gain(double g) - double set_graph_from_automatic_euclidean_rips(int N) - void set_graph_from_file(string graph_file_name) - void set_graph_from_OFF() - void set_graph_from_euclidean_rips(double threshold) - void set_mask(int nodemask) - void set_resolution_with_interval_length(double resolution) - void set_resolution_with_interval_number(int resolution) - void set_subsampling(double constant, double power) - void set_type(string type) - void set_verbose(bool verbose) - vector[int] subpopulation(int c) - void write_info() - void plot_DOT() - void plot_OFF() - void set_point_cloud_from_range(vector[vector[double]] cloud) - void set_distances_from_range(vector[vector[double]] distance_matrix) - -# CoverComplex python interface -cdef class CoverComplex: - """Cover complex data structure. - - The data structure is a simplicial complex, representing a Graph Induced - simplicial Complex (GIC) or a Nerve, and whose simplices are computed with - a cover C of a point cloud P, which often comes from the preimages of - intervals covering the image of a function f defined on P. These intervals - are parameterized by their resolution (either their length or their number) - and their gain (percentage of overlap). To compute a GIC, one also needs a - graph G built on top of P, whose cliques with vertices belonging to - different elements of C correspond to the simplices of the GIC. - """ - - cdef Nerve_gic_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self): - """CoverComplex constructor. - """ - - # The real cython constructor - def __cinit__(self): - self.thisptr = new Nerve_gic_interface() - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if CoverComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def set_point_cloud_from_range(self, cloud): - """ Reads and stores the input point cloud from a vector stored in memory. - - :param cloud: Input vector containing the point cloud. - :type cloud: vector[vector[double]] - """ - return self.thisptr.set_point_cloud_from_range(cloud) - - def set_distances_from_range(self, distance_matrix): - """ Reads and stores the input distance matrix from a vector stored in memory. 
- - :param distance_matrix: Input vector containing the distance matrix. - :type distance_matrix: vector[vector[double]] - """ - return self.thisptr.set_distances_from_range(distance_matrix) - - def compute_confidence_level_from_distance(self, distance): - """Computes the confidence level of a specific bottleneck distance - threshold. - - :param distance: Bottleneck distance. - :type distance: double - :rtype: double - :returns: Confidence level. - """ - return self.thisptr.compute_confidence_level_from_distance(distance) - - def compute_distance_from_confidence_level(self, alpha): - """Computes the bottleneck distance threshold corresponding to a - specific confidence level. - - :param alpha: Confidence level. - :type alpha: double - :rtype: double - :returns: Bottleneck distance. - """ - return self.thisptr.compute_distance_from_confidence_level(alpha) - - def compute_distribution(self, N=100): - """Computes bootstrapped distances distribution. - - :param N: Loop number (default value is 100). - :type alpha: int - """ - self.thisptr.compute_distribution(N) - - def compute_p_value(self): - """Computes the p-value, i.e. the opposite of the confidence level of - the largest bottleneck distance preserving the points in the - persistence diagram of the output simplicial complex. - - :rtype: double - :returns: p-value. - """ - return self.thisptr.compute_p_value() - - def compute_PD(self): - """Computes the extended persistence diagram of the complex. - """ - return self.thisptr.compute_PD() - - def create_simplex_tree(self): - """ - :returns: A simplex tree created from the Cover complex. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - self.thisptr.create_simplex_tree(stree_int_ptr) - return stree - - def find_simplices(self): - """Computes the simplices of the simplicial complex. - """ - self.thisptr.find_simplices() - - def read_point_cloud(self, off_file): - """Reads and stores the input point cloud from .(n)OFF file. - - :param off_file: Name of the input .OFF or .nOFF file. - :type off_file: string - :rtype: bool - :returns: Read file status. - """ - if os.path.isfile(off_file): - return self.thisptr.read_point_cloud(str.encode(off_file)) - else: - print("file " + off_file + " not found.") - return False - - def set_automatic_resolution(self): - """Computes the optimal length of intervals (i.e. the smallest interval - length avoiding discretization artifacts—see :cite:`Carriere17c`) for a - functional cover. - - :rtype: double - :returns: reso interval length used to compute the cover. - """ - return self.thisptr.set_automatic_resolution() - - def set_color_from_coordinate(self, k=0): - """Computes the function used to color the nodes of the simplicial - complex from the k-th coordinate. - - :param k: Coordinate to use (start at 0). Default value is 0. - :type k: int - """ - return self.thisptr.set_color_from_coordinate(k) - - def set_color_from_file(self, color_file_name): - """Computes the function used to color the nodes of the simplicial - complex from a file containing the function values. - - :param color_file_name: Name of the input color file. - :type color_file_name: string - """ - if os.path.isfile(color_file_name): - self.thisptr.set_color_from_file(str.encode(color_file_name)) - else: - print("file " + color_file_name + " not found.") - - def set_color_from_range(self, color): - """Computes the function used to color the nodes of the simplicial - complex from a vector stored in memory. - - :param color: Input vector of values. 
- :type color: vector[double] - """ - self.thisptr.set_color_from_range(color) - - def set_cover_from_file(self, cover_file_name): - """Creates the cover C from a file containing the cover elements of - each point (the order has to be the same as in the input file!). - - :param cover_file_name: Name of the input cover file. - :type cover_file_name: string - """ - if os.path.isfile(cover_file_name): - self.thisptr.set_cover_from_file(str.encode(cover_file_name)) - else: - print("file " + cover_file_name + " not found.") - - def set_cover_from_function(self): - """Creates a cover C from the preimages of the function f. - """ - self.thisptr.set_cover_from_function() - - def set_cover_from_Voronoi(self, m=100): - """Creates the cover C from the Voronoï cells of a subsampling of the - point cloud. - - :param m: Number of points in the subsample. Default value is 100. - :type m: int - """ - self.thisptr.set_cover_from_Euclidean_Voronoi(m) - - def set_function_from_coordinate(self, k): - """Creates the function f from the k-th coordinate of the point cloud. - - :param k: Coordinate to use (start at 0). - :type k: int - """ - self.thisptr.set_function_from_coordinate(k) - - def set_function_from_file(self, func_file_name): - """Creates the function f from a file containing the function values. - - :param func_file_name: Name of the input function file. - :type func_file_name: string - """ - if os.path.isfile(func_file_name): - self.thisptr.set_function_from_file(str.encode(func_file_name)) - else: - print("file " + func_file_name + " not found.") - - def set_function_from_range(self, function): - """Creates the function f from a vector stored in memory. - - :param function: Input vector of values. - :type function: vector[double] - """ - self.thisptr.set_function_from_range(function) - - def set_gain(self, g = 0.3): - """Sets a gain from a value stored in memory. - - :param g: Gain (default value is 0.3). - :type g: double - """ - self.thisptr.set_gain(g) - - def set_graph_from_automatic_rips(self, N=100): - """Creates a graph G from a Rips complex whose threshold value is - automatically tuned with subsampling—see. - - :param N: Number of subsampling iteration (the default reasonable value - is 100, but there is no guarantee on how to choose it). - :type N: int - :rtype: double - :returns: Delta threshold used for computing the Rips complex. - """ - return self.thisptr.set_graph_from_automatic_euclidean_rips(N) - - def set_graph_from_file(self, graph_file_name): - """Creates a graph G from a file containing the edges. - - :param graph_file_name: Name of the input graph file. The graph file - contains one edge per line, each edge being represented by the IDs of - its two nodes. - :type graph_file_name: string - """ - if os.path.isfile(graph_file_name): - self.thisptr.set_graph_from_file(str.encode(graph_file_name)) - else: - print("file " + graph_file_name + " not found.") - - def set_graph_from_OFF(self): - """Creates a graph G from the triangulation given by the input OFF - file. - """ - self.thisptr.set_graph_from_OFF() - - def set_graph_from_rips(self, threshold): - """Creates a graph G from a Rips complex. - - :param threshold: Threshold value for the Rips complex. - :type threshold: double - """ - self.thisptr.set_graph_from_euclidean_rips(threshold) - - def set_mask(self, nodemask): - """Sets the mask, which is a threshold integer such that nodes in the - complex that contain a number of data points which is less than or - equal to this threshold are not displayed. 
- - :param nodemask: Threshold. - :type nodemask: int - """ - self.thisptr.set_mask(nodemask) - - def set_resolution_with_interval_length(self, resolution): - """Sets a length of intervals from a value stored in memory. - - :param resolution: Length of intervals. - :type resolution: double - """ - self.thisptr.set_resolution_with_interval_length(resolution) - - def set_resolution_with_interval_number(self, resolution): - """Sets a number of intervals from a value stored in memory. - - :param resolution: Number of intervals. - :type resolution: int - """ - self.thisptr.set_resolution_with_interval_number(resolution) - - def set_subsampling(self, constant, power): - """Sets the constants used to subsample the data set. These constants - are explained in :cite:`Carriere17c`. - - :param constant: Constant. - :type constant: double - :param power: Power. - :type resolution: double - """ - self.thisptr.set_subsampling(constant, power) - - def set_type(self, type): - """Specifies whether the type of the output simplicial complex. - - :param type: either "GIC" or "Nerve". - :type type: string - """ - self.thisptr.set_type(str.encode(type)) - - def set_verbose(self, verbose): - """Specifies whether the program should display information or not. - - :param verbose: true = display info, false = do not display info. - :type verbose: boolean - """ - self.thisptr.set_verbose(verbose) - - def subpopulation(self, c): - """Returns the data subset corresponding to a specific node of the - created complex. - - :param c: ID of the node. - :type c: int - :rtype: vector[int] - :returns: Vector of IDs of data points. - """ - return self.thisptr.subpopulation(c) - - def write_info(self): - """Creates a .txt file called SC.txt describing the 1-skeleton, which can - then be plotted with e.g. KeplerMapper. - """ - return self.thisptr.write_info() - - def plot_dot(self): - """Creates a .dot file called SC.dot for neato (part of the graphviz - package) once the simplicial complex is computed to get a visualization of - its 1-skeleton in a .pdf file. - """ - return self.thisptr.plot_DOT() - - def plot_off(self): - """Creates a .off file called SC.off for 3D visualization, which contains - the 2-skeleton of the GIC. This function assumes that the cover has been - computed with Voronoi. If data points are in 1D or 2D, the remaining - coordinates of the points embedded in 3D are set to 0. - """ - return self.thisptr.plot_OFF() diff --git a/src/cython/gudhi/off_reader.pyx b/src/cython/gudhi/off_reader.pyx deleted file mode 100644 index 9efd97ff..00000000 --- a/src/cython/gudhi/off_reader.pyx +++ /dev/null @@ -1,38 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.string cimport string -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Off_reader_interface.h" namespace "Gudhi": - vector[vector[double]] read_points_from_OFF_file(string off_file) - -def read_off(off_file=''): - """Read points from OFF file. - - :param off_file: An OFF file style name. - :type off_file: string - - :returns: The point set. 
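A minimal usage sketch of the cover-complex wrapper whose methods are documented in the removed gic.pyx above. The class name CoverComplex and the file name human.off are assumptions (the class definition appears earlier in this patch); the call order follows the docstrings: choose the type, load the data, build the graph, set resolution and gain, compute the cover, then extract the simplex tree.

    import gudhi

    nerve = gudhi.CoverComplex()             # class name assumed from the wrapper above
    nerve.set_verbose(True)
    nerve.set_type('Nerve')
    if nerve.read_point_cloud('human.off'):  # placeholder .OFF file name
        nerve.set_color_from_coordinate(2)       # color nodes by the third coordinate
        nerve.set_function_from_coordinate(2)    # filter function: third coordinate
        nerve.set_graph_from_OFF()               # graph from the OFF triangulation
        nerve.set_resolution_with_interval_number(10)
        nerve.set_gain(0.3)
        nerve.set_cover_from_function()
        nerve.set_mask(0)
        nerve.find_simplices()
        stree = nerve.create_simplex_tree()      # returns a gudhi.SimplexTree
        nerve.write_info()                       # writes SC.txt describing the 1-skeleton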
- :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - return read_points_from_OFF_file(str.encode(off_file)) - else: - print("file " + off_file + " not found.") - return [] - diff --git a/src/cython/gudhi/periodic_cubical_complex.pyx b/src/cython/gudhi/periodic_cubical_complex.pyx deleted file mode 100644 index 724fadd4..00000000 --- a/src/cython/gudhi/periodic_cubical_complex.pyx +++ /dev/null @@ -1,190 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -import os - -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": - cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface>": - Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells, vector[bool] periodic_dimensions) - Periodic_cubical_complex_base_interface(string perseus_file) - int num_simplices() - int dimension() - -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>>": - Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - -# PeriodicCubicalComplex python interface -cdef class PeriodicCubicalComplex: - """The PeriodicCubicalComplex is an example of a structured complex useful - in computational mathematics (specially rigorous numerics) and image - analysis. - """ - cdef Periodic_cubical_complex_base_interface * thisptr - - cdef Periodic_cubical_complex_persistence_interface * pcohptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, dimensions=None, top_dimensional_cells=None, - periodic_dimensions=None, perseus_file=''): - """PeriodicCubicalComplex constructor from dimensions and - top_dimensional_cells or from a Perseus-style file name. - - :param dimensions: A list of number of top dimensional cells. - :type dimensions: list of int - :param top_dimensional_cells: A list of cells filtration values. - :type top_dimensional_cells: list of double - :param periodic_dimensions: A list of top dimensional cells periodicity value. - :type periodic_dimensions: list of boolean - - Or - - :param perseus_file: A Perseus-style file name. 
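The removed off_reader.pyx above exposes a single helper; a short sketch of its use, assuming it is re-exported as gudhi.read_off and with a placeholder file name.

    import gudhi

    points = gudhi.read_off(off_file='points.off')  # returns [] if the file is missing
    print(len(points), "points read")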
- :type perseus_file: string - """ - - # The real cython constructor - def __cinit__(self, dimensions=None, top_dimensional_cells=None, - periodic_dimensions=None, perseus_file=''): - if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file is ''): - self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions) - elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file is not ''): - if os.path.isfile(perseus_file): - self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file)) - else: - print("file " + perseus_file + " not found.") - else: - print("CubicalComplex can be constructed from dimensions and " - "top_dimensional_cells or from a Perseus-style file name.") - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - if self.pcohptr != NULL: - del self.pcohptr - - def __is_defined(self): - """Returns true if PeriodicCubicalComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def __is_persistence_defined(self): - """Returns true if Persistence pointer is not NULL. - """ - return self.pcohptr != NULL - - def num_simplices(self): - """This function returns the number of all cubes in the complex. - - :returns: int -- the number of all cubes in the complex. - """ - return self.thisptr.num_simplices() - - def dimension(self): - """This function returns the dimension of the complex. - - :returns: int -- the complex dimension. - """ - return self.thisptr.dimension() - - def persistence(self, homology_coeff_field=11, min_persistence=0): - """This function returns the persistence of the complex. - - :param homology_coeff_field: The homology coefficient field. Must be a - prime number - :type homology_coeff_field: int. - :param min_persistence: The minimum persistence value to take into - account (strictly greater than min_persistence). Default value is - 0.0. - Sets min_persistence to -1.0 to see all values. - :type min_persistence: float. - :returns: list of pairs(dimension, pair(birth, death)) -- the - persistence of the complex. - """ - if self.pcohptr != NULL: - del self.pcohptr - if self.thisptr != NULL: - self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True) - cdef vector[pair[int, pair[double, double]]] persistence_result - if self.pcohptr != NULL: - persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) - return persistence_result - - def betti_numbers(self): - """This function returns the Betti numbers of the complex. - - :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). - - :note: betti_numbers function requires persistence function to be - launched first. - - :note: betti_numbers function always returns [1, 0, 0, ...] as infinity - filtration cubes are not removed from the complex. - """ - cdef vector[int] bn_result - if self.pcohptr != NULL: - bn_result = self.pcohptr.betti_numbers() - return bn_result - - def persistent_betti_numbers(self, from_value, to_value): - """This function returns the persistent Betti numbers of the complex. - - :param from_value: The persistence birth limit to be added in the - numbers (persistent birth <= from_value). - :type from_value: float. - :param to_value: The persistence death limit to be added in the - numbers (persistent death > to_value). - :type to_value: float. 
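A small sketch of the PeriodicCubicalComplex interface documented in the removed periodic_cubical_complex.pyx above: construction from dimensions, top-dimensional cell values and periodicity flags, followed by persistence. Per the docstrings, betti_numbers() is only meaningful after persistence() has been called; the filtration values are made up for illustration.

    import gudhi

    # A 3 x 3 periodic grid of top-dimensional cells (a torus), one higher value in the middle
    pcc = gudhi.PeriodicCubicalComplex(
        dimensions=[3, 3],
        top_dimensional_cells=[0., 0., 0., 0., 1., 0., 0., 0., 0.],
        periodic_dimensions=[True, True])
    diag = pcc.persistence(homology_coeff_field=2, min_persistence=0)
    print(pcc.betti_numbers())   # requires persistence() to have been launched first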
- - :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., - Bn]). - - :note: persistent_betti_numbers function requires persistence - function to be launched first. - """ - cdef vector[int] pbn_result - if self.pcohptr != NULL: - pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) - return pbn_result - - def persistence_intervals_in_dimension(self, dimension): - """This function returns the persistence intervals of the complex in a - specific dimension. - - :param dimension: The specific dimension. - :type dimension: int. - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - - :note: intervals_in_dim function requires persistence function to be - launched first. - """ - cdef vector[pair[double,double]] intervals_result - if self.pcohptr != NULL: - intervals_result = self.pcohptr.intervals_in_dimension(dimension) - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") - return np_array(intervals_result) diff --git a/src/cython/gudhi/persistence_graphical_tools.py b/src/cython/gudhi/persistence_graphical_tools.py deleted file mode 100644 index 181bc8ea..00000000 --- a/src/cython/gudhi/persistence_graphical_tools.py +++ /dev/null @@ -1,423 +0,0 @@ -from os import path -from math import isfinite -import numpy as np - -from gudhi.reader_utils import read_persistence_intervals_in_dimension -from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau, Bertrand Michel - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau, Bertrand Michel" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def __min_birth_max_death(persistence, band=0.0): - """This function returns (min_birth, max_death) from the persistence. - - :param persistence: The persistence to plot. - :type persistence: list of tuples(dimension, tuple(birth, death)). - :param band: band - :type band: float. - :returns: (float, float) -- (min_birth, max_death). - """ - # Look for minimum birth date and maximum death date for plot optimisation - max_death = 0 - min_birth = persistence[0][1][0] - for interval in reversed(persistence): - if float(interval[1][1]) != float("inf"): - if float(interval[1][1]) > max_death: - max_death = float(interval[1][1]) - if float(interval[1][0]) > max_death: - max_death = float(interval[1][0]) - if float(interval[1][0]) < min_birth: - min_birth = float(interval[1][0]) - if band > 0.0: - max_death += band - return (min_birth, max_death) - - -""" -Only 13 colors for the palette -""" -palette = [ - "#ff0000", - "#00ff00", - "#0000ff", - "#00ffff", - "#ff00ff", - "#ffff00", - "#000000", - "#880000", - "#008800", - "#000088", - "#888800", - "#880088", - "#008888", -] - - -def plot_persistence_barcode( - persistence=[], - persistence_file="", - alpha=0.6, - max_intervals=1000, - max_barcodes=1000, - inf_delta=0.1, - legend=False, -): - """This function plots the persistence bar code from persistence values list - or from a :doc:`persistence file `. - - :param persistence: Persistence intervals values list grouped by dimension. - :type persistence: list of tuples(dimension, tuple(birth, death)). 
- :param persistence_file: A :doc:`persistence file ` style name - (reset persistence if both are set). - :type persistence_file: string - :param alpha: barcode transparency value (0.0 transparent through 1.0 - opaque - default is 0.6). - :type alpha: float. - :param max_intervals: maximal number of intervals to display. - Selected intervals are those with the longest life time. Set it - to 0 to see all. Default value is 1000. - :type max_intervals: int. - :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x - inf_delta)` above :code:`max_death` value. A reasonable value is - between 0.05 and 0.5 - default is 0.1. - :type inf_delta: float. - :param legend: Display the dimension color legend (default is False). - :type legend: boolean. - :returns: A matplotlib object containing horizontal bar plot of persistence - (launch `show()` method on it to display it). - """ - try: - import matplotlib.pyplot as plt - import matplotlib.patches as mpatches - - if persistence_file is not "": - if path.isfile(persistence_file): - # Reset persistence - persistence = [] - diag = read_persistence_intervals_grouped_by_dimension( - persistence_file=persistence_file - ) - for key in diag.keys(): - for persistence_interval in diag[key]: - persistence.append((key, persistence_interval)) - else: - print("file " + persistence_file + " not found.") - return None - - if max_barcodes is not 1000: - print("Deprecated parameter. It has been replaced by max_intervals") - max_intervals = max_barcodes - - if max_intervals > 0 and max_intervals < len(persistence): - # Sort by life time, then takes only the max_intervals elements - persistence = sorted( - persistence, - key=lambda life_time: life_time[1][1] - life_time[1][0], - reverse=True, - )[:max_intervals] - - persistence = sorted(persistence, key=lambda birth: birth[1][0]) - - (min_birth, max_death) = __min_birth_max_death(persistence) - ind = 0 - delta = (max_death - min_birth) * inf_delta - # Replace infinity values with max_death + delta for bar code to be more - # readable - infinity = max_death + delta - axis_start = min_birth - delta - # Draw horizontal bars in loop - for interval in reversed(persistence): - if float(interval[1][1]) != float("inf"): - # Finite death case - plt.barh( - ind, - (interval[1][1] - interval[1][0]), - height=0.8, - left=interval[1][0], - alpha=alpha, - color=palette[interval[0]], - linewidth=0, - ) - else: - # Infinite death case for diagram to be nicer - plt.barh( - ind, - (infinity - interval[1][0]), - height=0.8, - left=interval[1][0], - alpha=alpha, - color=palette[interval[0]], - linewidth=0, - ) - ind = ind + 1 - - if legend: - dimensions = list(set(item[0] for item in persistence)) - plt.legend( - handles=[ - mpatches.Patch(color=palette[dim], label=str(dim)) - for dim in dimensions - ], - loc="lower right", - ) - plt.title("Persistence barcode") - # Ends plot on infinity value and starts a little bit before min_birth - plt.axis([axis_start, infinity, 0, ind]) - return plt - - except ImportError: - print("This function is not available, you may be missing matplotlib.") - - -def plot_persistence_diagram( - persistence=[], - persistence_file="", - alpha=0.6, - band=0.0, - max_intervals=1000, - max_plots=1000, - inf_delta=0.1, - legend=False, -): - """This function plots the persistence diagram from persistence values - list or from a :doc:`persistence file `. - - :param persistence: Persistence intervals values list grouped by dimension. - :type persistence: list of tuples(dimension, tuple(birth, death)). 
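A sketch of how plot_persistence_barcode, defined just above, consumes a persistence list; the intervals are made up and the gudhi-level re-export of the function is an assumption. It returns a matplotlib object, so show() displays it.

    import gudhi

    diag = [(0, (0.0, float('inf'))),   # one infinite 0-dimensional interval
            (0, (0.1, 0.6)),
            (1, (0.3, 0.5))]
    plot = gudhi.plot_persistence_barcode(persistence=diag, alpha=0.6, legend=True)
    plot.show()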
- :param persistence_file: A :doc:`persistence file ` style name - (reset persistence if both are set). - :type persistence_file: string - :param alpha: plot transparency value (0.0 transparent through 1.0 - opaque - default is 0.6). - :type alpha: float. - :param band: band (not displayed if :math:`\leq` 0. - default is 0.) - :type band: float. - :param max_intervals: maximal number of intervals to display. - Selected intervals are those with the longest life time. Set it - to 0 to see all. Default value is 1000. - :type max_intervals: int. - :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x - inf_delta)` above :code:`max_death` value. A reasonable value is - between 0.05 and 0.5 - default is 0.1. - :type inf_delta: float. - :param legend: Display the dimension color legend (default is False). - :type legend: boolean. - :returns: A matplotlib object containing diagram plot of persistence - (launch `show()` method on it to display it). - """ - try: - import matplotlib.pyplot as plt - import matplotlib.patches as mpatches - - if persistence_file is not "": - if path.isfile(persistence_file): - # Reset persistence - persistence = [] - diag = read_persistence_intervals_grouped_by_dimension( - persistence_file=persistence_file - ) - for key in diag.keys(): - for persistence_interval in diag[key]: - persistence.append((key, persistence_interval)) - else: - print("file " + persistence_file + " not found.") - return None - - if max_plots is not 1000: - print("Deprecated parameter. It has been replaced by max_intervals") - max_intervals = max_plots - - if max_intervals > 0 and max_intervals < len(persistence): - # Sort by life time, then takes only the max_intervals elements - persistence = sorted( - persistence, - key=lambda life_time: life_time[1][1] - life_time[1][0], - reverse=True, - )[:max_intervals] - - (min_birth, max_death) = __min_birth_max_death(persistence, band) - delta = (max_death - min_birth) * inf_delta - # Replace infinity values with max_death + delta for diagram to be more - # readable - infinity = max_death + delta - axis_start = min_birth - delta - - # line display of equation : birth = death - x = np.linspace(axis_start, infinity, 1000) - # infinity line and text - plt.plot(x, x, color="k", linewidth=1.0) - plt.plot(x, [infinity] * len(x), linewidth=1.0, color="k", alpha=alpha) - plt.text(axis_start, infinity, r"$\infty$", color="k", alpha=alpha) - # bootstrap band - if band > 0.0: - plt.fill_between(x, x, x + band, alpha=alpha, facecolor="red") - - # Draw points in loop - for interval in reversed(persistence): - if float(interval[1][1]) != float("inf"): - # Finite death case - plt.scatter( - interval[1][0], - interval[1][1], - alpha=alpha, - color=palette[interval[0]], - ) - else: - # Infinite death case for diagram to be nicer - plt.scatter( - interval[1][0], infinity, alpha=alpha, color=palette[interval[0]] - ) - - if legend: - dimensions = list(set(item[0] for item in persistence)) - plt.legend( - handles=[ - mpatches.Patch(color=palette[dim], label=str(dim)) - for dim in dimensions - ] - ) - - plt.title("Persistence diagram") - plt.xlabel("Birth") - plt.ylabel("Death") - # Ends plot on infinity value and starts a little bit before min_birth - plt.axis([axis_start, infinity, axis_start, infinity + delta]) - return plt - - except ImportError: - print("This function is not available, you may be missing matplotlib.") - - -def plot_persistence_density( - persistence=[], - persistence_file="", - nbins=300, - bw_method=None, - max_intervals=1000, - 
dimension=None, - cmap=None, - legend=False, -): - """This function plots the persistence density from persistence - values list or from a :doc:`persistence file `. Be - aware that this function does not distinguish the dimension, it is - up to you to select the required one. This function also does not handle - degenerate data set (scipy correlation matrix inversion can fail). - - :param persistence: Persistence intervals values list grouped by dimension. - :type persistence: list of tuples(dimension, tuple(birth, death)). - :param persistence_file: A :doc:`persistence file ` - style name (reset persistence if both are set). - :type persistence_file: string - :param nbins: Evaluate a gaussian kde on a regular grid of nbins x - nbins over data extents (default is 300) - :type nbins: int. - :param bw_method: The method used to calculate the estimator - bandwidth. This can be 'scott', 'silverman', a scalar constant - or a callable. If a scalar, this will be used directly as - kde.factor. If a callable, it should take a gaussian_kde - instance as only parameter and return a scalar. If None - (default), 'scott' is used. See - `scipy.stats.gaussian_kde documentation - `_ - for more details. - :type bw_method: str, scalar or callable, optional. - :param max_intervals: maximal number of points used in the density - estimation. - Selected intervals are those with the longest life time. Set it - to 0 to see all. Default value is 1000. - :type max_intervals: int. - :param dimension: the dimension to be selected in the intervals - (default is None to mix all dimensions). - :type dimension: int. - :param cmap: A matplotlib colormap (default is - matplotlib.pyplot.cm.hot_r). - :type cmap: cf. matplotlib colormap. - :param legend: Display the color bar values (default is False). - :type legend: boolean. - :returns: A matplotlib object containing diagram plot of persistence - (launch `show()` method on it to display it). 
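A sketch of a plot_persistence_density call matching the parameters documented above; the intervals are made up and chosen non-degenerate so the scipy kernel density estimate succeeds, and the gudhi-level re-export of the function is an assumption.

    import gudhi

    diag = [(1, (0.10, 0.45)), (1, (0.15, 0.50)), (1, (0.20, 0.55)),
            (1, (0.25, 0.60)), (1, (0.30, 0.40)), (1, (0.35, 0.70))]
    plot = gudhi.plot_persistence_density(persistence=diag, dimension=1,
                                          nbins=100, legend=True)
    plot.show()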
- """ - try: - import matplotlib.pyplot as plt - from scipy.stats import kde - - if persistence_file is not "": - if dimension is None: - # All dimension case - dimension = -1 - if path.isfile(persistence_file): - persistence_dim = read_persistence_intervals_in_dimension( - persistence_file=persistence_file, only_this_dim=dimension - ) - print(persistence_dim) - else: - print("file " + persistence_file + " not found.") - return None - - if len(persistence) > 0: - persistence_dim = np.array( - [ - (dim_interval[1][0], dim_interval[1][1]) - for dim_interval in persistence - if (dim_interval[0] == dimension) or (dimension is None) - ] - ) - - persistence_dim = persistence_dim[np.isfinite(persistence_dim[:, 1])] - if max_intervals > 0 and max_intervals < len(persistence_dim): - # Sort by life time, then takes only the max_intervals elements - persistence_dim = np.array( - sorted( - persistence_dim, - key=lambda life_time: life_time[1] - life_time[0], - reverse=True, - )[:max_intervals] - ) - - # Set as numpy array birth and death (remove undefined values - inf and NaN) - birth = persistence_dim[:, 0] - death = persistence_dim[:, 1] - - # line display of equation : birth = death - x = np.linspace(death.min(), birth.max(), 1000) - plt.plot(x, x, color="k", linewidth=1.0) - - # Evaluate a gaussian kde on a regular grid of nbins x nbins over data extents - k = kde.gaussian_kde([birth, death], bw_method=bw_method) - xi, yi = np.mgrid[ - birth.min() : birth.max() : nbins * 1j, - death.min() : death.max() : nbins * 1j, - ] - zi = k(np.vstack([xi.flatten(), yi.flatten()])) - - # default cmap value cannot be done at argument definition level as matplotlib is not yet defined. - if cmap is None: - cmap = plt.cm.hot_r - # Make the plot - plt.pcolormesh(xi, yi, zi.reshape(xi.shape), cmap=cmap) - - if legend: - plt.colorbar() - - plt.title("Persistence density") - plt.xlabel("Birth") - plt.ylabel("Death") - return plt - - except ImportError: - print( - "This function is not available, you may be missing matplotlib and/or scipy." - ) diff --git a/src/cython/gudhi/reader_utils.pyx b/src/cython/gudhi/reader_utils.pyx deleted file mode 100644 index 147fae71..00000000 --- a/src/cython/gudhi/reader_utils.pyx +++ /dev/null @@ -1,87 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.string cimport string -from libcpp.map cimport map -from libcpp.pair cimport pair - -from os import path -from numpy import array as np_array - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2017 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2017 Inria" -__license__ = "MIT" - -cdef extern from "Reader_utils_interface.h" namespace "Gudhi": - vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator) - map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename) - vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim) - -def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'): - """Read lower triangular matrix from a CSV style file. - - :param csv_file: A CSV file style name. - :type csv_file: string - :param separator: The value separator in the CSV file. 
Default value is ';' - :type separator: char - - :returns: The lower triangular matrix. - :rtype: vector[vector[double]] - """ - if csv_file is not '': - if path.isfile(csv_file): - return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0])) - print("file " + csv_file + " not set or not found.") - return [] - -def read_persistence_intervals_grouped_by_dimension(persistence_file=''): - """Reads a file containing persistence intervals. - Each line might contain 2, 3 or 4 values: [[field] dimension] birth death - The return value is an `map[dim, vector[pair[birth, death]]]` - where `dim` is an `int`, `birth` a `double`, and `death` a `double`. - Note: the function does not check that birth <= death. - - :param persistence_file: A persistence file style name. - :type persistence_file: string - - :returns: The persistence pairs grouped by dimension. - :rtype: map[int, vector[pair[double, double]]] - """ - if persistence_file is not '': - if path.isfile(persistence_file): - return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file)) - print("file " + persistence_file + " not set or not found.") - return [] - -def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1): - """Reads a file containing persistence intervals. - Each line of persistence_file might contain 2, 3 or 4 values: - [[field] dimension] birth death - Note: the function does not check that birth <= death. - - :param persistence_file: A persistence file style name. - :type persistence_file: string - :param only_this_dim: The specific dimension. Default value is -1. - If `only_this_dim` = -1, dimension is ignored and all lines are returned. - If `only_this_dim` is >= 0, only the lines where dimension = - `only_this_dim` (or where dimension is not specified) are returned. - :type only_this_dim: int. - - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - """ - if persistence_file is not '': - if path.isfile(persistence_file): - return np_array(read_pers_intervals_in_dimension(str.encode( - persistence_file), only_this_dim)) - print("file " + persistence_file + " not set or not found.") - return [] diff --git a/src/cython/gudhi/rips_complex.pyx b/src/cython/gudhi/rips_complex.pyx deleted file mode 100644 index f2cd6a8d..00000000 --- a/src/cython/gudhi/rips_complex.pyx +++ /dev/null @@ -1,103 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -from libc.stdint cimport intptr_t - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
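A sketch of the two readers documented in the removed reader_utils.pyx above; the file names are placeholders and the gudhi-level re-export is assumed.

    import gudhi

    # Lower-triangular distance matrix from a ';'-separated CSV file
    matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
        csv_file='distances.csv', separator=';')

    # Persistence intervals of dimension 0 from a persistence file
    intervals = gudhi.read_persistence_intervals_in_dimension(
        persistence_file='diagram.pers', only_this_dim=0)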
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Rips_complex_interface.h" namespace "Gudhi": - cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface": - Rips_complex_interface() - void init_points(vector[vector[double]] values, double threshold) - void init_matrix(vector[vector[double]] values, double threshold) - void init_points_sparse(vector[vector[double]] values, double threshold, double sparse) - void init_matrix_sparse(vector[vector[double]] values, double threshold, double sparse) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max) - -# RipsComplex python interface -cdef class RipsComplex: - """The data structure is a one skeleton graph, or Rips graph, containing - edges when the edge length is less or equal to a given threshold. Edge - length is computed from a user given point cloud with a given distance - function, or a distance matrix. - """ - - cdef Rips_complex_interface thisref - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, points=None, distance_matrix=None, - max_edge_length=float('inf'), sparse=None): - """RipsComplex constructor. - - :param max_edge_length: Rips value. - :type max_edge_length: float - - :param points: A list of points in d-Dimension. - :type points: list of list of double - - Or - - :param distance_matrix: A distance matrix (full square or lower - triangular). - :type points: list of list of double - - And in both cases - - :param sparse: If this is not None, it switches to building a sparse - Rips and represents the approximation parameter epsilon. - :type sparse: float - """ - - # The real cython constructor - def __cinit__(self, points=None, distance_matrix=None, - max_edge_length=float('inf'), sparse=None): - if sparse is not None: - if distance_matrix is not None: - self.thisref.init_matrix_sparse(distance_matrix, - max_edge_length, - sparse) - else: - if points is None: - # Empty Rips construction - points=[] - self.thisref.init_points_sparse(points, max_edge_length, sparse) - else: - if distance_matrix is not None: - self.thisref.init_matrix(distance_matrix, max_edge_length) - else: - if points is None: - # Empty Rips construction - points=[] - self.thisref.init_points(points, max_edge_length) - - - def create_simplex_tree(self, max_dimension=1): - """ - :param max_dimension: graph expansion for rips until this given maximal - dimension. - :type max_dimension: int - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - self.thisref.create_simplex_tree(stree_int_ptr, - max_dimension) - return stree diff --git a/src/cython/gudhi/simplex_tree.pxd b/src/cython/gudhi/simplex_tree.pxd deleted file mode 100644 index 5f86cfe2..00000000 --- a/src/cython/gudhi/simplex_tree.pxd +++ /dev/null @@ -1,56 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp cimport bool -from libcpp.string cimport string - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
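A minimal sketch of the RipsComplex wrapper removed above: build the Rips graph from points and a threshold, expand it into a simplex tree, then compute persistence with the SimplexTree interface also removed in this patch. The point values are illustrative.

    import gudhi

    rips = gudhi.RipsComplex(points=[[0., 0.], [1., 0.], [0., 1.], [1., 1.]],
                             max_edge_length=1.5)
    st = rips.create_simplex_tree(max_dimension=2)   # expand the graph up to triangles
    print(st.num_simplices())
    diag = st.persistence(homology_coeff_field=2, min_persistence=0)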
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": - cdef cppclass Simplex_tree_options_full_featured: - pass - - cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface": - Simplex_tree() - double simplex_filtration(vector[int] simplex) - void assign_simplex_filtration(vector[int] simplex, double filtration) - void initialize_filtration() - int num_vertices() - int num_simplices() - void set_dimension(int dimension) - int dimension() - int upper_bound_dimension() - bool find_simplex(vector[int] simplex) - bool insert_simplex_and_subfaces(vector[int] simplex, - double filtration) - vector[pair[vector[int], double]] get_filtration() - vector[pair[vector[int], double]] get_skeleton(int dimension) - vector[pair[vector[int], double]] get_star(vector[int] simplex) - vector[pair[vector[int], double]] get_cofaces(vector[int] simplex, - int dimension) - void expansion(int max_dim) - void remove_maximal_simplex(vector[int] simplex) - bool prune_above_filtration(double filtration) - bool make_filtration_non_decreasing() - -cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": - Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) - vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) - vector[int] betti_numbers() - vector[int] persistent_betti_numbers(double from_value, double to_value) - vector[pair[double,double]] intervals_in_dimension(int dimension) - void write_output_diagram(string diagram_file_name) - vector[pair[vector[int], vector[int]]] persistence_pairs() diff --git a/src/cython/gudhi/simplex_tree.pyx b/src/cython/gudhi/simplex_tree.pyx deleted file mode 100644 index 9f490271..00000000 --- a/src/cython/gudhi/simplex_tree.pyx +++ /dev/null @@ -1,508 +0,0 @@ -from libc.stdint cimport intptr_t -from numpy import array as np_array -cimport simplex_tree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -# SimplexTree python interface -cdef class SimplexTree: - """The simplex tree is an efficient and flexible data structure for - representing general (filtered) simplicial complexes. The data structure - is described in Jean-Daniel Boissonnat and Clément Maria. The Simplex - Tree: An Efficient Data Structure for General Simplicial Complexes. - Algorithmica, pages 1–22, 2014. - - This class is a filtered, with keys, and non contiguous vertices version - of the simplex tree. 
- """ - # unfortunately 'cdef public Simplex_tree_interface_full_featured* thisptr' is not possible - # Use intptr_t instead to cast the pointer - cdef public intptr_t thisptr - - # Get the pointer casted as it should be - cdef Simplex_tree_interface_full_featured* get_ptr(self): - return (self.thisptr) - - cdef Simplex_tree_persistence_interface * pcohptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self): - """SimplexTree constructor. - """ - - # The real cython constructor - def __cinit__(self): - self.thisptr = (new Simplex_tree_interface_full_featured()) - - def __dealloc__(self): - cdef Simplex_tree_interface_full_featured* ptr = self.get_ptr() - if ptr != NULL: - del ptr - if self.pcohptr != NULL: - del self.pcohptr - - def __is_defined(self): - """Returns true if SimplexTree pointer is not NULL. - """ - return self.get_ptr() != NULL - - def __is_persistence_defined(self): - """Returns true if Persistence pointer is not NULL. - """ - return self.pcohptr != NULL - - def filtration(self, simplex): - """This function returns the filtration value for a given N-simplex in - this simplicial complex, or +infinity if it is not in the complex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :returns: The simplicial complex filtration value. - :rtype: float - """ - return self.get_ptr().simplex_filtration(simplex) - - def assign_filtration(self, simplex, filtration): - """This function assigns the simplicial complex filtration value for a - given N-simplex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :param filtration: The simplicial complex filtration value. - :type filtration: float - """ - self.get_ptr().assign_simplex_filtration(simplex, filtration) - - def initialize_filtration(self): - """This function initializes and sorts the simplicial complex - filtration vector. - - .. note:: - - This function must be launched before - :func:`persistence()`, - :func:`betti_numbers()`, - :func:`persistent_betti_numbers()`, - or :func:`get_filtration()` - after :func:`inserting` or - :func:`removing` - simplices. - """ - self.get_ptr().initialize_filtration() - - def num_vertices(self): - """This function returns the number of vertices of the simplicial - complex. - - :returns: The simplicial complex number of vertices. - :rtype: int - """ - return self.get_ptr().num_vertices() - - def num_simplices(self): - """This function returns the number of simplices of the simplicial - complex. - - :returns: the simplicial complex number of simplices. - :rtype: int - """ - return self.get_ptr().num_simplices() - - def dimension(self): - """This function returns the dimension of the simplicial complex. - - :returns: the simplicial complex dimension. - :rtype: int - - .. note:: - - This function is not constant time because it can recompute - dimension if required (can be triggered by - :func:`remove_maximal_simplex()` - or - :func:`prune_above_filtration()` - methods). - """ - return self.get_ptr().dimension() - - def upper_bound_dimension(self): - """This function returns a valid dimension upper bound of the - simplicial complex. - - :returns: an upper bound on the dimension of the simplicial complex. - :rtype: int - """ - return self.get_ptr().upper_bound_dimension() - - def set_dimension(self, dimension): - """This function sets the dimension of the simplicial complex. - - :param dimension: The new dimension value. - :type dimension: int. - - .. 
note:: - - This function must be used with caution because it disables - dimension recomputation when required - (this recomputation can be triggered by - :func:`remove_maximal_simplex()` - or - :func:`prune_above_filtration()` - ). - """ - self.get_ptr().set_dimension(dimension) - - def find(self, simplex): - """This function returns if the N-simplex was found in the simplicial - complex or not. - - :param simplex: The N-simplex to find, represented by a list of vertex. - :type simplex: list of int. - :returns: true if the simplex was found, false otherwise. - :rtype: bool - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - return self.get_ptr().find_simplex(csimplex) - - def insert(self, simplex, filtration=0.0): - """This function inserts the given N-simplex and its subfaces with the - given filtration value (default value is '0.0'). If some of those - simplices are already present with a higher filtration value, their - filtration value is lowered. - - :param simplex: The N-simplex to insert, represented by a list of - vertex. - :type simplex: list of int. - :param filtration: The filtration value of the simplex. - :type filtration: float. - :returns: true if the simplex was not yet in the complex, false - otherwise (whatever its original filtration value). - :rtype: bool - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - return self.get_ptr().insert_simplex_and_subfaces(csimplex, - filtration) - - def get_filtration(self): - """This function returns a list of all simplices with their given - filtration values. - - :returns: The simplices sorted by increasing filtration values. - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[pair[vector[int], double]] filtration \ - = self.get_ptr().get_filtration() - ct = [] - for filtered_complex in filtration: - v = [] - for vertex in filtered_complex.first: - v.append(vertex) - ct.append((v, filtered_complex.second)) - return ct - - def get_skeleton(self, dimension): - """This function returns the (simplices of the) skeleton of a maximum - given dimension. - - :param dimension: The skeleton dimension value. - :type dimension: int. - :returns: The (simplices of the) skeleton of a maximum dimension. - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[pair[vector[int], double]] skeleton \ - = self.get_ptr().get_skeleton(dimension) - ct = [] - for filtered_simplex in skeleton: - v = [] - for vertex in filtered_simplex.first: - v.append(vertex) - ct.append((v, filtered_simplex.second)) - return ct - - def get_star(self, simplex): - """This function returns the star of a given N-simplex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :returns: The (simplices of the) star of a simplex. - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - cdef vector[pair[vector[int], double]] star \ - = self.get_ptr().get_star(csimplex) - ct = [] - for filtered_simplex in star: - v = [] - for vertex in filtered_simplex.first: - v.append(vertex) - ct.append((v, filtered_simplex.second)) - return ct - - def get_cofaces(self, simplex, codimension): - """This function returns the cofaces of a given N-simplex with a - given codimension. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - :param codimension: The codimension. 
If codimension = 0, all cofaces - are returned (equivalent of get_star function) - :type codimension: int. - :returns: The (simplices of the) cofaces of a simplex - :rtype: list of tuples(simplex, filtration) - """ - cdef vector[int] csimplex - for i in simplex: - csimplex.push_back(i) - cdef vector[pair[vector[int], double]] cofaces \ - = self.get_ptr().get_cofaces(csimplex, codimension) - ct = [] - for filtered_simplex in cofaces: - v = [] - for vertex in filtered_simplex.first: - v.append(vertex) - ct.append((v, filtered_simplex.second)) - return ct - - def remove_maximal_simplex(self, simplex): - """This function removes a given maximal N-simplex from the simplicial - complex. - - :param simplex: The N-simplex, represented by a list of vertex. - :type simplex: list of int. - - .. note:: - - Be aware that removing is shifting data in a flat_map - (:func:`initialize_filtration()` to be done). - - .. note:: - - The dimension of the simplicial complex may be lower after calling - remove_maximal_simplex than it was before. However, - :func:`upper_bound_dimension()` - method will return the old value, which - remains a valid upper bound. If you care, you can call - :func:`dimension()` - to recompute the exact dimension. - """ - self.get_ptr().remove_maximal_simplex(simplex) - - def prune_above_filtration(self, filtration): - """Prune above filtration value given as parameter. - - :param filtration: Maximum threshold value. - :type filtration: float. - :returns: The filtration modification information. - :rtype: bool - - - .. note:: - - Some simplex tree functions require the filtration to be valid. - prune_above_filtration function is not launching - :func:`initialize_filtration()` - but returns the filtration modification - information. If the complex has changed , please call - :func:`initialize_filtration()` - to recompute it. - - .. note:: - - Note that the dimension of the simplicial complex may be lower - after calling - :func:`prune_above_filtration()` - than it was before. However, - :func:`upper_bound_dimension()` - will return the old value, which remains a - valid upper bound. If you care, you can call - :func:`dimension()` - method to recompute the exact dimension. - """ - return self.get_ptr().prune_above_filtration(filtration) - - def expansion(self, max_dim): - """Expands the Simplex_tree containing only its one skeleton - until dimension max_dim. - - The expanded simplicial complex until dimension :math:`d` - attached to a graph :math:`G` is the maximal simplicial complex of - dimension at most :math:`d` admitting the graph :math:`G` as - :math:`1`-skeleton. - The filtration value assigned to a simplex is the maximal filtration - value of one of its edges. - - The Simplex_tree must contain no simplex of dimension bigger than - 1 when calling the method. - - :param max_dim: The maximal dimension. - :type max_dim: int. - """ - self.get_ptr().expansion(max_dim) - - def make_filtration_non_decreasing(self): - """This function ensures that each simplex has a higher filtration - value than its faces by increasing the filtration values. - - :returns: True if any filtration value was modified, - False if the filtration was already non-decreasing. - :rtype: bool - - - .. note:: - - Some simplex tree functions require the filtration to be valid. - make_filtration_non_decreasing function is not launching - :func:`initialize_filtration()` - but returns the filtration modification - information. If the complex has changed , please call - :func:`initialize_filtration()` - to recompute it. 
- """ - return self.get_ptr().make_filtration_non_decreasing() - - def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False): - """This function returns the persistence of the simplicial complex. - - :param homology_coeff_field: The homology coefficient field. Must be a - prime number. Default value is 11. - :type homology_coeff_field: int. - :param min_persistence: The minimum persistence value to take into - account (strictly greater than min_persistence). Default value is - 0.0. - Sets min_persistence to -1.0 to see all values. - :type min_persistence: float. - :param persistence_dim_max: If true, the persistent homology for the - maximal dimension in the complex is computed. If false, it is - ignored. Default is false. - :type persistence_dim_max: bool - :returns: The persistence of the simplicial complex. - :rtype: list of pairs(dimension, pair(birth, death)) - """ - if self.pcohptr != NULL: - del self.pcohptr - self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), persistence_dim_max) - cdef vector[pair[int, pair[double, double]]] persistence_result - if self.pcohptr != NULL: - persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) - return persistence_result - - def betti_numbers(self): - """This function returns the Betti numbers of the simplicial complex. - - :returns: The Betti numbers ([B0, B1, ..., Bn]). - :rtype: list of int - - :note: betti_numbers function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[int] bn_result - if self.pcohptr != NULL: - bn_result = self.pcohptr.betti_numbers() - else: - print("betti_numbers function requires persistence function" - " to be launched first.") - return bn_result - - def persistent_betti_numbers(self, from_value, to_value): - """This function returns the persistent Betti numbers of the - simplicial complex. - - :param from_value: The persistence birth limit to be added in the - numbers (persistent birth <= from_value). - :type from_value: float. - :param to_value: The persistence death limit to be added in the - numbers (persistent death > to_value). - :type to_value: float. - - :returns: The persistent Betti numbers ([B0, B1, ..., Bn]). - :rtype: list of int - - :note: persistent_betti_numbers function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[int] pbn_result - if self.pcohptr != NULL: - pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) - else: - print("persistent_betti_numbers function requires persistence function" - " to be launched first.") - return pbn_result - - def persistence_intervals_in_dimension(self, dimension): - """This function returns the persistence intervals of the simplicial - complex in a specific dimension. - - :param dimension: The specific dimension. - :type dimension: int. - :returns: The persistence intervals. - :rtype: numpy array of dimension 2 - - :note: intervals_in_dim function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[pair[double,double]] intervals_result - if self.pcohptr != NULL: - intervals_result = self.pcohptr.intervals_in_dimension(dimension) - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") - return np_array(intervals_result) - - def persistence_pairs(self): - """This function returns a list of persistence birth and death simplices pairs. - - :returns: A list of persistence simplices intervals. 
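A short sketch exercising the SimplexTree methods documented in the removed simplex_tree.pyx above: insertion with a filtration value, membership, filtration lookup, then persistence and the quantities that depend on it. Values are illustrative.

    import gudhi

    st = gudhi.SimplexTree()
    st.insert([0, 1, 2], filtration=0.4)   # inserts the triangle and all its subfaces
    st.insert([2, 3], filtration=0.6)
    print(st.num_vertices(), st.num_simplices())
    print(st.find([1, 2]))                 # True: [1, 2] is a face of [0, 1, 2]
    print(st.filtration([0, 1, 2]))        # 0.4
    diag = st.persistence(homology_coeff_field=2, min_persistence=0)
    print(st.betti_numbers())              # requires persistence() to be launched first
    print(st.persistence_pairs())          # birth/death simplex pairs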
- :rtype: list of pair of list of int - - :note: persistence_pairs function requires - :func:`persistence()` - function to be launched first. - """ - cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result - if self.pcohptr != NULL: - persistence_pairs_result = self.pcohptr.persistence_pairs() - else: - print("persistence_pairs function requires persistence function" - " to be launched first.") - return persistence_pairs_result - - def write_persistence_diagram(self, persistence_file=''): - """This function writes the persistence intervals of the simplicial - complex in a user given file name. - - :param persistence_file: The specific dimension. - :type persistence_file: string. - - :note: intervals_in_dim function requires - :func:`persistence()` - function to be launched first. - """ - if self.pcohptr != NULL: - if persistence_file != '': - self.pcohptr.write_output_diagram(str.encode(persistence_file)) - else: - print("persistence_file must be specified") - else: - print("intervals_in_dim function requires persistence function" - " to be launched first.") diff --git a/src/cython/gudhi/strong_witness_complex.pyx b/src/cython/gudhi/strong_witness_complex.pyx deleted file mode 100644 index e757abea..00000000 --- a/src/cython/gudhi/strong_witness_complex.pyx +++ /dev/null @@ -1,78 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libc.stdint cimport intptr_t - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface": - Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - -# StrongWitnessComplex python interface -cdef class StrongWitnessComplex: - """Constructs (strong) witness complex for a given table of nearest - landmarks with respect to witnesses. - """ - - cdef Strong_witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, nearest_landmark_table=None): - """StrongWitnessComplex constructor. - - :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. - `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to - witness w, and d is the (squared) distance between l and w. 
- :type nearest_landmark_table: list of list of pair of int and float - """ - - # The real cython constructor - def __cinit__(self, nearest_landmark_table=None): - if nearest_landmark_table is not None: - self.thisptr = new Strong_witness_complex_interface(nearest_landmark_table) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if StrongWitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): - """ - :param max_alpha_square: The maximum relaxation parameter. - Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square) - return stree diff --git a/src/cython/gudhi/subsampling.pyx b/src/cython/gudhi/subsampling.pyx deleted file mode 100644 index 1135c1fb..00000000 --- a/src/cython/gudhi/subsampling.pyx +++ /dev/null @@ -1,130 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.string cimport string -from libcpp cimport bool -import os - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling": - vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points) - vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point) - vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points) - vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point) - vector[vector[double]] subsampling_n_random_points(vector[vector[double]] points, unsigned nb_points) - vector[vector[double]] subsampling_n_random_points_from_file(string off_file, unsigned nb_points) - vector[vector[double]] subsampling_sparsify_points(vector[vector[double]] points, double min_squared_dist) - vector[vector[double]] subsampling_sparsify_points_from_file(string off_file, double min_squared_dist) - -def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_point = ''): - """Subsample by a greedy strategy of iteratively adding the farthest point - from the current chosen point set to the subsampling. - The iteration starts with the landmark `starting point`. - - :param points: The input point set. - :type points: vector[vector[double]]. - - Or - - :param off_file: An OFF file style name. - :type off_file: string - - :param nb_points: Number of points of the subsample. - :type nb_points: unsigned. - :param starting_point: The iteration starts with the landmark `starting \ - point`,which is the index of the poit to start with. If not set, this \ - index is choosen randomly. - :type starting_point: unsigned. 
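A sketch of the StrongWitnessComplex wrapper removed above, using the nearest_landmark_table layout described in its constructor docstring (nearest_landmark_table[w][k] == (l, d)); the distances are made up.

    import gudhi

    nearest_landmark_table = [[(0, 0.0), (1, 0.1), (2, 0.4)],   # witness 0
                              [(1, 0.0), (2, 0.2), (0, 0.5)],   # witness 1
                              [(2, 0.0), (0, 0.3), (1, 0.6)]]   # witness 2
    wc = gudhi.StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table)
    st = wc.create_simplex_tree(max_alpha_square=0.3, limit_dimension=2)
    print(st.num_simplices())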
- :returns: The subsample point set. - :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - if starting_point is '': - return subsampling_n_farthest_points_from_file(str.encode(off_file), - nb_points) - else: - return subsampling_n_farthest_points_from_file(str.encode(off_file), - nb_points, - starting_point) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty points - points=[] - if starting_point is '': - return subsampling_n_farthest_points(points, nb_points) - else: - return subsampling_n_farthest_points(points, nb_points, - starting_point) - -def pick_n_random_points(points=None, off_file='', nb_points=0): - """Subsample a point set by picking random vertices. - - :param points: The input point set. - :type points: vector[vector[double]]. - - Or - - :param off_file: An OFF file style name. - :type off_file: string - - :param nb_points: Number of points of the subsample. - :type nb_points: unsigned. - :returns: The subsample point set. - :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - return subsampling_n_random_points_from_file(str.encode(off_file), - nb_points) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty points - points=[] - return subsampling_n_random_points(points, nb_points) - -def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0): - """Outputs a subset of the input points so that the squared distance - between any two points is greater than or equal to min_squared_dist. - - :param points: The input point set. - :type points: vector[vector[double]]. - - Or - - :param off_file: An OFF file style name. - :type off_file: string - - :param min_squared_dist: Minimum squared distance separating the output \ - points. - :type min_squared_dist: float. - :returns: The subsample point set. - :rtype: vector[vector[double]] - """ - if off_file is not '': - if os.path.isfile(off_file): - return subsampling_sparsify_points_from_file(str.encode(off_file), - min_squared_dist) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty points - points=[] - return subsampling_sparsify_points(points, min_squared_dist) diff --git a/src/cython/gudhi/tangential_complex.pyx b/src/cython/gudhi/tangential_complex.pyx deleted file mode 100644 index 3a945fe2..00000000 --- a/src/cython/gudhi/tangential_complex.pyx +++ /dev/null @@ -1,173 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libcpp.string cimport string -from libcpp cimport bool -from libc.stdint cimport intptr_t -import os - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
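
The three subsampling entry points deleted just above share one calling convention: each takes either an in-memory point set or an OFF file name, plus the parameter that controls the subsample. A minimal usage sketch, assuming the gudhi Python package built from these bindings; the coordinates are illustrative:

    import gudhi

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.5, 0.5]]

    farthest = gudhi.choose_n_farthest_points(points=points, nb_points=3)    # greedy farthest-point strategy
    random_pick = gudhi.pick_n_random_points(points=points, nb_points=3)     # uniform random subset
    sparse = gudhi.sparsify_point_set(points=points, min_squared_dist=0.5)   # keep pairwise squared distance >= 0.5
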
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "GPL v3" - -cdef extern from "Tangential_complex_interface.h" namespace "Gudhi": - cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface": - Tangential_complex_interface(int intrisic_dim, vector[vector[double]] points) - # bool from_file is a workaround for cython to find the correct signature - Tangential_complex_interface(int intrisic_dim, string off_file, bool from_file) - void compute_tangential_complex() except + - vector[double] get_point(unsigned vertex) - unsigned number_of_vertices() - unsigned number_of_simplices() - unsigned number_of_inconsistent_simplices() - unsigned number_of_inconsistent_stars() - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) - void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) - void set_max_squared_edge_length(double max_squared_edge_length) - -# TangentialComplex python interface -cdef class TangentialComplex: - """The class Tangential_complex represents a tangential complex. After the - computation of the complex, an optional post-processing called perturbation - can be run to attempt to remove inconsistencies. - """ - - cdef Tangential_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, intrisic_dim, points=None, off_file=''): - """TangentialComplex constructor. - - :param intrisic_dim: Intrinsic dimension of the manifold. - :type intrisic_dim: integer - - :param points: A list of points in d-Dimension. - :type points: list of list of double - - Or - - :param off_file: An OFF file style name. - :type off_file: string - """ - - # The real cython constructor - def __cinit__(self, intrisic_dim, points=None, off_file=''): - if off_file is not '': - if os.path.isfile(off_file): - self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True) - else: - print("file " + off_file + " not found.") - else: - if points is None: - # Empty tangential construction - points=[] - self.thisptr = new Tangential_complex_interface(intrisic_dim, points) - - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if TangentialComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def compute_tangential_complex(self): - """This function computes the tangential complex. - - Raises: - ValueError: In debug mode, if the computed star dimension is too - low. Try to set a bigger maximal edge length value with - :func:`~gudhi.Tangential_complex.set_max_squared_edge_length` - if this happens. - """ - self.thisptr.compute_tangential_complex() - - def get_point(self, vertex): - """This function returns the point corresponding to a given vertex. - - :param vertex: The vertex. - :type vertex: int. - :returns: The point. - :rtype: list of float - """ - cdef vector[double] point = self.thisptr.get_point(vertex) - return point - - def num_vertices(self): - """ - :returns: The number of vertices. - :rtype: unsigned - """ - return self.thisptr.number_of_vertices() - - def num_simplices(self): - """ - :returns: Total number of simplices in stars (including duplicates that appear in several stars). 
- :rtype: unsigned - """ - return self.thisptr.number_of_simplices() - - def num_inconsistent_simplices(self): - """ - :returns: The number of inconsistent simplices. - :rtype: unsigned - """ - return self.thisptr.number_of_inconsistent_simplices() - - def num_inconsistent_stars(self): - """ - :returns: The number of stars containing at least one inconsistent simplex. - :rtype: unsigned - """ - return self.thisptr.number_of_inconsistent_stars() - - def create_simplex_tree(self): - """Exports the complex into a simplex tree. - - :returns: A simplex tree created from the complex. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - self.thisptr.create_simplex_tree(stree_int_ptr) - return stree - - def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0): - """Attempts to fix inconsistencies by perturbing the point positions. - - :param max_perturb: Maximum length of the translations used by the - perturbation. - :type max_perturb: double - :param time_limit: Time limit in seconds. If -1, no time limit is set. - :type time_limit: double - """ - self.thisptr.fix_inconsistencies_using_perturbation(max_perturb, - time_limit) - - def set_max_squared_edge_length(self, max_squared_edge_length): - """Sets the maximal possible squared edge length for the edges in the - triangulations. - - :param max_squared_edge_length: Maximal possible squared edge length. - :type max_squared_edge_length: double - - If the maximal edge length value is too low - :func:`~gudhi.Tangential_complex.compute_tangential_complex` - will throw an exception in debug mode. - """ - self.thisptr.set_max_squared_edge_length(max_squared_edge_length) diff --git a/src/cython/gudhi/witness_complex.pyx b/src/cython/gudhi/witness_complex.pyx deleted file mode 100644 index baa70b7a..00000000 --- a/src/cython/gudhi/witness_complex.pyx +++ /dev/null @@ -1,78 +0,0 @@ -from cython cimport numeric -from libcpp.vector cimport vector -from libcpp.utility cimport pair -from libc.stdint cimport intptr_t - -from gudhi.simplex_tree cimport * -from gudhi.simplex_tree import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -cdef extern from "Witness_complex_interface.h" namespace "Gudhi": - cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface": - Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, - unsigned limit_dimension) - -# WitnessComplex python interface -cdef class WitnessComplex: - """Constructs (weak) witness complex for a given table of nearest landmarks - with respect to witnesses. - """ - - cdef Witness_complex_interface * thisptr - - # Fake constructor that does nothing but documenting the constructor - def __init__(self, nearest_landmark_table=None): - """WitnessComplex constructor. - - :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. 
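
For the TangentialComplex binding removed above, the constructor only stores the input; the complex is built by compute_tangential_complex(), inconsistencies can then be perturbed away, and the result is exported to a simplex tree. A minimal sketch under the same assumptions (the misspelled keyword intrisic_dim is the actual parameter name of this binding; the points are illustrative):

    import gudhi

    tc = gudhi.TangentialComplex(intrisic_dim=1,
                                 points=[[0.0, 0.0], [1.0, 0.1], [2.0, 0.0], [3.0, 0.1]])
    tc.compute_tangential_complex()
    if tc.num_inconsistent_simplices() > 0:
        tc.fix_inconsistencies_using_perturbation(max_perturb=0.05, time_limit=10.0)
    st = tc.create_simplex_tree()
    print(tc.num_vertices(), st.num_simplices())
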
- `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to - witness w, and d is the (squared) distance between l and w. - :type nearest_landmark_table: list of list of pair of int and float - """ - - # The real cython constructor - def __cinit__(self, nearest_landmark_table=None): - if nearest_landmark_table is not None: - self.thisptr = new Witness_complex_interface(nearest_landmark_table) - - def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr - - def __is_defined(self): - """Returns true if WitnessComplex pointer is not NULL. - """ - return self.thisptr != NULL - - def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): - """ - :param max_alpha_square: The maximum relaxation parameter. - Default is set to infinity. - :type max_alpha_square: float - :returns: A simplex tree created from the Delaunay Triangulation. - :rtype: SimplexTree - """ - stree = SimplexTree() - cdef intptr_t stree_int_ptr=stree.thisptr - if limit_dimension is not -1: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square, limit_dimension) - else: - self.thisptr.create_simplex_tree(stree_int_ptr, - max_alpha_square) - return stree diff --git a/src/cython/include/Alpha_complex_interface.h b/src/cython/include/Alpha_complex_interface.h deleted file mode 100644 index 1199b741..00000000 --- a/src/cython/include/Alpha_complex_interface.h +++ /dev/null @@ -1,72 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ -#define INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ - -#include -#include -#include - -#include "Simplex_tree_interface.h" - -#include -#include -#include - -namespace Gudhi { - -namespace alpha_complex { - -class Alpha_complex_interface { - using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; - using Point_d = Dynamic_kernel::Point_d; - - public: - Alpha_complex_interface(const std::vector>& points) { - alpha_complex_ = new Alpha_complex(points); - } - - Alpha_complex_interface(const std::string& off_file_name, bool from_file = true) { - alpha_complex_ = new Alpha_complex(off_file_name); - } - - ~Alpha_complex_interface() { - delete alpha_complex_; - } - - std::vector get_point(int vh) { - std::vector vd; - try { - Point_d ph = alpha_complex_->get_point(vh); - for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) - vd.push_back(*coord); - } catch (std::out_of_range const&) { - // std::out_of_range is thrown in case not found. Other exceptions must be re-thrown - } - return vd; - } - - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square) { - alpha_complex_->create_complex(*simplex_tree, max_alpha_square); - simplex_tree->initialize_filtration(); - } - - private: - Alpha_complex* alpha_complex_; -}; - -} // namespace alpha_complex - -} // namespace Gudhi - -#endif // INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Bottleneck_distance_interface.h b/src/cython/include/Bottleneck_distance_interface.h deleted file mode 100644 index 22c9a97a..00000000 --- a/src/cython/include/Bottleneck_distance_interface.h +++ /dev/null @@ -1,43 +0,0 @@ -/* This file is part of the Gudhi Library. 
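
WitnessComplex above and StrongWitnessComplex earlier in this patch consume the same nearest_landmark_table: entry [w][k] is the pair (landmark id, squared distance) of the k-th nearest landmark of witness w. A hand-built sketch with two witnesses and three landmarks, assuming the gudhi Python package; the distance values are illustrative:

    import gudhi

    nearest_landmark_table = [
        [(0, 0.0), (1, 0.1), (2, 0.9)],   # witness 0: landmark 0 is closest, then 1, then 2
        [(2, 0.0), (0, 0.2), (1, 0.7)],   # witness 1
    ]
    wc = gudhi.WitnessComplex(nearest_landmark_table=nearest_landmark_table)
    st = wc.create_simplex_tree(max_alpha_square=0.5, limit_dimension=2)
    print(st.num_simplices())
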
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ -#define INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ - -#include - -#include -#include -#include // for std::pair - -namespace Gudhi { - -namespace persistence_diagram { - - // bottleneck_distance function renamed for the python function can be called bottleneck_dstance - double bottleneck(const std::vector>& diag1, - const std::vector>& diag2, - double e) { - return bottleneck_distance(diag1, diag2, e); - } - - double bottleneck(const std::vector>& diag1, - const std::vector>& diag2) { - return bottleneck_distance(diag1, diag2); - } - -} // namespace persistence_diagram - -} // namespace Gudhi - - -#endif // INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ diff --git a/src/cython/include/Cubical_complex_interface.h b/src/cython/include/Cubical_complex_interface.h deleted file mode 100644 index 7d32914c..00000000 --- a/src/cython/include/Cubical_complex_interface.h +++ /dev/null @@ -1,52 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ -#define INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ - -#include -#include -#include - -#include -#include -#include - -namespace Gudhi { - -namespace cubical_complex { - -template> -class Cubical_complex_interface : public Bitmap_cubical_complex { - public: - Cubical_complex_interface(const std::vector& dimensions, - const std::vector& top_dimensional_cells) - : Bitmap_cubical_complex(dimensions, top_dimensional_cells) { - } - - Cubical_complex_interface(const std::vector& dimensions, - const std::vector& top_dimensional_cells, - const std::vector& periodic_dimensions) - : Bitmap_cubical_complex(dimensions, top_dimensional_cells, periodic_dimensions) { - } - - Cubical_complex_interface(const std::string& perseus_file) - : Bitmap_cubical_complex(perseus_file.c_str()) { - } -}; - -} // namespace cubical_complex - -} // namespace Gudhi - -#endif // INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ - diff --git a/src/cython/include/Euclidean_strong_witness_complex_interface.h b/src/cython/include/Euclidean_strong_witness_complex_interface.h deleted file mode 100644 index 90bd54ac..00000000 --- a/src/cython/include/Euclidean_strong_witness_complex_interface.h +++ /dev/null @@ -1,83 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ -#define INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ - -#include -#include - -#include "Simplex_tree_interface.h" - -#include - -#include -#include // std::pair -#include -#include - -namespace Gudhi { - -namespace witness_complex { - - -class Euclidean_strong_witness_complex_interface { - using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; - using Point_d = Dynamic_kernel::Point_d; - - typedef typename Simplex_tree<>::Simplex_key Simplex_key; - - public: - Euclidean_strong_witness_complex_interface(const std::vector>& landmarks, - const std::vector>& witnesses) { - landmarks_.reserve(landmarks.size()); - for (auto& landmark : landmarks) - landmarks_.emplace_back(landmark.begin(), landmark.end()); - witness_complex_ = new Euclidean_strong_witness_complex(landmarks_, witnesses); - } - - ~Euclidean_strong_witness_complex_interface() { - delete witness_complex_; - } - - void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, - std::size_t limit_dimension) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); - simplex_tree->initialize_filtration(); - } - - void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square); - simplex_tree->initialize_filtration(); - } - - std::vector get_point(unsigned vh) { - std::vector vd; - if (vh < landmarks_.size()) { - Point_d ph = witness_complex_->get_point(vh); - for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) - vd.push_back(*coord); - } - return vd; - } - - private: - std::vector landmarks_; - Euclidean_strong_witness_complex* witness_complex_; -}; - -} // namespace witness_complex - -} // namespace Gudhi - -#endif // INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ - diff --git a/src/cython/include/Euclidean_witness_complex_interface.h b/src/cython/include/Euclidean_witness_complex_interface.h deleted file mode 100644 index 0c01a741..00000000 --- a/src/cython/include/Euclidean_witness_complex_interface.h +++ /dev/null @@ -1,82 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ -#define INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ - -#include -#include - -#include "Simplex_tree_interface.h" - -#include - -#include -#include // std::pair -#include -#include - -namespace Gudhi { - -namespace witness_complex { - - -class Euclidean_witness_complex_interface { - using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; - using Point_d = Dynamic_kernel::Point_d; - - typedef typename Simplex_tree<>::Simplex_key Simplex_key; - - public: - Euclidean_witness_complex_interface(const std::vector>& landmarks, - const std::vector>& witnesses) { - landmarks_.reserve(landmarks.size()); - for (auto& landmark : landmarks) - landmarks_.emplace_back(landmark.begin(), landmark.end()); - witness_complex_ = new Euclidean_witness_complex(landmarks_, witnesses); - } - - ~Euclidean_witness_complex_interface() { - delete witness_complex_; - } - - void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, std::size_t limit_dimension) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); - simplex_tree->initialize_filtration(); - } - - void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square); - simplex_tree->initialize_filtration(); - } - - std::vector get_point(unsigned vh) { - std::vector vd; - if (vh < landmarks_.size()) { - Point_d ph = witness_complex_->get_point(vh); - for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) - vd.push_back(*coord); - } - return vd; - } - - private: - std::vector landmarks_; - Euclidean_witness_complex* witness_complex_; -}; - -} // namespace witness_complex - -} // namespace Gudhi - -#endif // INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ - diff --git a/src/cython/include/Nerve_gic_interface.h b/src/cython/include/Nerve_gic_interface.h deleted file mode 100644 index 729b39fb..00000000 --- a/src/cython/include/Nerve_gic_interface.h +++ /dev/null @@ -1,51 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2018 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_NERVE_GIC_INTERFACE_H_ -#define INCLUDE_NERVE_GIC_INTERFACE_H_ - -#include -#include -#include - -#include "Simplex_tree_interface.h" - -#include -#include -#include - -namespace Gudhi { - -namespace cover_complex { - -class Nerve_gic_interface : public Cover_complex> { - public: - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree) { - create_complex(*simplex_tree); - simplex_tree->initialize_filtration(); - } - void set_cover_from_Euclidean_Voronoi(int m) { - set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m); - } - double set_graph_from_automatic_euclidean_rips(int N) { - return set_graph_from_automatic_rips(Gudhi::Euclidean_distance(), N); - } - void set_graph_from_euclidean_rips(double threshold) { - set_graph_from_rips(threshold, Gudhi::Euclidean_distance()); - } -}; - -} // namespace cover_complex - -} // namespace Gudhi - -#endif // INCLUDE_NERVE_GIC_INTERFACE_H_ diff --git a/src/cython/include/Off_reader_interface.h b/src/cython/include/Off_reader_interface.h deleted file mode 100644 index 4b3643be..00000000 --- a/src/cython/include/Off_reader_interface.h +++ /dev/null @@ -1,32 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_OFF_READER_INTERFACE_H_ -#define INCLUDE_OFF_READER_INTERFACE_H_ - -#include - -#include -#include -#include - -namespace Gudhi { - -std::vector> read_points_from_OFF_file(const std::string& off_file) { - Gudhi::Points_off_reader> off_reader(off_file); - return off_reader.get_point_cloud(); -} - -} // namespace Gudhi - -#endif // INCLUDE_OFF_READER_INTERFACE_H_ - diff --git a/src/cython/include/Persistent_cohomology_interface.h b/src/cython/include/Persistent_cohomology_interface.h deleted file mode 100644 index 64e2ddc8..00000000 --- a/src/cython/include/Persistent_cohomology_interface.h +++ /dev/null @@ -1,111 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ -#define INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ - -#include - -#include -#include // for std::pair -#include // for sort - -namespace Gudhi { - -template -class Persistent_cohomology_interface : public -persistent_cohomology::Persistent_cohomology { - private: - /* - * Compare two intervals by dimension, then by length. 
- */ - struct cmp_intervals_by_dim_then_length { - explicit cmp_intervals_by_dim_then_length(FilteredComplex * sc) - : sc_(sc) { } - - template - bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) { - if (sc_->dimension(get < 0 > (p1)) == sc_->dimension(get < 0 > (p2))) - return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1)) - > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2))); - else - return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2))); - } - FilteredComplex* sc_; - }; - - public: - Persistent_cohomology_interface(FilteredComplex* stptr) - : persistent_cohomology::Persistent_cohomology(*stptr), - stptr_(stptr) { } - - Persistent_cohomology_interface(FilteredComplex* stptr, bool persistence_dim_max) - : persistent_cohomology::Persistent_cohomology(*stptr, persistence_dim_max), - stptr_(stptr) { } - - std::vector>> get_persistence(int homology_coeff_field, - double min_persistence) { - persistent_cohomology::Persistent_cohomology::init_coefficients(homology_coeff_field); - persistent_cohomology::Persistent_cohomology::compute_persistent_cohomology(min_persistence); - - // Custom sort and output persistence - cmp_intervals_by_dim_then_length cmp(stptr_); - auto persistent_pairs = persistent_cohomology::Persistent_cohomology::get_persistent_pairs(); - std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp); - - std::vector>> persistence; - for (auto pair : persistent_pairs) { - persistence.push_back(std::make_pair(stptr_->dimension(get<0>(pair)), - std::make_pair(stptr_->filtration(get<0>(pair)), - stptr_->filtration(get<1>(pair))))); - } - return persistence; - } - - std::vector, std::vector>> persistence_pairs() { - auto pairs = persistent_cohomology::Persistent_cohomology::get_persistent_pairs(); - - std::vector, std::vector>> persistence_pairs; - persistence_pairs.reserve(pairs.size()); - for (auto pair : pairs) { - std::vector birth; - if (get<0>(pair) != stptr_->null_simplex()) { - for (auto vertex : stptr_->simplex_vertex_range(get<0>(pair))) { - birth.push_back(vertex); - } - } - - std::vector death; - if (get<1>(pair) != stptr_->null_simplex()) { - for (auto vertex : stptr_->simplex_vertex_range(get<1>(pair))) { - death.push_back(vertex); - } - } - - persistence_pairs.push_back(std::make_pair(birth, death)); - } - return persistence_pairs; - } - - private: - // A copy - FilteredComplex* stptr_; -}; - -} // namespace Gudhi - -#endif // INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ diff --git a/src/cython/include/Reader_utils_interface.h b/src/cython/include/Reader_utils_interface.h deleted file mode 100644 index 5bddf9ce..00000000 --- a/src/cython/include/Reader_utils_interface.h +++ /dev/null @@ -1,46 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2017 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_READER_UTILS_INTERFACE_H_ -#define INCLUDE_READER_UTILS_INTERFACE_H_ - -#include - -#include -#include -#include -#include -#include // for pair<> - -namespace Gudhi { - -// Redefine functions with a different name in order the original name can be used in the Python version. 
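
The comparator above is what gives the Python persistence() output its ordering: intervals come back sorted by decreasing dimension and, within one dimension, by decreasing length. A minimal sketch, assuming the gudhi Python package built from these bindings:

    import gudhi

    st = gudhi.SimplexTree()
    for edge in ([0, 1], [1, 2], [0, 2]):
        st.insert(edge, filtration=1.0)    # an empty triangle: one 1-cycle born at t = 1
    diag = st.persistence(homology_coeff_field=2, min_persistence=0)
    # diag is a list of (dimension, (birth, death)) pairs, already sorted as described
    # above, so the dimension-1 interval is listed before the dimension-0 one
    print(diag)
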
-std::vector> read_matrix_from_csv_file(const std::string& filename, - const char separator = ';') { - return read_lower_triangular_matrix_from_csv_file(filename, separator); -} - -inline std::map>> - read_pers_intervals_grouped_by_dimension(std::string const& filename) { - return read_persistence_intervals_grouped_by_dimension(filename); -} - -inline std::vector> - read_pers_intervals_in_dimension(std::string const& filename, int only_this_dim = -1) { - return read_persistence_intervals_in_dimension(filename, only_this_dim); -} - - -} // namespace Gudhi - - -#endif // INCLUDE_READER_UTILS_INTERFACE_H_ diff --git a/src/cython/include/Rips_complex_interface.h b/src/cython/include/Rips_complex_interface.h deleted file mode 100644 index f818a2ed..00000000 --- a/src/cython/include/Rips_complex_interface.h +++ /dev/null @@ -1,72 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_RIPS_COMPLEX_INTERFACE_H_ -#define INCLUDE_RIPS_COMPLEX_INTERFACE_H_ - -#include -#include -#include -#include - -#include - -#include "Simplex_tree_interface.h" - -#include -#include -#include // std::pair -#include - -namespace Gudhi { - -namespace rips_complex { - -class Rips_complex_interface { - using Point_d = std::vector; - using Distance_matrix = std::vector::Filtration_value>>; - - public: - void init_points(const std::vector>& points, double threshold) { - rips_complex_.emplace(points, threshold, Gudhi::Euclidean_distance()); - } - void init_matrix(const std::vector>& matrix, double threshold) { - rips_complex_.emplace(matrix, threshold); - } - - void init_points_sparse(const std::vector>& points, double threshold, double epsilon) { - sparse_rips_complex_.emplace(points, Gudhi::Euclidean_distance(), epsilon, -std::numeric_limits::infinity(), threshold); - } - void init_matrix_sparse(const std::vector>& matrix, double threshold, double epsilon) { - sparse_rips_complex_.emplace(matrix, epsilon, -std::numeric_limits::infinity(), threshold); - } - - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, int dim_max) { - if (rips_complex_) - rips_complex_->create_complex(*simplex_tree, dim_max); - else - sparse_rips_complex_->create_complex(*simplex_tree, dim_max); - simplex_tree->initialize_filtration(); - } - - private: - // std::variant would work, but we don't require C++17 yet, and boost::variant is not super convenient. - // Anyway, storing a graph would make more sense. Or changing the interface completely so there is no such storage. - boost::optional::Filtration_value>> rips_complex_; - boost::optional::Filtration_value>> sparse_rips_complex_; -}; - -} // namespace rips_complex - -} // namespace Gudhi - -#endif // INCLUDE_RIPS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Simplex_tree_interface.h b/src/cython/include/Simplex_tree_interface.h deleted file mode 100644 index c15a44a5..00000000 --- a/src/cython/include/Simplex_tree_interface.h +++ /dev/null @@ -1,144 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
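
The Rips interface above holds either a plain Rips complex or a sparse (approximate) one, depending on which init_* method the Python layer calls. On the Python side this corresponds to the optional sparse parameter of RipsComplex; a minimal sketch, assuming the gudhi Python package of this version (the epsilon value 0.3 and the points are illustrative):

    import gudhi

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]

    dense = gudhi.RipsComplex(points=points, max_edge_length=2.0)
    st_dense = dense.create_simplex_tree(max_dimension=2)

    # passing sparse selects the Sparse_rips_complex branch of the interface
    sparse = gudhi.RipsComplex(points=points, max_edge_length=2.0, sparse=0.3)
    st_sparse = sparse.create_simplex_tree(max_dimension=2)
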
- * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_SIMPLEX_TREE_INTERFACE_H_ -#define INCLUDE_SIMPLEX_TREE_INTERFACE_H_ - -#include -#include -#include -#include - -#include "Persistent_cohomology_interface.h" - -#include -#include -#include // std::pair - -namespace Gudhi { - -template -class Simplex_tree_interface : public Simplex_tree { - public: - using Base = Simplex_tree; - using Filtration_value = typename Base::Filtration_value; - using Vertex_handle = typename Base::Vertex_handle; - using Simplex_handle = typename Base::Simplex_handle; - using Insertion_result = typename std::pair; - using Simplex = std::vector; - using Filtered_simplices = std::vector>; - - public: - bool find_simplex(const Simplex& vh) { - return (Base::find(vh) != Base::null_simplex()); - } - - void assign_simplex_filtration(const Simplex& vh, Filtration_value filtration) { - Base::assign_filtration(Base::find(vh), filtration); - } - - bool insert(const Simplex& simplex, Filtration_value filtration = 0) { - Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration); - return (result.second); - } - - // Do not interface this function, only used in alpha complex interface for complex creation - bool insert_simplex(const Simplex& simplex, Filtration_value filtration = 0) { - Insertion_result result = Base::insert_simplex(simplex, filtration); - return (result.second); - } - - // Do not interface this function, only used in interface for complex creation - bool insert_simplex_and_subfaces(const Simplex& simplex, Filtration_value filtration = 0) { - Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration); - return (result.second); - } - - // Do not interface this function, only used in strong witness interface for complex creation - bool insert_simplex(const std::vector& simplex, Filtration_value filtration = 0) { - Insertion_result result = Base::insert_simplex(simplex, filtration); - return (result.second); - } - - // Do not interface this function, only used in strong witness interface for complex creation - bool insert_simplex_and_subfaces(const std::vector& simplex, Filtration_value filtration = 0) { - Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration); - return (result.second); - } - - Filtration_value simplex_filtration(const Simplex& simplex) { - return Base::filtration(Base::find(simplex)); - } - - void remove_maximal_simplex(const Simplex& simplex) { - Base::remove_maximal_simplex(Base::find(simplex)); - Base::initialize_filtration(); - } - - Filtered_simplices get_filtration() { - Base::initialize_filtration(); - Filtered_simplices filtrations; - for (auto f_simplex : Base::filtration_simplex_range()) { - Simplex simplex; - for (auto vertex : Base::simplex_vertex_range(f_simplex)) { - simplex.insert(simplex.begin(), vertex); - } - filtrations.push_back(std::make_pair(simplex, Base::filtration(f_simplex))); - } - return filtrations; - } - - Filtered_simplices get_skeleton(int dimension) { - Filtered_simplices skeletons; - for (auto f_simplex : Base::skeleton_simplex_range(dimension)) { - Simplex simplex; - for (auto vertex : Base::simplex_vertex_range(f_simplex)) { - simplex.insert(simplex.begin(), vertex); - } - skeletons.push_back(std::make_pair(simplex, Base::filtration(f_simplex))); - } - return skeletons; - } - - Filtered_simplices get_star(const Simplex& simplex) { - Filtered_simplices star; - for (auto 
f_simplex : Base::star_simplex_range(Base::find(simplex))) { - Simplex simplex_star; - for (auto vertex : Base::simplex_vertex_range(f_simplex)) { - simplex_star.insert(simplex_star.begin(), vertex); - } - star.push_back(std::make_pair(simplex_star, Base::filtration(f_simplex))); - } - return star; - } - - Filtered_simplices get_cofaces(const Simplex& simplex, int dimension) { - Filtered_simplices cofaces; - for (auto f_simplex : Base::cofaces_simplex_range(Base::find(simplex), dimension)) { - Simplex simplex_coface; - for (auto vertex : Base::simplex_vertex_range(f_simplex)) { - simplex_coface.insert(simplex_coface.begin(), vertex); - } - cofaces.push_back(std::make_pair(simplex_coface, Base::filtration(f_simplex))); - } - return cofaces; - } - - void create_persistence(Gudhi::Persistent_cohomology_interface* pcoh) { - Base::initialize_filtration(); - pcoh = new Gudhi::Persistent_cohomology_interface(*this); - } -}; - -} // namespace Gudhi - -#endif // INCLUDE_SIMPLEX_TREE_INTERFACE_H_ diff --git a/src/cython/include/Strong_witness_complex_interface.h b/src/cython/include/Strong_witness_complex_interface.h deleted file mode 100644 index 4c333da8..00000000 --- a/src/cython/include/Strong_witness_complex_interface.h +++ /dev/null @@ -1,63 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ -#define INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ - -#include -#include - -#include "Simplex_tree_interface.h" - -#include -#include // std::pair -#include -#include - -namespace Gudhi { - -namespace witness_complex { - -class Strong_witness_complex_interface { - using Nearest_landmark_range = std::vector>; - using Nearest_landmark_table = std::vector; - - public: - Strong_witness_complex_interface(const Nearest_landmark_table& nlt) { - witness_complex_ = new Strong_witness_complex(nlt); - } - - ~Strong_witness_complex_interface() { - delete witness_complex_; - } - - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, - std::size_t limit_dimension) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); - simplex_tree->initialize_filtration(); - } - - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, - double max_alpha_square) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square); - simplex_tree->initialize_filtration(); - } - - private: - Strong_witness_complex* witness_complex_; -}; - -} // namespace witness_complex - -} // namespace Gudhi - -#endif // INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Subsampling_interface.h b/src/cython/include/Subsampling_interface.h deleted file mode 100644 index bc390485..00000000 --- a/src/cython/include/Subsampling_interface.h +++ /dev/null @@ -1,109 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
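
All query methods of the Simplex_tree_interface above return simplices as (vertex list, filtration value) pairs. The corresponding Python calls, as a minimal sketch under the same assumptions:

    import gudhi

    st = gudhi.SimplexTree()
    st.insert([0, 1, 2], filtration=0.5)   # inserts the triangle and all of its faces
    print(st.get_filtration())             # every simplex with its filtration value
    print(st.get_skeleton(1))              # simplices of dimension <= 1
    print(st.get_star([0]))                # simplices containing vertex 0
    print(st.get_cofaces([0], 1))          # codimension-1 cofaces of vertex 0, i.e. the edges
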
- * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_SUBSAMPLING_INTERFACE_H_ -#define INCLUDE_SUBSAMPLING_INTERFACE_H_ - -#include -#include -#include -#include -#include - -#include -#include -#include - -namespace Gudhi { - -namespace subsampling { - -using Subsampling_dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; -using Subsampling_point_d = Subsampling_dynamic_kernel::Point_d; -using Subsampling_ft = Subsampling_dynamic_kernel::FT; - -// ------ choose_n_farthest_points ------ -std::vector> subsampling_n_farthest_points(const std::vector>& points, - unsigned nb_points) { - std::vector> landmarks; - Subsampling_dynamic_kernel k; - choose_n_farthest_points(k, points, nb_points, random_starting_point, std::back_inserter(landmarks)); - - return landmarks; -} - -std::vector> subsampling_n_farthest_points(const std::vector>& points, - unsigned nb_points, unsigned starting_point) { - std::vector> landmarks; - Subsampling_dynamic_kernel k; - choose_n_farthest_points(k, points, nb_points, starting_point, std::back_inserter(landmarks)); - - return landmarks; -} - -std::vector> subsampling_n_farthest_points_from_file(const std::string& off_file, - unsigned nb_points) { - Gudhi::Points_off_reader> off_reader(off_file); - std::vector> points = off_reader.get_point_cloud(); - return subsampling_n_farthest_points(points, nb_points); -} - -std::vector> subsampling_n_farthest_points_from_file(const std::string& off_file, - unsigned nb_points, unsigned starting_point) { - Gudhi::Points_off_reader> off_reader(off_file); - std::vector> points = off_reader.get_point_cloud(); - return subsampling_n_farthest_points(points, nb_points, starting_point); -} - -// ------ pick_n_random_points ------ -std::vector> subsampling_n_random_points(const std::vector>& points, - unsigned nb_points) { - std::vector> landmarks; - pick_n_random_points(points, nb_points, std::back_inserter(landmarks)); - - return landmarks; -} - -std::vector> subsampling_n_random_points_from_file(const std::string& off_file, - unsigned nb_points) { - Gudhi::Points_off_reader> off_reader(off_file); - std::vector> points = off_reader.get_point_cloud(); - return subsampling_n_random_points(points, nb_points); -} - -// ------ sparsify_point_set ------ -std::vector> subsampling_sparsify_points(const std::vector>& points, - double min_squared_dist) { - std::vector input, output; - for (auto point : points) - input.push_back(Subsampling_point_d(point.size(), point.begin(), point.end())); - Subsampling_dynamic_kernel k; - sparsify_point_set(k, input, min_squared_dist, std::back_inserter(output)); - - std::vector> landmarks; - for (auto point : output) - landmarks.push_back(std::vector(point.cartesian_begin(), point.cartesian_end())); - return landmarks; -} - -std::vector> subsampling_sparsify_points_from_file(const std::string& off_file, - double min_squared_dist) { - Gudhi::Points_off_reader> off_reader(off_file); - std::vector> points = off_reader.get_point_cloud(); - return subsampling_sparsify_points(points, min_squared_dist); -} - -} // namespace subsampling - -} // namespace Gudhi - -#endif // INCLUDE_SUBSAMPLING_INTERFACE_H_ diff --git a/src/cython/include/Tangential_complex_interface.h b/src/cython/include/Tangential_complex_interface.h deleted file mode 100644 index 7c3f2789..00000000 --- a/src/cython/include/Tangential_complex_interface.h +++ /dev/null @@ -1,111 +0,0 @@ -/* This file is part of the Gudhi 
Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ -#define INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ - -#include -#include -#include -#include - -#include "Simplex_tree_interface.h" - -#include -#include // std::pair -#include -#include - -namespace Gudhi { - -namespace tangential_complex { - -class Tangential_complex_interface { - using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; - using Point_d = Dynamic_kernel::Point_d; - using TC = Tangential_complex; - - public: - Tangential_complex_interface(int intrisic_dim, const std::vector>& points) { - Dynamic_kernel k; - - tangential_complex_ = new TC(points, intrisic_dim, k); - } - - Tangential_complex_interface(int intrisic_dim, const std::string& off_file_name, bool from_file = true) { - Dynamic_kernel k; - - Gudhi::Points_off_reader off_reader(off_file_name); - std::vector points = off_reader.get_point_cloud(); - - tangential_complex_ = new TC(points, intrisic_dim, k); - } - - ~Tangential_complex_interface() { - delete tangential_complex_; - } - - void compute_tangential_complex() { - tangential_complex_->compute_tangential_complex(); - num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices(); - } - - std::vector get_point(unsigned vh) { - std::vector vd; - if (vh < tangential_complex_->number_of_vertices()) { - Point_d ph = tangential_complex_->get_point(vh); - for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) - vd.push_back(*coord); - } - return vd; - } - - unsigned number_of_vertices() { - return tangential_complex_->number_of_vertices(); - } - - unsigned number_of_simplices() { - return num_inconsistencies_.num_simplices; - } - - unsigned number_of_inconsistent_simplices() { - return num_inconsistencies_.num_inconsistent_simplices; - } - - unsigned number_of_inconsistent_stars() { - return num_inconsistencies_.num_inconsistent_stars; - } - - void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) { - tangential_complex_->fix_inconsistencies_using_perturbation(max_perturb, time_limit); - num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices(); - } - - void create_simplex_tree(Simplex_tree<>* simplex_tree) { - tangential_complex_->create_complex>(*simplex_tree); - simplex_tree->initialize_filtration(); - } - - void set_max_squared_edge_length(double max_squared_edge_length) { - tangential_complex_->set_max_squared_edge_length(max_squared_edge_length); - } - -private: - TC* tangential_complex_; - TC::Num_inconsistencies num_inconsistencies_; -}; - -} // namespace tangential_complex - -} // namespace Gudhi - -#endif // INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ diff --git a/src/cython/include/Witness_complex_interface.h b/src/cython/include/Witness_complex_interface.h deleted file mode 100644 index 609277d6..00000000 --- a/src/cython/include/Witness_complex_interface.h +++ /dev/null @@ -1,64 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. 
- * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2016 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ -#define INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ - -#include -#include - -#include "Simplex_tree_interface.h" - -#include -#include // std::pair -#include -#include - -namespace Gudhi { - -namespace witness_complex { - -class Witness_complex_interface { - using Nearest_landmark_range = std::vector>; - using Nearest_landmark_table = std::vector; - - public: - Witness_complex_interface(const Nearest_landmark_table& nlt) { - witness_complex_ = new Witness_complex(nlt); - } - - ~Witness_complex_interface() { - delete witness_complex_; - } - - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, - std::size_t limit_dimension) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); - simplex_tree->initialize_filtration(); - } - - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, - double max_alpha_square) { - witness_complex_->create_complex(*simplex_tree, max_alpha_square); - simplex_tree->initialize_filtration(); - } - - private: - Witness_complex* witness_complex_; -}; - -} // namespace witness_complex - -} // namespace Gudhi - -#endif // INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ - diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in deleted file mode 100644 index 974dfb7a..00000000 --- a/src/cython/setup.py.in +++ /dev/null @@ -1,53 +0,0 @@ -from setuptools import setup, Extension -from Cython.Build import cythonize -from numpy import get_include as numpy_get_include - -"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2019 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - -modules = [@GUDHI_CYTHON_MODULES_TO_COMPILE@] - -source_dir='@CMAKE_CURRENT_SOURCE_DIR@/gudhi/' -extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@] -extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@] -libraries=[@GUDHI_CYTHON_LIBRARIES@] -library_dirs=[@GUDHI_CYTHON_LIBRARY_DIRS@] -include_dirs = [numpy_get_include(), '@CMAKE_CURRENT_SOURCE_DIR@/gudhi/', @GUDHI_CYTHON_INCLUDE_DIRS@] -runtime_library_dirs=[@GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS@] - -# Create ext_modules list from module list -ext_modules = [] -for module in modules: - ext_modules.append(Extension( - 'gudhi.' 
+ module, - sources = [source_dir + module + '.pyx',], - language = 'c++', - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=libraries, - library_dirs=library_dirs, - include_dirs=include_dirs, - runtime_library_dirs=runtime_library_dirs,)) - -setup( - name = 'gudhi', - packages=["gudhi",], - author='GUDHI Editorial Board', - author_email='gudhi-contact@lists.gforge.inria.fr', - version='@GUDHI_VERSION@', - url='http://gudhi.gforge.inria.fr/', - ext_modules = cythonize(ext_modules), - install_requires = ['cython','numpy >= 1.9',], - setup_requires = ['numpy >= 1.9',], -) diff --git a/src/cython/test/test_alpha_complex.py b/src/cython/test/test_alpha_complex.py deleted file mode 100755 index 24f8bf53..00000000 --- a/src/cython/test/test_alpha_complex.py +++ /dev/null @@ -1,90 +0,0 @@ -from gudhi import AlphaComplex, SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_empty_alpha(): - alpha_complex = AlphaComplex(points=[[0, 0]]) - assert alpha_complex.__is_defined() == True - - -def test_infinite_alpha(): - point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - alpha_complex = AlphaComplex(points=point_list) - assert alpha_complex.__is_defined() == True - - simplex_tree = alpha_complex.create_simplex_tree() - assert simplex_tree.__is_persistence_defined() == False - - assert simplex_tree.num_simplices() == 11 - assert simplex_tree.num_vertices() == 4 - - assert simplex_tree.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([3], 0.0), - ([0, 1], 0.25), - ([0, 2], 0.25), - ([1, 3], 0.25), - ([2, 3], 0.25), - ([1, 2], 0.5), - ([0, 1, 2], 0.5), - ([1, 2, 3], 0.5), - ] - assert simplex_tree.get_star([0]) == [ - ([0], 0.0), - ([0, 1], 0.25), - ([0, 1, 2], 0.5), - ([0, 2], 0.25), - ] - assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] - - assert point_list[0] == alpha_complex.get_point(0) - assert point_list[1] == alpha_complex.get_point(1) - assert point_list[2] == alpha_complex.get_point(2) - assert point_list[3] == alpha_complex.get_point(3) - assert alpha_complex.get_point(4) == [] - assert alpha_complex.get_point(125) == [] - - -def test_filtered_alpha(): - point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_alpha = AlphaComplex(points=point_list) - - simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25) - - assert simplex_tree.num_simplices() == 8 - assert simplex_tree.num_vertices() == 4 - - assert point_list[0] == filtered_alpha.get_point(0) - assert point_list[1] == filtered_alpha.get_point(1) - assert point_list[2] == filtered_alpha.get_point(2) - assert point_list[3] == filtered_alpha.get_point(3) - assert filtered_alpha.get_point(4) == [] - assert filtered_alpha.get_point(125) == [] - - assert simplex_tree.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([3], 0.0), - ([0, 1], 0.25), - ([0, 2], 0.25), - ([1, 3], 0.25), - ([2, 3], 0.25), - ] - assert simplex_tree.get_star([0]) == [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)] - assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] diff --git a/src/cython/test/test_bottleneck_distance.py 
b/src/cython/test/test_bottleneck_distance.py deleted file mode 100755 index f5f019b9..00000000 --- a/src/cython/test/test_bottleneck_distance.py +++ /dev/null @@ -1,23 +0,0 @@ -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_basic_bottleneck(): - diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] - diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] - - assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569 - assert gudhi.bottleneck_distance(diag1, diag2) == 0.75 diff --git a/src/cython/test/test_cover_complex.py b/src/cython/test/test_cover_complex.py deleted file mode 100755 index 8cd12272..00000000 --- a/src/cython/test/test_cover_complex.py +++ /dev/null @@ -1,85 +0,0 @@ -from gudhi import CoverComplex - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2018 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2018 Inria" -__license__ = "MIT" - - -def test_empty_constructor(): - # Try to create an empty CoverComplex - cover = CoverComplex() - assert cover.__is_defined() == True - - -def test_non_existing_file_read(): - # Try to open a non existing file - cover = CoverComplex() - assert cover.read_point_cloud("pouetpouettralala.toubiloubabdou") == False - - -def test_files_creation(): - # Create test file - cloud_file = open("cloud", "w") - cloud_file.write("nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0") - cloud_file.close() - cover_file = open("cover", "w") - cover_file.write("1\n2\n3") - cover_file.close() - graph_file = open("graph", "w") - graph_file.write("0 1\n0 2\n1 2") - graph_file.close() - - -def test_nerve(): - nerve = CoverComplex() - nerve.set_type("Nerve") - assert nerve.read_point_cloud("cloud") == True - nerve.set_color_from_coordinate() - nerve.set_graph_from_file("graph") - nerve.set_cover_from_file("cover") - nerve.find_simplices() - stree = nerve.create_simplex_tree() - - assert stree.num_vertices() == 3 - assert (stree.num_simplices() - stree.num_vertices()) == 0 - assert stree.dimension() == 0 - - -def test_graph_induced_complex(): - gic = CoverComplex() - gic.set_type("GIC") - assert gic.read_point_cloud("cloud") == True - gic.set_color_from_coordinate() - gic.set_graph_from_file("graph") - gic.set_cover_from_file("cover") - gic.find_simplices() - stree = gic.create_simplex_tree() - - assert stree.num_vertices() == 3 - assert (stree.num_simplices() - stree.num_vertices()) == 4 - assert stree.dimension() == 2 - - -def test_voronoi_graph_induced_complex(): - gic = CoverComplex() - gic.set_type("GIC") - assert gic.read_point_cloud("cloud") == True - gic.set_color_from_coordinate() - gic.set_graph_from_file("graph") - gic.set_cover_from_Voronoi(2) - gic.find_simplices() - stree = gic.create_simplex_tree() - - assert stree.num_vertices() == 2 - assert (stree.num_simplices() - stree.num_vertices()) == 1 - assert stree.dimension() == 1 diff --git 
a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py deleted file mode 100755 index 68f54fbe..00000000 --- a/src/cython/test/test_cubical_complex.py +++ /dev/null @@ -1,98 +0,0 @@ -from gudhi import CubicalComplex - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_empty_constructor(): - # Try to create an empty CubicalComplex - cub = CubicalComplex() - assert cub.__is_defined() == False - assert cub.__is_persistence_defined() == False - - -def test_non_existing_perseus_file_constructor(): - # Try to open a non existing file - cub = CubicalComplex(perseus_file="pouetpouettralala.toubiloubabdou") - assert cub.__is_defined() == False - assert cub.__is_persistence_defined() == False - - -def test_dimension_or_perseus_file_constructor(): - # Create test file - test_file = open("CubicalOneSphere.txt", "w") - test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n") - test_file.close() - # CubicalComplex can be constructed from dimensions and - # top_dimensional_cells OR from a Perseus-style file name. - cub = CubicalComplex( - dimensions=[3, 3], - top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9], - perseus_file="CubicalOneSphere.txt", - ) - assert cub.__is_defined() == False - assert cub.__is_persistence_defined() == False - - cub = CubicalComplex( - top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9], - perseus_file="CubicalOneSphere.txt", - ) - assert cub.__is_defined() == False - assert cub.__is_persistence_defined() == False - - cub = CubicalComplex(dimensions=[3, 3], perseus_file="CubicalOneSphere.txt") - assert cub.__is_defined() == False - assert cub.__is_persistence_defined() == False - - -def test_dimension_simple_constructor(): - cub = CubicalComplex( - dimensions=[3, 3], top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9] - ) - assert cub.__is_defined() == True - assert cub.__is_persistence_defined() == False - assert cub.persistence() == [(0, (1.0, float("inf")))] - assert cub.__is_persistence_defined() == True - assert cub.betti_numbers() == [1, 0, 0] - assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0] - - -def test_user_case_simple_constructor(): - cub = CubicalComplex( - dimensions=[3, 3], - top_dimensional_cells=[float("inf"), 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], - ) - assert cub.__is_defined() == True - assert cub.__is_persistence_defined() == False - assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))] - assert cub.__is_persistence_defined() == True - other_cub = CubicalComplex( - dimensions=[3, 3], - top_dimensional_cells=[1000.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], - ) - assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))] - - -def test_dimension_file_constructor(): - # Create test file - test_file = open("CubicalOneSphere.txt", "w") - test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n") - test_file.close() - cub = CubicalComplex(perseus_file="CubicalOneSphere.txt") - assert cub.__is_defined() == True - assert cub.__is_persistence_defined() == False - assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float("inf")))] - assert cub.__is_persistence_defined() == True - assert cub.betti_numbers() 
== [1, 0, 0] - assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0] diff --git a/src/cython/test/test_euclidean_witness_complex.py b/src/cython/test/test_euclidean_witness_complex.py deleted file mode 100755 index f5eae5fa..00000000 --- a/src/cython/test/test_euclidean_witness_complex.py +++ /dev/null @@ -1,95 +0,0 @@ -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_empty_euclidean_witness_complex(): - euclidean_witness = gudhi.EuclideanWitnessComplex() - assert euclidean_witness.__is_defined() == False - - -def test_witness_complex(): - point_cloud = [ - [1.0, 1.0], - [7.0, 0.0], - [4.0, 6.0], - [9.0, 6.0], - [0.0, 14.0], - [2.0, 19.0], - [9.0, 17.0], - ] - landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]] - euclidean_witness_complex = gudhi.EuclideanWitnessComplex( - landmarks=landmarks, witnesses=point_cloud - ) - simplex_tree = euclidean_witness_complex.create_simplex_tree(max_alpha_square=4.1) - - assert landmarks[0] == euclidean_witness_complex.get_point(0) - assert landmarks[1] == euclidean_witness_complex.get_point(1) - assert landmarks[2] == euclidean_witness_complex.get_point(2) - - assert simplex_tree.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([0, 1], 0.0), - ([2], 0.0), - ([0, 2], 0.0), - ([1, 2], 0.0), - ([0, 1, 2], 0.0), - ] - - -def test_empty_euclidean_strong_witness_complex(): - euclidean_strong_witness = gudhi.EuclideanStrongWitnessComplex() - assert euclidean_strong_witness.__is_defined() == False - - -def test_strong_witness_complex(): - point_cloud = [ - [1.0, 1.0], - [7.0, 0.0], - [4.0, 6.0], - [9.0, 6.0], - [0.0, 14.0], - [2.0, 19.0], - [9.0, 17.0], - ] - landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]] - euclidean_strong_witness_complex = gudhi.EuclideanStrongWitnessComplex( - landmarks=landmarks, witnesses=point_cloud - ) - simplex_tree = euclidean_strong_witness_complex.create_simplex_tree( - max_alpha_square=14.9 - ) - - assert landmarks[0] == euclidean_strong_witness_complex.get_point(0) - assert landmarks[1] == euclidean_strong_witness_complex.get_point(1) - assert landmarks[2] == euclidean_strong_witness_complex.get_point(2) - - assert simplex_tree.get_filtration() == [([0], 0.0), ([1], 0.0), ([2], 0.0)] - - simplex_tree = euclidean_strong_witness_complex.create_simplex_tree( - max_alpha_square=100.0 - ) - - assert simplex_tree.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([1, 2], 15.0), - ([0, 2], 34.0), - ([0, 1], 37.0), - ([0, 1, 2], 37.0), - ] diff --git a/src/cython/test/test_reader_utils.py b/src/cython/test/test_reader_utils.py deleted file mode 100755 index 4c7b32c2..00000000 --- a/src/cython/test/test_reader_utils.py +++ /dev/null @@ -1,126 +0,0 @@ -import gudhi -import numpy as np - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2017 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2017 Inria" -__license__ = "MIT" - - -def test_non_existing_csv_file(): - # Try to open a non existing file - matrix = gudhi.read_lower_triangular_matrix_from_csv_file( - csv_file="pouetpouettralala.toubiloubabdou" - ) - assert matrix == [] - - -def test_full_square_distance_matrix_csv_file(): - # Create test file - test_file = open("full_square_distance_matrix.csv", "w") - test_file.write("0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;") - test_file.close() - matrix = gudhi.read_lower_triangular_matrix_from_csv_file( - csv_file="full_square_distance_matrix.csv" - ) - assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]] - - -def test_lower_triangular_distance_matrix_csv_file(): - # Create test file - test_file = open("lower_triangular_distance_matrix.csv", "w") - test_file.write("\n1,\n2,3,\n4,5,6,\n7,8,9,10,") - test_file.close() - matrix = gudhi.read_lower_triangular_matrix_from_csv_file( - csv_file="lower_triangular_distance_matrix.csv", separator="," - ) - assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]] - - -def test_non_existing_persistence_file(): - # Try to open a non existing file - persistence = gudhi.read_persistence_intervals_grouped_by_dimension( - persistence_file="pouetpouettralala.toubiloubabdou" - ) - assert persistence == [] - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="pouetpouettralala.toubiloubabdou", only_this_dim=1 - ) - np.testing.assert_array_equal(persistence, []) - - -def test_read_persistence_intervals_without_dimension(): - # Create test file - test_file = open("persistence_intervals_without_dimension.pers", "w") - test_file.write( - "# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf" - ) - test_file.close() - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_without_dimension.pers" - ) - np.testing.assert_array_equal( - persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] - ) - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=0 - ) - np.testing.assert_array_equal(persistence, []) - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=1 - ) - np.testing.assert_array_equal(persistence, []) - persistence = gudhi.read_persistence_intervals_grouped_by_dimension( - persistence_file="persistence_intervals_without_dimension.pers" - ) - assert persistence == { - -1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] - } - - -def test_read_persistence_intervals_with_dimension(): - # Create test file - test_file = open("persistence_intervals_with_dimension.pers", "w") - test_file.write( - "# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. 
inf" - ) - test_file.close() - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_with_dimension.pers" - ) - np.testing.assert_array_equal( - persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] - ) - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=0 - ) - np.testing.assert_array_equal(persistence, [(2.7, 3.7)]) - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=1 - ) - np.testing.assert_array_equal(persistence, [(9.6, 14.0), (3.0, float("Inf"))]) - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=2 - ) - np.testing.assert_array_equal(persistence, []) - persistence = gudhi.read_persistence_intervals_in_dimension( - persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=3 - ) - np.testing.assert_array_equal(persistence, [(34.2, 34.974)]) - persistence = gudhi.read_persistence_intervals_grouped_by_dimension( - persistence_file="persistence_intervals_with_dimension.pers" - ) - assert persistence == { - 0: [(2.7, 3.7)], - 1: [(9.6, 14.0), (3.0, float("Inf"))], - 3: [(34.2, 34.974)], - } diff --git a/src/cython/test/test_rips_complex.py b/src/cython/test/test_rips_complex.py deleted file mode 100755 index d55ae22f..00000000 --- a/src/cython/test/test_rips_complex.py +++ /dev/null @@ -1,133 +0,0 @@ -from gudhi import RipsComplex -from math import sqrt - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_empty_rips(): - rips_complex = RipsComplex() - - -def test_rips_from_points(): - point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - rips_complex = RipsComplex(points=point_list, max_edge_length=42) - - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - - assert simplex_tree.__is_defined() == True - assert simplex_tree.__is_persistence_defined() == False - - assert simplex_tree.num_simplices() == 10 - assert simplex_tree.num_vertices() == 4 - - assert simplex_tree.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([3], 0.0), - ([0, 1], 1.0), - ([0, 2], 1.0), - ([1, 3], 1.0), - ([2, 3], 1.0), - ([1, 2], 1.4142135623730951), - ([0, 3], 1.4142135623730951), - ] - assert simplex_tree.get_star([0]) == [ - ([0], 0.0), - ([0, 1], 1.0), - ([0, 2], 1.0), - ([0, 3], 1.4142135623730951), - ] - assert simplex_tree.get_cofaces([0], 1) == [ - ([0, 1], 1.0), - ([0, 2], 1.0), - ([0, 3], 1.4142135623730951), - ] - - -def test_filtered_rips_from_points(): - point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0) - - simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) - - assert simplex_tree.__is_defined() == True - assert simplex_tree.__is_persistence_defined() == False - - assert simplex_tree.num_simplices() == 8 - assert simplex_tree.num_vertices() == 4 - - -def test_sparse_filtered_rips_from_points(): - point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0, sparse=0.001) - - simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) - - assert simplex_tree.__is_defined() == True - assert simplex_tree.__is_persistence_defined() == False - - assert simplex_tree.num_simplices() == 8 - assert simplex_tree.num_vertices() == 4 - - -def test_rips_from_distance_matrix(): - distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] - rips_complex = RipsComplex(distance_matrix=distance_matrix, max_edge_length=42) - - simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) - - assert simplex_tree.__is_defined() == True - assert simplex_tree.__is_persistence_defined() == False - - assert simplex_tree.num_simplices() == 10 - assert simplex_tree.num_vertices() == 4 - - assert simplex_tree.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([3], 0.0), - ([0, 1], 1.0), - ([0, 2], 1.0), - ([1, 3], 1.0), - ([2, 3], 1.0), - ([1, 2], 1.4142135623730951), - ([0, 3], 1.4142135623730951), - ] - assert simplex_tree.get_star([0]) == [ - ([0], 0.0), - ([0, 1], 1.0), - ([0, 2], 1.0), - ([0, 3], 1.4142135623730951), - ] - assert simplex_tree.get_cofaces([0], 1) == [ - ([0, 1], 1.0), - ([0, 2], 1.0), - ([0, 3], 1.4142135623730951), - ] - - -def test_filtered_rips_from_distance_matrix(): - distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] - filtered_rips = RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0) - - simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) - - assert simplex_tree.__is_defined() == True - assert simplex_tree.__is_persistence_defined() == False - - assert simplex_tree.num_simplices() == 8 - assert simplex_tree.num_vertices() == 4 diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py 
deleted file mode 100755 index 8d8971c1..00000000 --- a/src/cython/test/test_simplex_tree.py +++ /dev/null @@ -1,250 +0,0 @@ -from gudhi import SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_insertion(): - st = SimplexTree() - assert st.__is_defined() == True - assert st.__is_persistence_defined() == False - - # insert test - assert st.insert([0, 1]) == True - - assert st.dimension() == 1 - - assert st.insert([0, 1, 2], filtration=4.0) == True - - assert st.dimension() == 2 - - assert st.num_simplices() == 7 - assert st.num_vertices() == 3 - - # find test - assert st.find([0, 1, 2]) == True - assert st.find([0, 1]) == True - assert st.find([0, 2]) == True - assert st.find([0]) == True - assert st.find([1]) == True - assert st.find([2]) == True - assert st.find([3]) == False - assert st.find([0, 3]) == False - assert st.find([1, 3]) == False - assert st.find([2, 3]) == False - - # filtration test - st.initialize_filtration() - assert st.filtration([0, 1, 2]) == 4.0 - assert st.filtration([0, 2]) == 4.0 - assert st.filtration([1, 2]) == 4.0 - assert st.filtration([2]) == 4.0 - assert st.filtration([0, 1]) == 0.0 - assert st.filtration([0]) == 0.0 - assert st.filtration([1]) == 0.0 - - # skeleton test - assert st.get_skeleton(2) == [ - ([0, 1, 2], 4.0), - ([0, 1], 0.0), - ([0, 2], 4.0), - ([0], 0.0), - ([1, 2], 4.0), - ([1], 0.0), - ([2], 4.0), - ] - assert st.get_skeleton(1) == [ - ([0, 1], 0.0), - ([0, 2], 4.0), - ([0], 0.0), - ([1, 2], 4.0), - ([1], 0.0), - ([2], 4.0), - ] - assert st.get_skeleton(0) == [([0], 0.0), ([1], 0.0), ([2], 4.0)] - - # remove_maximal_simplex test - assert st.get_cofaces([0, 1, 2], 1) == [] - st.remove_maximal_simplex([0, 1, 2]) - assert st.get_skeleton(2) == [ - ([0, 1], 0.0), - ([0, 2], 4.0), - ([0], 0.0), - ([1, 2], 4.0), - ([1], 0.0), - ([2], 4.0), - ] - assert st.find([0, 1, 2]) == False - assert st.find([0, 1]) == True - assert st.find([0, 2]) == True - assert st.find([0]) == True - assert st.find([1]) == True - assert st.find([2]) == True - - st.initialize_filtration() - assert st.persistence(persistence_dim_max=True) == [ - (1, (4.0, float("inf"))), - (0, (0.0, float("inf"))), - ] - assert st.__is_persistence_defined() == True - - assert st.betti_numbers() == [1, 1] - assert st.persistent_betti_numbers(-0.1, 10000.0) == [0, 0] - assert st.persistent_betti_numbers(0.0, 10000.0) == [1, 0] - assert st.persistent_betti_numbers(3.9, 10000.0) == [1, 0] - assert st.persistent_betti_numbers(4.0, 10000.0) == [1, 1] - assert st.persistent_betti_numbers(9999.0, 10000.0) == [1, 1] - - -def test_expansion(): - st = SimplexTree() - assert st.__is_defined() == True - assert st.__is_persistence_defined() == False - - # insert test - assert st.insert([3, 2], 0.1) == True - assert st.insert([2, 0], 0.2) == True - assert st.insert([1, 0], 0.3) == True - assert st.insert([3, 1], 0.4) == True - assert st.insert([2, 1], 0.5) == True - assert st.insert([6, 5], 0.6) == True - assert st.insert([4, 2], 0.7) == True - assert st.insert([3, 0], 0.8) == True - assert st.insert([6, 4], 0.9) == True - assert st.insert([6, 3], 1.0) == True - - assert st.num_vertices() == 7 - assert 
st.num_simplices() == 17 - assert st.get_filtration() == [ - ([2], 0.1), - ([3], 0.1), - ([2, 3], 0.1), - ([0], 0.2), - ([0, 2], 0.2), - ([1], 0.3), - ([0, 1], 0.3), - ([1, 3], 0.4), - ([1, 2], 0.5), - ([5], 0.6), - ([6], 0.6), - ([5, 6], 0.6), - ([4], 0.7), - ([2, 4], 0.7), - ([0, 3], 0.8), - ([4, 6], 0.9), - ([3, 6], 1.0), - ] - - st.expansion(3) - assert st.num_vertices() == 7 - assert st.num_simplices() == 22 - st.initialize_filtration() - - assert st.get_filtration() == [ - ([2], 0.1), - ([3], 0.1), - ([2, 3], 0.1), - ([0], 0.2), - ([0, 2], 0.2), - ([1], 0.3), - ([0, 1], 0.3), - ([1, 3], 0.4), - ([1, 2], 0.5), - ([0, 1, 2], 0.5), - ([1, 2, 3], 0.5), - ([5], 0.6), - ([6], 0.6), - ([5, 6], 0.6), - ([4], 0.7), - ([2, 4], 0.7), - ([0, 3], 0.8), - ([0, 1, 3], 0.8), - ([0, 2, 3], 0.8), - ([0, 1, 2, 3], 0.8), - ([4, 6], 0.9), - ([3, 6], 1.0), - ] - - -def test_automatic_dimension(): - st = SimplexTree() - assert st.__is_defined() == True - assert st.__is_persistence_defined() == False - - # insert test - assert st.insert([0, 1, 3], filtration=0.5) == True - assert st.insert([0, 1, 2], filtration=1.0) == True - - assert st.num_vertices() == 4 - assert st.num_simplices() == 11 - - assert st.dimension() == 2 - assert st.upper_bound_dimension() == 2 - - assert st.prune_above_filtration(0.6) == True - assert st.dimension() == 2 - assert st.upper_bound_dimension() == 2 - - st.assign_filtration([0, 1, 3], 0.7) - assert st.filtration([0, 1, 3]) == 0.7 - - st.remove_maximal_simplex([0, 1, 3]) - assert st.upper_bound_dimension() == 2 - assert st.dimension() == 1 - assert st.upper_bound_dimension() == 1 - - -def test_make_filtration_non_decreasing(): - st = SimplexTree() - assert st.__is_defined() == True - assert st.__is_persistence_defined() == False - - # Inserted simplex: - # 1 - # o - # /X\ - # o---o---o---o - # 2 0 3\X/4 - # o - # 5 - assert st.insert([2, 1, 0], filtration=2.0) == True - assert st.insert([3, 0], filtration=2.0) == True - assert st.insert([3, 4, 5], filtration=2.0) == True - - assert st.make_filtration_non_decreasing() == False - - # Because of non decreasing property of simplex tree, { 0 } , { 1 } and - # { 0, 1 } are going to be set from value 2.0 to 1.0 - st.insert([0, 1, 6, 7], filtration=1.0) - - assert st.make_filtration_non_decreasing() == False - - # Modify specific values to test make_filtration_non_decreasing - st.assign_filtration([0, 1, 6, 7], 0.8) - st.assign_filtration([0, 1, 6], 0.9) - st.assign_filtration([0, 6], 0.6) - st.assign_filtration([3, 4, 5], 1.2) - st.assign_filtration([3, 4], 1.1) - st.assign_filtration([4, 5], 1.99) - - assert st.make_filtration_non_decreasing() == True - - assert st.filtration([0, 1, 6, 7]) == 1.0 - assert st.filtration([0, 1, 6]) == 1.0 - assert st.filtration([0, 1]) == 1.0 - assert st.filtration([0]) == 1.0 - assert st.filtration([1]) == 1.0 - assert st.filtration([3, 4, 5]) == 2.0 - assert st.filtration([3, 4]) == 2.0 - assert st.filtration([4, 5]) == 2.0 diff --git a/src/cython/test/test_subsampling.py b/src/cython/test/test_subsampling.py deleted file mode 100755 index c816e203..00000000 --- a/src/cython/test/test_subsampling.py +++ /dev/null @@ -1,179 +0,0 @@ -import gudhi - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_write_off_file_for_tests(): - file = open("subsample.off", "w") - file.write("nOFF\n") - file.write("2 7 0 0\n") - file.write("1.0 1.0\n") - file.write("7.0 0.0\n") - file.write("4.0 6.0\n") - file.write("9.0 6.0\n") - file.write("0.0 14.0\n") - file.write("2.0 19.0\n") - file.write("9.0 17.0\n") - file.close() - - -def test_simple_choose_n_farthest_points_with_a_starting_point(): - point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] - i = 0 - for point in point_set: - # The iteration starts with the given starting point - sub_set = gudhi.choose_n_farthest_points( - points=point_set, nb_points=1, starting_point=i - ) - assert sub_set[0] == point_set[i] - i = i + 1 - - # The iteration finds then the farthest - sub_set = gudhi.choose_n_farthest_points( - points=point_set, nb_points=2, starting_point=1 - ) - assert sub_set[1] == point_set[3] - sub_set = gudhi.choose_n_farthest_points( - points=point_set, nb_points=2, starting_point=3 - ) - assert sub_set[1] == point_set[1] - sub_set = gudhi.choose_n_farthest_points( - points=point_set, nb_points=2, starting_point=0 - ) - assert sub_set[1] == point_set[2] - sub_set = gudhi.choose_n_farthest_points( - points=point_set, nb_points=2, starting_point=2 - ) - assert sub_set[1] == point_set[0] - - # Test the limits - assert ( - gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=0) == [] - ) - assert ( - gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=0) == [] - ) - assert ( - gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=1) == [] - ) - assert ( - gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=1) == [] - ) - - # From off file test - for i in range(0, 7): - assert ( - len( - gudhi.choose_n_farthest_points( - off_file="subsample.off", nb_points=i, starting_point=i - ) - ) - == i - ) - - -def test_simple_choose_n_farthest_points_randomed(): - point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] - # Test the limits - assert gudhi.choose_n_farthest_points(points=[], nb_points=0) == [] - assert gudhi.choose_n_farthest_points(points=[], nb_points=1) == [] - assert gudhi.choose_n_farthest_points(points=point_set, nb_points=0) == [] - - # Go furter than point set on purpose - for iter in range(1, 10): - sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=iter) - for sub in sub_set: - found = False - for point in point_set: - if point == sub: - found = True - # Check each sub set point is existing in the point set - assert found == True - - # From off file test - for i in range(0, 7): - assert ( - len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i)) - == i - ) - - -def test_simple_pick_n_random_points(): - point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] - # Test the limits - assert gudhi.pick_n_random_points(points=[], nb_points=0) == [] - assert gudhi.pick_n_random_points(points=[], nb_points=1) == [] - assert gudhi.pick_n_random_points(points=point_set, nb_points=0) == [] - - # Go furter than point set on purpose - for iter in range(1, 10): - sub_set = gudhi.pick_n_random_points(points=point_set, nb_points=iter) - print(5) - for sub in sub_set: - found = False - for point in point_set: - if point == sub: - found = True - # Check each sub set point is existing in the point set - assert found == True - - # 
From off file test - for i in range(0, 7): - assert ( - len(gudhi.pick_n_random_points(off_file="subsample.off", nb_points=i)) == i - ) - - -def test_simple_sparsify_points(): - point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] - # Test the limits - # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 0.0) == [] - # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 10.0) == [] - assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=0.0) == point_set - assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.0) == point_set - assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.0) == [ - [0, 1], - [1, 0], - ] - assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.01) == [[0, 1]] - - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0)) - == 7 - ) - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=30.0)) - == 5 - ) - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.0)) - == 4 - ) - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=90.0)) - == 3 - ) - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=100.0)) - == 2 - ) - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.0)) - == 2 - ) - assert ( - len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.01)) - == 1 - ) diff --git a/src/cython/test/test_tangential_complex.py b/src/cython/test/test_tangential_complex.py deleted file mode 100755 index 0f828d8e..00000000 --- a/src/cython/test/test_tangential_complex.py +++ /dev/null @@ -1,55 +0,0 @@ -from gudhi import TangentialComplex, SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_tangential(): - point_list = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]] - tc = TangentialComplex(intrisic_dim=1, points=point_list) - assert tc.__is_defined() == True - assert tc.num_vertices() == 4 - assert tc.num_simplices() == 0 - assert tc.num_inconsistent_simplices() == 0 - assert tc.num_inconsistent_stars() == 0 - - tc.compute_tangential_complex() - assert tc.num_vertices() == 4 - assert tc.num_simplices() == 4 - assert tc.num_inconsistent_simplices() == 0 - assert tc.num_inconsistent_stars() == 0 - - st = tc.create_simplex_tree() - assert st.__is_defined() == True - assert st.__is_persistence_defined() == False - - assert st.num_simplices() == 6 - assert st.num_vertices() == 4 - - assert st.get_filtration() == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([0, 2], 0.0), - ([3], 0.0), - ([1, 3], 0.0), - ] - assert st.get_cofaces([0], 1) == [([0, 2], 0.0)] - - assert point_list[0] == tc.get_point(0) - assert point_list[1] == tc.get_point(1) - assert point_list[2] == tc.get_point(2) - assert point_list[3] == tc.get_point(3) - assert tc.get_point(4) == [] - assert tc.get_point(125) == [] diff --git a/src/cython/test/test_witness_complex.py b/src/cython/test/test_witness_complex.py deleted file mode 100755 index 36ced635..00000000 --- a/src/cython/test/test_witness_complex.py +++ /dev/null @@ -1,62 +0,0 @@ -from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree - -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Vincent Rouvreau - - Copyright (C) 2016 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -__author__ = "Vincent Rouvreau" -__copyright__ = "Copyright (C) 2016 Inria" -__license__ = "MIT" - - -def test_empty_witness_complex(): - witness = WitnessComplex() - assert witness.__is_defined() == False - - -def test_witness_complex(): - nearest_landmark_table = [ - [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], - [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], - [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], - [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], - [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]], - ] - - witness_complex = WitnessComplex(nearest_landmark_table=nearest_landmark_table) - simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1) - assert simplex_tree.num_vertices() == 5 - assert simplex_tree.num_simplices() == 31 - simplex_tree = witness_complex.create_simplex_tree( - max_alpha_square=4.1, limit_dimension=2 - ) - assert simplex_tree.num_vertices() == 5 - assert simplex_tree.num_simplices() == 25 - - -def test_strong_witness_complex(): - nearest_landmark_table = [ - [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], - [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], - [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], - [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], - [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]], - ] - - strong_witness_complex = StrongWitnessComplex( - nearest_landmark_table=nearest_landmark_table - ) - simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1) - assert simplex_tree.num_vertices() == 5 - assert simplex_tree.num_simplices() == 31 - simplex_tree = strong_witness_complex.create_simplex_tree( - max_alpha_square=4.1, limit_dimension=2 - ) - assert simplex_tree.num_vertices() == 5 - assert simplex_tree.num_simplices() == 25 diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt new file mode 100644 index 00000000..9e128d30 --- /dev/null +++ b/src/python/CMakeLists.txt @@ -0,0 +1,432 @@ +project(Cython) + +function( add_GUDHI_PYTHON_lib THE_LIB ) + if(EXISTS ${THE_LIB}) + get_filename_component(THE_LIB_FILE_NAME ${THE_LIB} NAME_WE) + if(WIN32) + message("++ ${THE_LIB} => THE_LIB_FILE_NAME = ${THE_LIB_FILE_NAME}") + set(GUDHI_PYTHON_LIBRARIES "${GUDHI_PYTHON_LIBRARIES}'${THE_LIB_FILE_NAME}', " PARENT_SCOPE) + else(WIN32) + STRING(REGEX REPLACE "lib" "" UNIX_LIB_FILE_NAME ${THE_LIB_FILE_NAME}) + message("++ ${THE_LIB} => UNIX_LIB_FILE_NAME = ${UNIX_LIB_FILE_NAME}") + set(GUDHI_PYTHON_LIBRARIES "${GUDHI_PYTHON_LIBRARIES}'${UNIX_LIB_FILE_NAME}', " PARENT_SCOPE) + endif(WIN32) + endif(EXISTS ${THE_LIB}) +endfunction( add_GUDHI_PYTHON_lib ) + +# THE_TEST is the python test file name (without .py extension) containing tests functions +function( add_gudhi_py_test THE_TEST ) + if(PYTEST_FOUND) + # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python + # use py.test no cache provider, otherwise a .cache file is created in sources by py.test + add_test(NAME ${THE_TEST}_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py) + endif() +endfunction( add_gudhi_py_test ) + +# Set gudhi.__debug_info__ +# WARNING : to be done before setup.py.in configure_file +function( add_gudhi_debug_info DEBUG_INFO ) + set(GUDHI_PYTHON_DEBUG_INFO "${GUDHI_PYTHON_DEBUG_INFO} \"${DEBUG_INFO}\\n\" \\\n" PARENT_SCOPE) +endfunction( add_gudhi_debug_info ) + +if(PYTHONINTERP_FOUND) + 
if(CYTHON_FOUND) + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'off_reader', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'simplex_tree', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'rips_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'cubical_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'periodic_cubical_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'persistence_graphical_tools', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'reader_utils', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'witness_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'strong_witness_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'bottleneck', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'nerve_gic', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'subsampling', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'tangential_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'alpha_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_witness_complex', ") + set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_strong_witness_complex', ") + + add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}") + add_gudhi_debug_info("Cython version ${CYTHON_VERSION}") + if(PYTEST_FOUND) + add_gudhi_debug_info("Pytest version ${PYTEST_VERSION}") + endif() + if(MATPLOTLIB_FOUND) + add_gudhi_debug_info("Matplotlib version ${MATPLOTLIB_VERSION}") + endif() + if(NUMPY_FOUND) + add_gudhi_debug_info("Numpy version ${NUMPY_VERSION}") + endif() + if(SCIPY_FOUND) + add_gudhi_debug_info("Scipy version ${SCIPY_VERSION}") + endif() + + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_SYSTEM_NO_DEPRECATED', ") + + # Gudhi and CGAL compilation option + if(MSVC) + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'/fp:strict', ") + else(MSVC) + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-std=c++11', ") + endif(MSVC) + if(CMAKE_COMPILER_IS_GNUCXX) + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ") + endif(CMAKE_COMPILER_IS_GNUCXX) + if (CMAKE_CXX_COMPILER_ID MATCHES Intel) + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-fp-model strict', ") + endif(CMAKE_CXX_COMPILER_ID MATCHES Intel) + if (DEBUG_TRACES) + # For programs to be more verbose + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DDEBUG_TRACES', ") + endif() + + if (EIGEN3_FOUND) + add_gudhi_debug_info("Eigen3 version ${EIGEN3_VERSION}") + # No problem, even if no CGAL found + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ") + endif (EIGEN3_FOUND) + + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'off_reader', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'simplex_tree', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'rips_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'cubical_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'periodic_cubical_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE 
"${GUDHI_PYTHON_MODULES_TO_COMPILE}'reader_utils', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'witness_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ") + if (NOT CGAL_VERSION VERSION_LESS 4.11.0) + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'bottleneck', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'nerve_gic', ") + endif () + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'subsampling', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'tangential_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'alpha_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ") + set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ") + endif () + + if(CGAL_FOUND) + can_cgal_use_cxx11_thread_local() + if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT) + if(CMAKE_BUILD_TYPE MATCHES Debug) + add_GUDHI_PYTHON_lib("${Boost_THREAD_LIBRARY_DEBUG}") + else() + add_GUDHI_PYTHON_lib("${Boost_THREAD_LIBRARY_RELEASE}") + endif() + message("** Add Boost ${Boost_LIBRARY_DIRS}") + set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ") + endif() + # Add CGAL compilation args + if(CGAL_HEADER_ONLY) + add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ") + else(CGAL_HEADER_ONLY) + add_gudhi_debug_info("CGAL version ${CGAL_VERSION}") + add_GUDHI_PYTHON_lib("${CGAL_LIBRARY}") + set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ") + message("** Add CGAL ${CGAL_LIBRARIES_DIR}") + # If CGAL is not header only, CGAL library may link with boost system, + if(CMAKE_BUILD_TYPE MATCHES Debug) + add_GUDHI_PYTHON_lib("${Boost_SYSTEM_LIBRARY_DEBUG}") + else() + add_GUDHI_PYTHON_lib("${Boost_SYSTEM_LIBRARY_RELEASE}") + endif() + set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ") + message("** Add Boost ${Boost_LIBRARY_DIRS}") + endif(CGAL_HEADER_ONLY) + # GMP and GMPXX are not required, but if present, CGAL will link with them. 
+ if(GMP_FOUND) + add_gudhi_debug_info("GMP_LIBRARIES = ${GMP_LIBRARIES}") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ") + add_GUDHI_PYTHON_lib("${GMP_LIBRARIES}") + set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ") + message("** Add gmp ${GMP_LIBRARIES_DIR}") + if(GMPXX_FOUND) + add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ") + add_GUDHI_PYTHON_lib("${GMPXX_LIBRARIES}") + set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ") + message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}") + endif(GMPXX_FOUND) + endif(GMP_FOUND) + endif(CGAL_FOUND) + + # Specific for Mac + if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-mmacosx-version-min=10.12', ") + set(GUDHI_PYTHON_EXTRA_LINK_ARGS "${GUDHI_PYTHON_EXTRA_LINK_ARGS}'-mmacosx-version-min=10.12', ") + endif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + + # Loop on INCLUDE_DIRECTORIES PROPERTY + get_property(GUDHI_INCLUDE_DIRECTORIES DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES) + foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES}) + set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${GUDHI_INCLUDE_DIRECTORY}', ") + endforeach() + set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/include', ") + + if (TBB_FOUND AND WITH_GUDHI_USE_TBB) + add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ") + if(CMAKE_BUILD_TYPE MATCHES Debug) + add_GUDHI_PYTHON_lib("${TBB_DEBUG_LIBRARY}") + add_GUDHI_PYTHON_lib("${TBB_MALLOC_DEBUG_LIBRARY}") + else() + add_GUDHI_PYTHON_lib("${TBB_RELEASE_LIBRARY}") + add_GUDHI_PYTHON_lib("${TBB_MALLOC_RELEASE_LIBRARY}") + endif() + set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ") + message("** Add tbb ${TBB_LIBRARY_DIRS}") + set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ") + endif() + + if(UNIX AND WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS) + set( GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}") + endif(UNIX AND WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS) + + # Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention + configure_file(setup.py.in "${CMAKE_CURRENT_BINARY_DIR}/setup.py" @ONLY) + + # Generate gudhi/__init__.py + file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + configure_file("gudhi/__init__.py.in" "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY) + + # Other .py files + file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + + add_custom_command( + OUTPUT gudhi.so + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/setup.py" "build_ext" "--inplace") + + add_custom_target(python ALL DEPENDS gudhi.so + COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests") + + install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py install)") + + # Test examples + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + # Bottleneck and Alpha + add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test + WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py" + -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3) + + if(MATPLOTLIB_FOUND AND NUMPY_FOUND) + # Tangential + add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py" + --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) + + add_gudhi_py_test(test_tangential_complex) + + # Witness complex AND Subsampling + add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py" + --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2) + + add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py" + --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2) + endif() + + # Subsampling + add_gudhi_py_test(test_subsampling) + + endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + if (NOT CGAL_VERSION VERSION_LESS 4.11.0) + # Bottleneck + add_test(NAME bottleneck_basic_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py") + + add_gudhi_py_test(test_bottleneck_distance) + + # Cover complex + file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + add_test(NAME cover_complex_nerve_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py" + -f human.off -c 2 -r 10 -g 0.3) + + add_test(NAME cover_complex_coordinate_gic_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py" + -f human.off -c 0 -v) + + add_test(NAME cover_complex_functional_gic_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py" + -o lucky_cat.off + -f lucky_cat_PCA1 -v) + + add_test(NAME cover_complex_voronoi_gic_example_py_test + WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py" + -f human.off -n 700 -v) + + add_gudhi_py_test(test_cover_complex) + endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) + + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + # Alpha + add_test(NAME alpha_complex_from_points_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py") + + if(MATPLOTLIB_FOUND AND NUMPY_FOUND) + add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py" + --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6) + endif() + + add_gudhi_py_test(test_alpha_complex) + + endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + # Euclidean witness + add_gudhi_py_test(test_euclidean_witness_complex) + + endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + + # Cubical + add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py" + --no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt) + + if(NUMPY_FOUND) + add_test(NAME random_cubical_complex_persistence_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py" + 10 10 10) + endif() + + add_gudhi_py_test(test_cubical_complex) + + # Rips + if(MATPLOTLIB_FOUND AND NUMPY_FOUND) + add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py" + --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3) + + add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py + --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3) + endif() + + add_test(NAME rips_complex_from_points_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_from_points_example.py) + + add_gudhi_py_test(test_rips_complex) + + # Simplex tree + add_test(NAME simplex_tree_example_py_test + WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/simplex_tree_example.py) + + add_gudhi_py_test(test_simplex_tree) + + # Witness + add_test(NAME witness_complex_from_nearest_landmark_table_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/witness_complex_from_nearest_landmark_table.py) + + add_gudhi_py_test(test_witness_complex) + + # Reader utils + add_gudhi_py_test(test_reader_utils) + + # Documentation generation is available through sphinx - requires all modules + if(SPHINX_PATH) + if(MATPLOTLIB_FOUND) + if(NUMPY_FOUND) + if(SCIPY_FOUND) + if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/") + # User warning - Sphinx is a static page generator, configured to work with the user version + # Image and bibliography warnings appear on the developer version because the corresponding files are not found there + if (GUDHI_PYTHON_PATH STREQUAL "src/python") + set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING: Sphinx is configured for the user version, but you are running it on the developer version. Images and bibliography references will be missing") + endif() + # sphinx target requires gudhi.so, because conf.py reads gudhi version from it + add_custom_target(sphinx + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx + DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so" + COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM) + + add_test(NAME sphinx_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" + ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest) + + # Set missing or not modules + set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES") + else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + message("++ Python documentation module will not be compiled because it requires Eigen3 and a CGAL version >= 4.11.0") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") + endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) + else(SCIPY_FOUND) + message("++ Python documentation module will not be compiled because scipy was not found") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") + endif(SCIPY_FOUND) + else(NUMPY_FOUND) + message("++ Python documentation module will not be compiled because numpy was not found") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") + endif(NUMPY_FOUND) + else(MATPLOTLIB_FOUND) + message("++ Python documentation module will not be compiled because matplotlib was not found") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") + endif(MATPLOTLIB_FOUND) + else(SPHINX_PATH) + message("++ Python documentation module will not be compiled because sphinx and sphinxcontrib-bibtex were not found") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES") + 
endif(SPHINX_PATH) + + + # Set missing or not modules + set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") + else(CYTHON_FOUND) + message("++ Python module will not be compiled because cython was not found") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") + endif(CYTHON_FOUND) +else(PYTHONINTERP_FOUND) + message("++ Python module will not be compiled because no Python interpreter was found") + set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") +endif(PYTHONINTERP_FOUND) diff --git a/src/python/CONVENTIONS b/src/python/CONVENTIONS new file mode 100644 index 00000000..804e97f3 --- /dev/null +++ b/src/python/CONVENTIONS @@ -0,0 +1,9 @@ +Gudhi is following PEP8 conventions. + +Please refer to: +https://www.python.org/dev/peps/pep-0008/ + +A summary: + - modules (filenames) should have short, all-lowercase names, and they can contain underscores. + - packages (directories) should have short, all-lowercase names, preferably without underscores. + - classes should use the CapWords convention. \ No newline at end of file diff --git a/src/python/README b/src/python/README new file mode 100644 index 00000000..7d2c4491 --- /dev/null +++ b/src/python/README @@ -0,0 +1,3 @@ + +If you do not want to install the package, just launch the following command to help Python to find the compiled package : +$> export PYTHONPATH=`pwd`:$PYTHONPATH diff --git a/src/python/doc/_templates/layout.html b/src/python/doc/_templates/layout.html new file mode 100644 index 00000000..bc0e9658 --- /dev/null +++ b/src/python/doc/_templates/layout.html @@ -0,0 +1,275 @@ +{# + basic/layout.html + ~~~~~~~~~~~~~~~~~ + + Master layout template for Sphinx themes. + + :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +#} +{%- block doctype -%} + +{%- endblock %} +{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} +{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} +{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and + (sidebars != []) %} +{%- set url_root = pathto('', 1) %} +{# XXX necessary? 
#} +{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} +{%- if not embedded and docstitle %} + {%- set titlesuffix = " — "|safe + docstitle|e %} +{%- else %} + {%- set titlesuffix = "" %} +{%- endif %} + +{%- macro relbar() %} + +{%- endmacro %} + +{%- macro sidebar() %} + {%- if render_sidebar %} + + {%- endif %} +{%- endmacro %} + +{%- macro script() %} + + {%- for scriptfile in script_files %} + + {%- endfor %} +{%- endmacro %} + +{%- macro css() %} + + + + + + {%- for cssfile in css_files %} + + {%- endfor %} +{%- endmacro %} + + + + + + {{ metatags }} + {%- block htmltitle %} + {{ title|striptags|e }}{{ titlesuffix }} + {%- endblock %} + {{ css() }} + {%- if not embedded %} + {{ script() }} + {%- if use_opensearch %} + + {%- endif %} + {%- if favicon %} + + {%- endif %} + {%- endif %} +{%- block linktags %} + {%- if hasdoc('about') %} + + {%- endif %} + {%- if hasdoc('genindex') %} + + {%- endif %} + {%- if hasdoc('search') %} + + {%- endif %} + {%- if hasdoc('copyright') %} + + {%- endif %} + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} +{%- endblock %} +{%- block extrahead %} {% endblock %} + + + + + + + +{%- block header %}{% endblock %} + +{%- block relbar1 %}{% endblock %} + +{%- block content %} + {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} + +
+  {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+          <div class="body" role="main">
+            {% block body %} {% endblock %}
+          </div>
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+  {%- endblock %}
+
+  {%- block sidebar2 %}{{ sidebar() }}{% endblock %}
+  <div class="clearer"></div>
+  </div>
+{%- endblock %} + +{%- block relbar2 %}{% endblock %} + +{%- block footer %} + +{%- endblock %} + + + diff --git a/src/python/doc/alpha_complex_ref.rst b/src/python/doc/alpha_complex_ref.rst new file mode 100644 index 00000000..7da79543 --- /dev/null +++ b/src/python/doc/alpha_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +============================== +Alpha complex reference manual +============================== + +.. autoclass:: gudhi.AlphaComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.AlphaComplex.__init__ diff --git a/src/python/doc/alpha_complex_sum.inc b/src/python/doc/alpha_complex_sum.inc new file mode 100644 index 00000000..9049e654 --- /dev/null +++ b/src/python/doc/alpha_complex_sum.inc @@ -0,0 +1,20 @@ +.. table:: + :widths: 30 50 20 + + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ + | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | + | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | | + | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The filtration value of each simplex is computed as the square of the | | + | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 `_) | + | | is then said to be Gabriel), and as the minimum of the filtration | | + | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 | + | | otherwise. All simplices that have a filtration value strictly | | + | | greater than a given alpha squared value are not inserted into the | | + | | complex. | | + | | | | + | | This package requires having CGAL version 4.7 or higher (4.8.1 is | | + | | advised for better performance). | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------+ + | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst new file mode 100644 index 00000000..d1e9c7cd --- /dev/null +++ b/src/python/doc/alpha_complex_user.rst @@ -0,0 +1,210 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Alpha complex user manual +========================= +Definition +---------- + +.. include:: alpha_complex_sum.inc + +Alpha_complex is constructing a :doc:`Simplex_tree ` using +`Delaunay Triangulation `_ +:cite:`cgal:hdj-t-15b` from `CGAL `_ (the Computational Geometry Algorithms Library +:cite:`cgal:eb-15b`). + +Remarks +^^^^^^^ +When Alpha_complex is constructed with an infinite value of :math:`\alpha`, the complex is a Delaunay complex. 
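+
+For instance, the following sketch (illustrative only, and assuming the same
+``AlphaComplex`` interface as in the examples below) requests an infinite
+threshold explicitly and therefore keeps the whole Delaunay complex:
+
+.. code-block:: python
+
+    import gudhi
+    alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6]])
+    # With an infinite alpha square threshold no simplex is filtered out:
+    # the simplex tree contains the full Delaunay complex of the points.
+    simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=float('inf'))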
+ +Example from points +------------------- + +This example builds the Delaunay triangulation from the given points, and initializes the alpha complex with it: + +.. testcode:: + + import gudhi + alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]]) + + simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0) + result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +The output is: + +.. testoutput:: + + Alpha complex is of dimension 2 - 25 simplices - 7 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [5] -> 0.00 + [6] -> 0.00 + [2, 3] -> 6.25 + [4, 5] -> 7.25 + [0, 2] -> 8.50 + [0, 1] -> 9.25 + [1, 3] -> 10.00 + [1, 2] -> 11.25 + [1, 2, 3] -> 12.50 + [0, 1, 2] -> 13.00 + [5, 6] -> 13.25 + [2, 4] -> 20.00 + [4, 6] -> 22.74 + [4, 5, 6] -> 22.74 + [3, 6] -> 30.25 + [2, 6] -> 36.50 + [2, 3, 6] -> 36.50 + [2, 4, 6] -> 37.24 + [0, 4] -> 59.71 + [0, 2, 4] -> 59.71 + + +Algorithm +--------- + +Data structure +^^^^^^^^^^^^^^ + +In order to build the alpha complex, first, a Simplex tree is built from the cells of a Delaunay Triangulation. +(The filtration value is set to NaN, which stands for unknown value): + +.. figure:: + ../../doc/Alpha_complex/alpha_complex_doc.png + :figclass: align-center + :alt: Simplex tree structure construction example + + Simplex tree structure construction example + +Filtration value computation algorithm +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + **for** i : dimension :math:`\rightarrow` 0 **do** + **for all** :math:`\sigma` of dimension i + **if** filtration(:math:`\sigma`) is NaN **then** + filtration(:math:`\sigma`) = :math:`\alpha^2(\sigma)` + **end if** + + *//propagate alpha filtration value* + + **for all** :math:`\tau` face of :math:`\sigma` + **if** filtration(:math:`\tau`) is not NaN **then** + filtration(:math:`\tau`) = filtration(:math:`\sigma`) + **end if** + **end for** + **end for** + **end for** + + make_filtration_non_decreasing() + + prune_above_filtration() + +Dimension 2 +^^^^^^^^^^^ + +From the example above, it means the algorithm looks into each triangle ([0,1,2], [0,2,4], [1,2,3], ...), +computes the filtration value of the triangle, and then propagates the filtration value as described +here: + +.. figure:: + ../../doc/Alpha_complex/alpha_complex_doc_420.png + :figclass: align-center + :alt: Filtration value propagation example + + Filtration value propagation example + +Dimension 1 +^^^^^^^^^^^ + +Then, the algorithm looks into each edge ([0,1], [0,2], [1,2], ...), +computes the filtration value of the edge (in this case, propagation will have no effect). + +Dimension 0 +^^^^^^^^^^^ + +Finally, the algorithm looks into each vertex ([0], [1], [2], [3], [4], [5] and [6]) and +sets the filtration value (0 in case of a vertex - propagation will have no effect). + +Non decreasing filtration values +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not +quite define a proper filtration (i.e. non-decreasing with respect to inclusion). +We fix that up by calling `Simplex_tree::make_filtration_non_decreasing()` (cf. +`C++ version `_). 
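+
+As a standalone illustration of what this fixing step amounts to (this is only a sketch on a toy
+filtration stored in a plain Python dict, not the library's implementation), every simplex is
+raised to at least the largest filtration value of its facets:
+
+.. code-block:: python
+
+    from itertools import combinations
+
+    # Toy filtration: simplices (sorted vertex tuples) -> filtration value.
+    # Because of rounding, the edge (0, 1) ended up slightly above its triangle.
+    filtration = {(0,): 0.0, (1,): 0.0, (2,): 0.0,
+                  (0, 1): 13.01, (0, 2): 8.5, (1, 2): 11.25,
+                  (0, 1, 2): 13.0}
+
+    # Enforce non-decreasing values with respect to inclusion: a simplex must not
+    # have a smaller filtration value than any of its facets.
+    for simplex in sorted(filtration, key=len):
+        if len(simplex) > 1:
+            facets = combinations(simplex, len(simplex) - 1)
+            filtration[simplex] = max(filtration[simplex],
+                                      max(filtration[f] for f in facets))
+
+    print(filtration[(0, 1, 2)])  # 13.01: the triangle was raised above its edge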
+ +Prune above given filtration value +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()` +in the `C++ version `_). +In the following example, the value is given by the user as argument of the program. + + +Example from OFF file +^^^^^^^^^^^^^^^^^^^^^ + +This example builds the Delaunay triangulation from the points given by an OFF file, and initializes the alpha complex +with it. + + +Then, it is asked to display information about the alpha complex: + +.. testcode:: + + import gudhi + alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \ + '/data/points/alphacomplexdoc.off') + simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0) + result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +the program output is: + +.. testoutput:: + + Alpha complex is of dimension 2 - 23 simplices - 7 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [5] -> 0.00 + [6] -> 0.00 + [2, 3] -> 6.25 + [4, 5] -> 7.25 + [0, 2] -> 8.50 + [0, 1] -> 9.25 + [1, 3] -> 10.00 + [1, 2] -> 11.25 + [1, 2, 3] -> 12.50 + [0, 1, 2] -> 13.00 + [5, 6] -> 13.25 + [2, 4] -> 20.00 + [4, 6] -> 22.74 + [4, 5, 6] -> 22.74 + [3, 6] -> 30.25 + [2, 6] -> 36.50 + [2, 3, 6] -> 36.50 + [2, 4, 6] -> 37.24 + +CGAL citations +============== + +.. bibliography:: ../../biblio/how_to_cite_cgal.bib + :filter: docnames + :style: unsrt diff --git a/src/python/doc/bottleneck_distance_sum.inc b/src/python/doc/bottleneck_distance_sum.inc new file mode 100644 index 00000000..6eb0ac19 --- /dev/null +++ b/src/python/doc/bottleneck_distance_sum.inc @@ -0,0 +1,14 @@ +.. table:: + :widths: 30 50 20 + + +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+ + | .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi | + | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | | + | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 | + | | diagonal points) such that any couple of matched points are at | | + | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: MIT (`GPL v3 `_) | + | the longest edge | norm in :math:`\mathbb{R}^2`. 
| | + | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | + +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+ + | * :doc:`bottleneck_distance_user` | | + +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/bottleneck_distance_user.rst b/src/python/doc/bottleneck_distance_user.rst new file mode 100644 index 00000000..9435c7f1 --- /dev/null +++ b/src/python/doc/bottleneck_distance_user.rst @@ -0,0 +1,67 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Bottleneck distance user manual +=============================== +Definition +---------- + +.. include:: bottleneck_distance_sum.inc + +This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" +:cite:`DBLP:journals/algorithmica/EfratIK01`. Another relevant publication, although it was not used is +"Geometry Helps to Compare Persistence Diagrams" :cite:`Kerber:2017:GHC:3047249.3064175`. + +Function +-------- +.. autofunction:: gudhi.bottleneck_distance + +Distance computation +-------------------- + +The following example explains how the distance is computed: + +.. testcode:: + + import gudhi + + message = "Bottleneck distance = " + '%.1f' % gudhi.bottleneck_distance([[0., 0.]], [[0., 13.]]) + print(message) + +.. testoutput:: + + Bottleneck distance = 6.5 + +.. figure:: + ../../doc/Bottleneck_distance/bottleneck_distance_example.png + :figclass: align-center + + The point (0, 13) is at distance 6.5 from the diagonal and more + specifically from the point (6.5, 6.5) + + +Basic example +------------- + +This other example computes the bottleneck distance from 2 persistence diagrams: + +.. testcode:: + + import gudhi + + diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]] + diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]] + + message = "Bottleneck distance approximation = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1) + print(message) + + message = "Bottleneck distance value = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2) + print(message) + +The output is: + +.. testoutput:: + + Bottleneck distance approximation = 0.81 + Bottleneck distance value = 0.75 diff --git a/src/python/doc/citation.rst b/src/python/doc/citation.rst new file mode 100644 index 00000000..117eb9dd --- /dev/null +++ b/src/python/doc/citation.rst @@ -0,0 +1,19 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Acknowledging the GUDHI library +############################### + +We kindly ask users to cite the GUDHI library as appropriately as possible in +their papers, and to mention the use of the GUDHI library on the web pages of +their projects using GUDHI and provide us with links to these web pages. Feel +free to contact us in case you have any question or remark on this topic. + +We provide GUDHI bibtex entries for the modules of the User and Reference +Manual, as well as for publications directly related to the GUDHI library. + +GUDHI bibtex +************ + +.. 
literalinclude:: ../../biblio/how_to_cite_gudhi.bib diff --git a/src/python/doc/conf.py b/src/python/doc/conf.py new file mode 100755 index 00000000..e4c718c3 --- /dev/null +++ b/src/python/doc/conf.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +# +# GUDHI documentation build configuration file, created by +# sphinx-quickstart on Thu Jun 30 09:55:51 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# Path to Gudhi.so from source path +sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'matplotlib.sphinxext.plot_directive', + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.todo', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinxcontrib.bibtex', +] + +todo_include_todos = True +# plot option : do not show hyperlinks (Source code, png, hires.png, pdf) +plot_html_show_source_link = False +plot_html_show_formats = False +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +import gudhi + +# General information about the project. +project = gudhi.__name__ +copyright = gudhi.__copyright__ + ' - MIT' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = gudhi.__version__ +# The full version, including alpha/beta/rc tags. +#release = '2.0.1-rc1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build', '*.inc'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. 
+#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'classic' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "sidebarbgcolor": "#A1ADCD", + "sidebartextcolor": "black", + "sidebarlinkcolor": "#334D5C", + "body_max_width": "100%", +} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {'installation': 'installation.html'} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'GUDHIdoc' + diff --git a/src/python/doc/cubical_complex_ref.rst b/src/python/doc/cubical_complex_ref.rst new file mode 100644 index 00000000..1fe9d5fb --- /dev/null +++ b/src/python/doc/cubical_complex_ref.rst @@ -0,0 +1,13 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Cubical complex reference manual +################################ + +.. autoclass:: gudhi.CubicalComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.CubicalComplex.__init__ diff --git a/src/python/doc/cubical_complex_sum.inc b/src/python/doc/cubical_complex_sum.inc new file mode 100644 index 00000000..f200e695 --- /dev/null +++ b/src/python/doc/cubical_complex_sum.inc @@ -0,0 +1,14 @@ +.. table:: + :widths: 30 50 20 + + +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ + | .. figure:: | The cubical complex is an example of a structured complex useful in | :Author: Pawel Dlotko | + | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | | + | :alt: Cubical complex representation | analysis. | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | | | + | | | :Copyright: MIT | + | | | | + +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ + | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | + | | * :doc:`periodic_cubical_complex_ref` | + +--------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/cubical_complex_user.rst b/src/python/doc/cubical_complex_user.rst new file mode 100644 index 00000000..b13b500e --- /dev/null +++ b/src/python/doc/cubical_complex_user.rst @@ -0,0 +1,168 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Cubical complex user manual +=========================== +Definition +---------- + +===================================== ===================================== ===================================== +:Author: Pawel Dlotko :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3 +===================================== ===================================== ===================================== + ++---------------------------------------------+----------------------------------------------------------------------+ +| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | +| | * :doc:`periodic_cubical_complex_ref` | ++---------------------------------------------+----------------------------------------------------------------------+ + +The cubical complex is an example of a structured complex useful in computational mathematics (specially rigorous +numerics) and image analysis. + +An *elementary interval* is an interval of a form :math:`[n,n+1]`, or :math:`[n,n]`, for :math:`n \in \mathcal{Z}`. +The first one is called *non-degenerate*, while the second one is a *degenerate* interval. A +*boundary of a elementary interval* is a chain :math:`\partial [n,n+1] = [n+1,n+1]-[n,n]` in case of +non-degenerated elementary interval and :math:`\partial [n,n] = 0` in case of degenerate elementary interval. 
An
+*elementary cube* :math:`C` is a product of elementary intervals, :math:`C=I_1 \times \ldots \times I_n`.
+The *embedding dimension* of a cube is :math:`n`, the number of elementary intervals (degenerate or not) in the product.
+The *dimension of a cube* :math:`C=I_1 \times ... \times I_n` is the number of non-degenerate elementary
+intervals in the product. The *boundary of a cube* :math:`C=I_1 \times \ldots \times I_n` is the chain obtained
+in the following way:
+
+.. math::
+
+    \partial C = (\partial I_1 \times \ldots \times I_n) + (I_1 \times \partial I_2 \times \ldots \times I_n) +
+    \ldots + (I_1 \times I_2 \times \ldots \times \partial I_n).
+
+A *cubical complex* :math:`\mathcal{K}` is a collection of cubes closed under the operation of taking boundary
+(i.e. the boundary of every cube from the collection is in the collection). A cube :math:`C` in a cubical complex
+:math:`\mathcal{K}` is *maximal* if it is not in the boundary of any other cube in :math:`\mathcal{K}`. The
+*support* of a cube :math:`C` is the set in :math:`\mathbb{R}^n` occupied by :math:`C` (:math:`n` is the embedding
+dimension of :math:`C`).
+
+Cubes may be equipped with filtration values, in which case we have a filtered cubical complex. All the cubical
+complexes considered in this implementation are filtered cubical complexes (although the range of the filtration may
+be a set of only two elements).
+
+For further details and theory of cubical complexes, please consult :cite:`kaczynski2004computational` as well as the
+following paper :cite:`peikert2012topological`.
+
+Data structure.
+---------------
+
+The implementation of the Cubical complex provides a representation of complexes that occupy a rectangular region in
+:math:`\mathbb{R}^n`. This extra assumption allows for a memory-efficient way of storing cubical complexes in the form
+of so-called bitmaps. Let
+:math:`R = [b_1,e_1] \times \ldots \times [b_n,e_n]`, for :math:`b_1,...b_n,e_1,...,e_n \in \mathbb{Z}`,
+:math:`b_i \leq e_i`, be the considered rectangular region and let :math:`\mathcal{K}` be a filtered
+cubical complex having the rectangle :math:`R` as its support. Note that the structure of the coordinate system gives
+a lexicographical ordering of the cells of :math:`\mathcal{K}`. This ordering is the basis of the presented
+bitmap-based implementation. In this implementation, the whole cubical complex is stored as a vector of the values
+of filtration. This, together with the dimension of :math:`\mathcal{K}` and the sizes of :math:`\mathcal{K}` in all
+directions, allows one to determine the dimension, neighborhood, boundary and coboundary of every cube
+:math:`C \in \mathcal{K}`.
+
+.. figure::
+    ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png
+    :alt: Cubical complex.
+    :figclass: align-center
+
+    Cubical complex.
+
+Note that the cubical complex in the figure above is, in a natural way, a product of one-dimensional cubical
+complexes in :math:`\mathbb{R}`. The number of all cubes in each direction is equal to :math:`2n+1`, where :math:`n` is
+the number of maximal cubes in the considered direction. Let us consider a cube at the position :math:`k` in the
+bitmap.
+Knowing the sizes of the bitmap, by a series of modulo operations, we can determine which elementary intervals are
+present in the product that gives the cube :math:`C`. In a similar way, we can compute the boundary and the coboundary
+of each cube. Further details can be found in the literature.
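+
+For instance, the following standalone sketch decodes a bitmap position into its elementary
+intervals. It is only an illustration, not the data structure used by the library, and its
+conventions (positions counted from zero, first direction varying fastest, odd coordinates
+corresponding to non-degenerate intervals) are assumptions made for the example:
+
+.. code-block:: python
+
+    def decode_cube(k, numbers_of_maximal_cubes):
+        """Sketch: turn the bitmap position k into a list of elementary intervals."""
+        intervals = []
+        for n in numbers_of_maximal_cubes:
+            size = 2 * n + 1          # number of cells of the bitmap in this direction
+            c, k = k % size, k // size
+            if c % 2 == 0:            # even coordinate: degenerate interval [c/2, c/2]
+                intervals.append((c // 2, c // 2))
+            else:                     # odd coordinate: non-degenerate interval
+                intervals.append(((c - 1) // 2, (c - 1) // 2 + 1))
+        return intervals
+
+    # A 3 x 3 grid of maximal cubes gives a 7 x 7 bitmap; cell 24 is the central 2-cube.
+    intervals = decode_cube(24, [3, 3])
+    print(intervals)                          # [(1, 2), (1, 2)]
+    print(sum(a != b for a, b in intervals))  # dimension of the cube: 2
+
+Input Format.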
+------------- + +In the current implantation, filtration is given at the maximal cubes, and it is then extended by the lower star +filtration to all cubes. There are a number of constructors that can be used to construct cubical complex by users +who want to use the code directly. They can be found in the :doc:`cubical_complex_ref`. +Currently one input from a text file is used. It uses a format inspired from the Perseus software +`Perseus software `_ by Vidit Nanda. + +.. note:: + While Perseus assume the filtration of all maximal cubes to be non-negative, over here we do not enforce this and + we allow any filtration values. As a consequence one cannot use ``-1``'s to indicate missing cubes. If you have + missing cubes in your complex, please set their filtration to :math:`+\infty` (aka. ``inf`` in the file). + +The file format is described in details in :ref:`Perseus file format` file format section. + +.. testcode:: + + import gudhi + cubical_complex = gudhi.CubicalComplex(perseus_file=gudhi.__root_source_dir__ + \ + '/data/bitmap/cubicalcomplexdoc.txt') + result_str = 'Cubical complex is of dimension ' + repr(cubical_complex.dimension()) + ' - ' + \ + repr(cubical_complex.num_simplices()) + ' simplices.' + print(result_str) + +the program output is: + +.. testoutput:: + + Cubical complex is of dimension 2 - 49 simplices. + +Periodic boundary conditions. +----------------------------- + +Often one would like to impose periodic boundary conditions to the cubical complex (cf. +:doc:`periodic_cubical_complex_ref`). +Let :math:`I_1\times ... \times I_n` be a box that is decomposed with a cubical complex :math:`\mathcal{K}`. +Imposing periodic boundary conditions in the direction i, means that the left and the right side of a complex +:math:`\mathcal{K}` are considered the same. In particular, if for a bitmap :math:`\mathcal{K}` periodic boundary +conditions are imposed in all directions, then complex :math:`\mathcal{K}` became n-dimensional torus. One can use +various constructors from the file Bitmap_cubical_complex_periodic_boundary_conditions_base.h to construct cubical +complex with periodic boundary conditions. + +One can also use Perseus style input files (see :doc:`Perseus `) for the specific periodic case: + +.. testcode:: + + import gudhi + periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \ + '/data/bitmap/periodiccubicalcomplexdoc.txt') + result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \ + repr(periodic_cc.num_simplices()) + ' simplices.' + print(result_str) + +the program output is: + +.. testoutput:: + + Periodic cubical complex is of dimension 2 - 42 simplices. + +Or it can be defined as follows: + +.. testcode:: + + from gudhi import PeriodicCubicalComplex as pcc + periodic_cc = pcc(dimensions=[3,3], + top_dimensional_cells= [0, 0, 0, 0, 1, 0, 0, 0, 0], + periodic_dimensions=[True, False]) + result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \ + repr(periodic_cc.num_simplices()) + ' simplices.' + print(result_str) + +the program output is: + +.. testoutput:: + + Periodic cubical complex is of dimension 2 - 42 simplices. + +Examples. +--------- + +End user programs are available in python/example/ folder. + +Bibliography +============ + +.. 
bibliography:: ../../biblio/bibliography.bib + :filter: docnames + :style: unsrt diff --git a/src/python/doc/euclidean_strong_witness_complex_ref.rst b/src/python/doc/euclidean_strong_witness_complex_ref.rst new file mode 100644 index 00000000..1a602cd5 --- /dev/null +++ b/src/python/doc/euclidean_strong_witness_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +================================================= +Euclidean strong witness complex reference manual +================================================= + +.. autoclass:: gudhi.EuclideanStrongWitnessComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.EuclideanStrongWitnessComplex.__init__ diff --git a/src/python/doc/euclidean_witness_complex_ref.rst b/src/python/doc/euclidean_witness_complex_ref.rst new file mode 100644 index 00000000..28daf965 --- /dev/null +++ b/src/python/doc/euclidean_witness_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +========================================== +Euclidean witness complex reference manual +========================================== + +.. autoclass:: gudhi.EuclideanWitnessComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.EuclideanWitnessComplex.__init__ diff --git a/src/python/doc/examples.rst b/src/python/doc/examples.rst new file mode 100644 index 00000000..edbc2f72 --- /dev/null +++ b/src/python/doc/examples.rst @@ -0,0 +1,30 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Examples +######## + +.. only:: builder_html + + * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>` + * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` + * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>` + * :download:`alpha_rips_persistence_bottleneck_distance.py <../example/alpha_rips_persistence_bottleneck_distance.py>` + * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` + * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` + * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` + * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>` + * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` + * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>` + * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` + * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` + * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` + * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` + * 
:download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` + * :download:`sparse_rips_persistence_diagram.py <../example/sparse_rips_persistence_diagram.py>` + * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>` + * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>` + * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>` + * :download:`voronoi_graph_induced_complex.py <../example/voronoi_graph_induced_complex.py>` + * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>` diff --git a/src/python/doc/fileformats.rst b/src/python/doc/fileformats.rst new file mode 100644 index 00000000..345dfdba --- /dev/null +++ b/src/python/doc/fileformats.rst @@ -0,0 +1,127 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +File formats +############ + +OFF file format +*************** + +OFF files must be conform to format described here: +http://www.geomview.org/docs/html/OFF.html + +OFF files are mainly used as point cloud inputs. Here is an example of 7 points +in a 3-dimensional space. As edges and faces are not used for point set, there +is no need to specify them (just set their numbers to 0): + +.. literalinclude:: ../../data/points/alphacomplexdoc.off + +.. centered:: ../../points/alphacomplexdoc.off + +For dimensions bigger than 3, the dimension can be set like here:: + + # Dimension is no more 3 + nOFF + # dimension 4 - 7 vertices - 0 face - 0 edge + 4 7 0 0 + # Point set: + 1.0 1.0 0.0 0.0 + 7.0 0.0 0.0 0.0 + 4.0 6.0 0.0 0.0 + 9.0 6.0 0.0 0.0 + 0.0 14.0 0.0 0.0 + 2.0 19.0 0.0 0.0 + 9.0 17.0 0.0 0.0 + +Persistence Diagram +******************* + +Such a file, whose extension is usually ``.pers``, contains a list of +persistence intervals. + +Lines starting with ``#`` are ignored (comments). + +Other lines might contain 2, 3 or 4 values (the number of values on each line +must be the same for all lines):: + + [[field] dimension] birth death + +Here is a simple sample file:: + + # Persistence diagram example + 2 2.7 3.7 + 2 9.6 14. + # Some comments + 3 34.2 34.974 + 4 3. inf + +Other sample files can be found in the `data/persistence_diagram` folder. + +Such files can be generated with +:meth:`gudhi.SimplexTree.write_persistence_diagram`, read with +:meth:`gudhi.read_persistence_intervals_grouped_by_dimension`, or +:meth:`gudhi.read_persistence_intervals_in_dimension` and displayed with +:meth:`gudhi.plot_persistence_barcode` or +:meth:`gudhi.plot_persistence_diagram`. + +Iso-cuboid +********** + +Such a file describes an iso-oriented cuboid with diagonal opposite vertices +(min_x, min_y, min_z,...) and (max_x, max_y, max_z, ...). The format is:: + + min_x min_y [min_z ...] + max_x max_y [max_z ...] + +Here is a simple sample file in the 3D case:: + + -1. -1. -1. + 1. 1. 1. + + +.. _Perseus file format: + +Perseus +******* + +This file format is a format inspired from the +`Perseus software `_ by Vidit Nanda. +The first line contains a number d begin the dimension of the bitmap (2 in the +example below). Next d lines are the numbers of top dimensional cubes in each +dimensions (3 and 3 in the example below). Next, in lexicographical order, the +filtration of top dimensional cubes is given (1 4 6 8 20 4 7 6 5 in the example +below). + +.. figure:: + ../../doc/Bitmap_cubical_complex/exampleBitmap.png + :alt: Example of a input data. 
+ :figclass: align-center + + Example of a input data. + +The input file for the following complex is: + +.. literalinclude:: ../../data/bitmap/cubicalcomplexdoc.txt + +.. centered:: ../../data/bitmap/cubicalcomplexdoc.txt + +To indicate periodic boundary conditions in a given direction, then number of +top dimensional cells in this direction have to be multiplied by -1. For +instance: + +.. literalinclude:: ../../data/bitmap/periodiccubicalcomplexdoc.txt + +.. centered:: ../../data/bitmap/periodiccubicalcomplexdoc.txt + + +Indicate that we have imposed periodic boundary conditions in the direction x, +but not in the direction y. + +Other sample files can be found in the `data/bitmap` folder. + +.. note:: + Unlike in Perseus format the filtration on the maximal cubes can be any + double precision number. Consequently one cannot mark the cubes that are + not present with ``-1``'s. To do that please set their filtration value to + :math:`+\infty` (aka. ``inf`` in the file). \ No newline at end of file diff --git a/src/python/doc/img/graphical_tools_representation.png b/src/python/doc/img/graphical_tools_representation.png new file mode 100644 index 00000000..9759f7ba Binary files /dev/null and b/src/python/doc/img/graphical_tools_representation.png differ diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst new file mode 100644 index 00000000..e379bc23 --- /dev/null +++ b/src/python/doc/index.rst @@ -0,0 +1,86 @@ +GUDHI Python module documentation +################################# + +.. figure:: + ../../doc/common/Gudhi_banner.png + :alt: Gudhi banner + :figclass: align-center + +Complexes +********* + +Cubical complexes +================= + +.. include:: cubical_complex_sum.inc + +Simplicial complexes +==================== + +Alpha complex +------------- + +.. include:: alpha_complex_sum.inc + +Rips complex +------------- + +.. include:: rips_complex_sum.inc + +Witness complex +--------------- + +.. include:: witness_complex_sum.inc + +Cover complexes +=============== + +.. include:: nerve_gic_complex_sum.inc + +Data structures and basic operations +************************************ + +Data structures +=============== + +Simplex tree +------------ + +.. include:: simplex_tree_sum.inc + +Topological descriptors computation +*********************************** + +Persistence cohomology +====================== + +.. include:: persistent_cohomology_sum.inc + +Manifold reconstruction +*********************** + +Tangential complex +================== + +.. include:: tangential_complex_sum.inc + + +Topological descriptors tools +***************************** + +Bottleneck distance +=================== + +.. include:: bottleneck_distance_sum.inc + +Persistence graphical tools +=========================== + +.. include:: persistence_graphical_tools_sum.inc + +Bibliography +************ + +.. bibliography:: ../../biblio/bibliography.bib + :filter: docnames + :style: unsrt diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst new file mode 100644 index 00000000..1c672ce3 --- /dev/null +++ b/src/python/doc/installation.rst @@ -0,0 +1,256 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Installation +############ + +Compiling +********* +The library uses c++11 and requires `Boost `_ ≥ 1.56.0, +`CMake `_ ≥ 3.1 to generate makefiles, and +`python `_ to compile the GUDHI Python module. +It is a multi-platform library and compiles on Linux, Mac OSX and Visual +Studio 2015. 
+ +On `Windows `_ , only Python +3.5 and 3.6 are available because of the required Visual Studio version. + +On other systems, if you have several Python/python installed, the version 2.X +will be used by default, but you can force it by adding +:code:`-DPython_ADDITIONAL_VERSIONS=3` to the cmake command. + +GUDHI Python module compilation +=============================== + +To build the GUDHI Python module, run the following commands in a terminal: + +.. code-block:: bash + + cd /path-to-gudhi/ + mkdir build + cd build/ + cmake .. + cd python + make + +GUDHI Python module installation +================================ + +Once the compilation succeeds, one can add the GUDHI Python module path to the +PYTHONPATH: + +.. code-block:: bash + + # For windows, you have to set PYTHONPATH environment variable + export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/python' + +Or install it definitely in your Python packages folder: + +.. code-block:: bash + + cd /path-to-gudhi/build/python + # May require sudo or administrator privileges + make install + + +Test suites +=========== + +To test your build, `py.test `_ is optional. Run the +following command in a terminal: + +.. code-block:: bash + + cd /path-to-gudhi/build/python + # For windows, you have to set PYTHONPATH environment variable + export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/python' + make test + +Debugging issues +================ + +If tests fail, please check your PYTHONPATH and try to :code:`import gudhi` +and check the errors. +The problem can come from a third-party library bad link or installation. + +If :code:`import gudhi` succeeds, please have a look to debug information: + +.. code-block:: python + + import gudhi + print(gudhi.__debug_info__) + +You shall have something like: + +.. code-block:: none + + Python version 2.7.15 + python version 0.26.1 + Eigen3 version 3.1.1 + Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex; + persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;alpha_complex; + euclidean_witness_complex;euclidean_strong_witness_complex; + Missing modules are: bottleneck_distance;nerve_gic;subsampling;tangential_complex;persistence_graphical_tools; + CGAL version 4.7.1000 + GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so + GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so + TBB version 9107 found and used + +Here, you can see that bottleneck_distance, nerve_gic, subsampling and +tangential_complex are missing because of the CGAL version. +persistence_graphical_tools is not available as numpy and matplotlib are not +available. +Unitary tests cannot be run as pytest is missing. + +A complete configuration would be : + +.. 
code-block:: none + + Python version 3.6.5 + python version 0.28.2 + Pytest version 3.3.2 + Matplotlib version 2.2.2 + Numpy version 1.14.5 + Eigen3 version 3.3.4 + Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex; + persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;persistence_graphical_tools; + bottleneck_distance;nerve_gic;subsampling;tangential_complex;alpha_complex;euclidean_witness_complex; + euclidean_strong_witness_complex; + CGAL header only version 4.11.0 + GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so + GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so + TBB version 9107 found and used + +Documentation +============= + +To build the documentation, `sphinx-doc `_ and +`sphinxcontrib-bibtex `_ are +required. As the documentation is auto-tested, `CGAL`_, `Eigen3`_, +`Matplotlib`_, `NumPy`_ and `SciPy`_ are also mandatory to build the +documentation. + +Run the following commands in a terminal: + +.. code-block:: bash + + cd /path-to-gudhi/build/python + make sphinx + +Optional third-party library +**************************** + +CGAL +==== + +The :doc:`Alpha complex `, +:doc:`Tangential complex ` and +:doc:`Witness complex ` data structures, and +:doc:`Bottleneck distance ` requires CGAL, which is a +C++ library which provides easy access to efficient and reliable geometric +algorithms. + +The procedure to install this library +according to your operating system is detailed +`here `_. + +The following examples requires CGAL version ≥ 4.11.0: + +.. only:: builder_html + + * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` + * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` + * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>` + * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` + * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` + * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` + +Eigen3 +====== + +The :doc:`Alpha complex `, +:doc:`Tangential complex ` and +:doc:`Witness complex ` data structures and few +examples requires `Eigen3 `_, a C++ template +library for linear algebra: matrices, vectors, numerical solvers, and related +algorithms. + +The following examples require the `Eigen3 `_: + +.. 
only:: builder_html + + * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` + * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>` + * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` + * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` + * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` + +Matplotlib +========== + +The :doc:`persistence graphical tools ` +module requires `Matplotlib `_, a Python 2D plotting +library which produces publication quality figures in a variety of hardcopy +formats and interactive environments across platforms. + +The following examples require the `Matplotlib `_: + +.. only:: builder_html + + * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` + * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` + * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` + * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` + * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` + * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` + * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` + * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` + * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` + +NumPy +===== + +The :doc:`persistence graphical tools ` +module requires `NumPy `_, a fundamental package for +scientific computing with Python. + +The following examples require the `NumPy `_: + +.. 
only:: builder_html + + * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` + * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>` + * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` + * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` + * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` + * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` + * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` + * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` + * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>` + +SciPy +===== + +The :doc:`persistence graphical tools ` +module requires `SciPy `_, a Python-based ecosystem of +open-source software for mathematics, science, and engineering. + +Threading Building Blocks +========================= + +`Intel® TBB `_ lets you easily write +parallel C++ programs that take full advantage of multicore performance, that +are portable and composable, and that have future-proof scalability. + +Having Intel® TBB installed is recommended to parallelize and accelerate some +GUDHI computations. + +Bug reports and contributions +***************************** + +Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to: + + Contact: gudhi-users@lists.gforge.inria.fr + +GUDHI is open to external contributions. If you want to join our development team, please contact us. diff --git a/src/python/doc/nerve_gic_complex_ref.rst b/src/python/doc/nerve_gic_complex_ref.rst new file mode 100644 index 00000000..abde2e8c --- /dev/null +++ b/src/python/doc/nerve_gic_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +================================ +Cover complexes reference manual +================================ + +.. autoclass:: gudhi.CoverComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.CoverComplex.__init__ diff --git a/src/python/doc/nerve_gic_complex_sum.inc b/src/python/doc/nerve_gic_complex_sum.inc new file mode 100644 index 00000000..d633c4ff --- /dev/null +++ b/src/python/doc/nerve_gic_complex_sum.inc @@ -0,0 +1,16 @@ +.. table:: + :widths: 30 50 20 + + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière | + | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | | + | :alt: Graph Induced Complex of a point cloud. | about the input data. 
They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 | + | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | | + | | the image of a scalar-valued function defined on the data. | :Copyright: MIT (`GPL v3 `_) | + | | | | + | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | + | | | | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | * :doc:`nerve_gic_complex_user` | * :doc:`nerve_gic_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/nerve_gic_complex_user.rst b/src/python/doc/nerve_gic_complex_user.rst new file mode 100644 index 00000000..9101f45d --- /dev/null +++ b/src/python/doc/nerve_gic_complex_user.rst @@ -0,0 +1,315 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Cover complexes user manual +=========================== +Definition +---------- + +.. include:: nerve_gic_complex_sum.inc + +Visualizations of the simplicial complexes can be done with either +neato (from `graphviz `_), +`geomview `_, +`KeplerMapper `_. +Input point clouds are assumed to be OFF files (cf. :doc:`fileformats`). + +Covers +------ + +Nerves and Graph Induced Complexes require a cover C of the input point cloud P, +that is a set of subsets of P whose union is P itself. +Very often, this cover is obtained from the preimage of a family of intervals covering +the image of some scalar-valued function f defined on P. This family is parameterized +by its resolution, which can be either the number or the length of the intervals, +and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values). + +Nerves +------ + +Nerve definition +^^^^^^^^^^^^^^^^ + +Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover +is the simplicial complex that has one k-simplex per k-fold intersection of cover elements. +See also `Wikipedia `_. + +.. figure:: + ../../doc/Nerve_GIC/nerve.png + :figclass: align-center + :alt: Nerve of a double torus + + Nerve of a double torus + +Example +^^^^^^^ + +This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off). +The cover C comes from the preimages of intervals (10 intervals with gain 0.3) +covering the height function (coordinate 2), +which are then refined into their connected components using the triangulation of the .OFF file. + +.. 
testcode:: + + import gudhi + nerve_complex = gudhi.CoverComplex() + nerve_complex.set_verbose(True) + + if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ + '/data/points/human.off')): + nerve_complex.set_type('Nerve') + nerve_complex.set_color_from_coordinate(2) + nerve_complex.set_function_from_coordinate(2) + nerve_complex.set_graph_from_OFF() + nerve_complex.set_resolution_with_interval_number(10) + nerve_complex.set_gain(0.3) + nerve_complex.set_cover_from_function() + nerve_complex.find_simplices() + nerve_complex.write_info() + simplex_tree = nerve_complex.create_simplex_tree() + nerve_complex.compute_PD() + result_str = 'Nerve is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + for filtered_value in simplex_tree.get_filtration(): + print(filtered_value[0]) + +the program output is: + +.. code-block:: none + + Min function value = -0.979672 and Max function value = 0.816414 + Interval 0 = [-0.979672, -0.761576] + Interval 1 = [-0.838551, -0.581967] + Interval 2 = [-0.658942, -0.402359] + Interval 3 = [-0.479334, -0.22275] + Interval 4 = [-0.299725, -0.0431414] + Interval 5 = [-0.120117, 0.136467] + Interval 6 = [0.059492, 0.316076] + Interval 7 = [0.239101, 0.495684] + Interval 8 = [0.418709, 0.675293] + Interval 9 = [0.598318, 0.816414] + Computing preimages... + Computing connected components... + 5 interval(s) in dimension 0: + [-0.909111, 0.0081753] + [-0.171433, 0.367393] + [-0.171433, 0.367393] + [-0.909111, 0.745853] + 0 interval(s) in dimension 1: + +.. testoutput:: + + Nerve is of dimension 1 - 41 simplices - 21 vertices. + [0] + [1] + [4] + [1, 4] + [2] + [0, 2] + [8] + [2, 8] + [5] + [4, 5] + [9] + [8, 9] + [13] + [5, 13] + [14] + [9, 14] + [19] + [13, 19] + [25] + [32] + [20] + [20, 32] + [33] + [25, 33] + [26] + [14, 26] + [19, 26] + [42] + [26, 42] + [34] + [33, 34] + [27] + [20, 27] + [35] + [27, 35] + [34, 35] + [35, 42] + [44] + [35, 44] + [54] + [44, 54] + + +The program also writes a file ../../data/points/human.off_sc.txt. The first +three lines in this file are the location of the input point cloud and the +function used to compute the cover. +The fourth line contains the number of vertices nv and edges ne of the Nerve. +The next nv lines represent the vertices. Each line contains the vertex ID, +the number of data points it contains, and their average color function value. +Finally, the next ne lines represent the edges, characterized by the ID of +their vertices. + +Using KeplerMapper, one can obtain the following visualization: + +.. figure:: + ../../doc/Nerve_GIC/nervevisu.jpg + :figclass: align-center + :alt: Visualization with KeplerMapper + + Visualization with KeplerMapper + +Graph Induced Complexes (GIC) +----------------------------- + +GIC definition +^^^^^^^^^^^^^^ + +Again, assume you are given a cover C of your point cloud P. Moreover, assume +you are also given a graph G built on top of P. Then, for any clique in G +whose nodes all belong to different elements of C, the GIC includes a +corresponding simplex, whose dimension is the number of nodes in the clique +minus one. +See :cite:`Dey13` for more details. + +.. figure:: + ../../doc/Nerve_GIC/GIC.jpg + :figclass: align-center + :alt: GIC of a point cloud + + GIC of a point cloud + +Example with cover from Voronoï +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This example builds the GIC of a point cloud sampled on a 3D human shape +(human.off). 
+We randomly subsampled 100 points in the point cloud, which act as seeds of +a geodesic Voronoï diagram. Each cell of the diagram is then an element of C. +The graph G (used to compute both the geodesics for Voronoï and the GIC) +comes from the triangulation of the human shape. Note that the resulting +simplicial complex is in dimension 3 in this example. + +.. testcode:: + + import gudhi + nerve_complex = gudhi.CoverComplex() + + if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ + '/data/points/human.off')): + nerve_complex.set_type('GIC') + nerve_complex.set_color_from_coordinate() + nerve_complex.set_graph_from_OFF() + nerve_complex.set_cover_from_Voronoi(700) + nerve_complex.find_simplices() + nerve_complex.plot_off() + +the program outputs SC.off. Using e.g. + +.. code-block:: none + + geomview ../../data/points/human.off_sc.off + +one can obtain the following visualization: + +.. figure:: + ../../doc/Nerve_GIC/gicvoronoivisu.jpg + :figclass: align-center + :alt: Visualization with Geomview + + Visualization with Geomview + +Functional GIC +^^^^^^^^^^^^^^ + +If one restricts to the cliques in G whose nodes all belong to preimages of +consecutive intervals (assuming the cover of the height function is minimal, +i.e. no more than two intervals can intersect at a time), the GIC is of +dimension one, i.e. a graph. +We call this graph the functional GIC. See :cite:`Carriere16` for more details. + +Example +^^^^^^^ + +Functional GIC comes with automatic selection of the Rips threshold, +the resolution and the gain of the function cover. See :cite:`Carriere17c` for +more details. In this example, we compute the functional GIC of a Klein bottle +embedded in R^5, where the graph G comes from a Rips complex with automatic +threshold, and the cover C comes from the preimages of intervals covering the +first coordinate, with automatic resolution and gain. Note that automatic +threshold, resolution and gain can be computed as well for the Nerve. + +.. testcode:: + + import gudhi + nerve_complex = gudhi.CoverComplex() + + if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ + '/data/points/KleinBottle5D.off')): + nerve_complex.set_type('GIC') + nerve_complex.set_color_from_coordinate(0) + nerve_complex.set_function_from_coordinate(0) + nerve_complex.set_graph_from_automatic_rips() + nerve_complex.set_automatic_resolution() + nerve_complex.set_gain() + nerve_complex.set_cover_from_function() + nerve_complex.find_simplices() + nerve_complex.plot_dot() + +the program outputs SC.dot. Using e.g. + +.. code-block:: none + + neato ../../data/points/KleinBottle5D.off_sc.dot -Tpdf -o ../../data/points/KleinBottle5D.off_sc.pdf + +one can obtain the following visualization: + +.. figure:: + ../../doc/Nerve_GIC/coordGICvisu2.jpg + :figclass: align-center + :alt: Visualization with neato + + Visualization with neato + +where nodes are colored by the filter function values and, for each node, the +first number is its ID and the second is the number of data points that its +contain. + +We also provide an example on a set of 72 pictures taken around the same object +(lucky_cat.off). +The function is now the first eigenfunction given by PCA, whose values are +written in a file (lucky_cat_PCA1). Threshold, resolution and gain are +automatically selected as before. + +.. 
testcode:: + + import gudhi + nerve_complex = gudhi.CoverComplex() + + if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \ + '/data/points/COIL_database/lucky_cat.off')): + nerve_complex.set_type('GIC') + pca_file = gudhi.__root_source_dir__ + \ + '/data/points/COIL_database/lucky_cat_PCA1' + nerve_complex.set_color_from_file(pca_file) + nerve_complex.set_function_from_file(pca_file) + nerve_complex.set_graph_from_automatic_rips() + nerve_complex.set_automatic_resolution() + nerve_complex.set_gain() + nerve_complex.set_cover_from_function() + nerve_complex.find_simplices() + nerve_complex.plot_dot() + +the program outputs again SC.dot which gives the following visualization after using neato: + +.. figure:: + ../../doc/Nerve_GIC/funcGICvisu.jpg + :figclass: align-center + :alt: Visualization with neato + + Visualization with neato diff --git a/src/python/doc/periodic_cubical_complex_ref.rst b/src/python/doc/periodic_cubical_complex_ref.rst new file mode 100644 index 00000000..4b831647 --- /dev/null +++ b/src/python/doc/periodic_cubical_complex_ref.rst @@ -0,0 +1,13 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Periodic cubical complex reference manual +######################################### + +.. autoclass:: gudhi.PeriodicCubicalComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.PeriodicCubicalComplex.__init__ diff --git a/src/python/doc/persistence_graphical_tools_ref.rst b/src/python/doc/persistence_graphical_tools_ref.rst new file mode 100644 index 00000000..0b0038d9 --- /dev/null +++ b/src/python/doc/persistence_graphical_tools_ref.rst @@ -0,0 +1,11 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +============================================ +Persistence graphical tools reference manual +============================================ + +.. autofunction:: gudhi.plot_persistence_barcode +.. autofunction:: gudhi.plot_persistence_diagram +.. autofunction:: gudhi.plot_persistence_density diff --git a/src/python/doc/persistence_graphical_tools_sum.inc b/src/python/doc/persistence_graphical_tools_sum.inc new file mode 100644 index 00000000..0cdf8072 --- /dev/null +++ b/src/python/doc/persistence_graphical_tools_sum.inc @@ -0,0 +1,14 @@ +.. table:: + :widths: 30 50 20 + + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | .. figure:: | These graphical tools comes on top of persistence results and allows | :Author: Vincent Rouvreau | + | img/graphical_tools_representation.png | the user to build easily persistence barcode, diagram or density. 
| | + | | | :Introduced in: GUDHI 2.0.0 | + | | | | + | | | :Copyright: MIT | + | | | | + | | | :Requires: matplotlib, numpy and scipy | + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | * :doc:`persistence_graphical_tools_user` | * :doc:`persistence_graphical_tools_ref` | + +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/persistence_graphical_tools_user.rst b/src/python/doc/persistence_graphical_tools_user.rst new file mode 100644 index 00000000..b2124fdd --- /dev/null +++ b/src/python/doc/persistence_graphical_tools_user.rst @@ -0,0 +1,73 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Persistence graphical tools user manual +======================================= +Definition +---------- +.. include:: persistence_graphical_tools_sum.inc + + +Show persistence as a barcode +----------------------------- + +.. note:: + this function requires matplotlib and numpy to be available + +This function can display the persistence result as a barcode: + +.. plot:: + :include-source: + + import gudhi + + off_file = gudhi.__root_source_dir__ + '/data/points/tore3D_300.off' + point_cloud = gudhi.read_off(off_file=off_file) + + rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.7) + simplex_tree = rips_complex.create_simplex_tree(max_dimension=3) + diag = simplex_tree.persistence(min_persistence=0.4) + + plot = gudhi.plot_persistence_barcode(diag) + plot.show() + +Show persistence as a diagram +----------------------------- + +.. note:: + this function requires matplotlib and numpy to be available + +This function can display the persistence result as a diagram: + +.. plot:: + :include-source: + + import gudhi + + # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method + persistence_file=gudhi.__root_source_dir__ + \ + '/data/persistence_diagram/rips_on_tore3D_1307.pers' + plt = gudhi.plot_persistence_diagram(persistence_file=persistence_file, + legend=True) + plt.show() + +Persistence density +------------------- + +.. note:: + this function requires matplotlib, numpy and scipy to be available + +If you want more information on a specific dimension, for instance: + +.. plot:: + :include-source: + + import gudhi + + # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method + persistence_file=gudhi.__root_source_dir__ + \ + '/data/persistence_diagram/rips_on_tore3D_1307.pers' + plt = gudhi.plot_persistence_density(persistence_file=persistence_file, + max_intervals=0, dimension=1, legend=True) + plt.show() diff --git a/src/python/doc/persistent_cohomology_sum.inc b/src/python/doc/persistent_cohomology_sum.inc new file mode 100644 index 00000000..4d7b077e --- /dev/null +++ b/src/python/doc/persistent_cohomology_sum.inc @@ -0,0 +1,26 @@ +.. table:: + :widths: 30 50 20 + + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | .. 
figure:: | The theory of homology consists in attaching to a topological space | :Author: Clément Maria | + | ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | | + | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Introduced in: GUDHI 2.0.0 | + | | homology studies the evolution -- birth, life and death -- of these | | + | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: MIT | + | Torus | theory is essentially composed of three elements: topological spaces, | | + | | their homology groups and an evolution scheme. | | + | | | | + | | Computation of persistent cohomology using the algorithm of | | + | | :cite:`DBLP:journals/dcg/SilvaMV11` and | | + | | :cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed | | + | | Annotation Matrix implementation of | | + | | :cite:`DBLP:conf/esa/BoissonnatDM13`. | | + | | | | + +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+ + | * :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | + | | feature for reference: | + | | | + | | * :doc:`simplex_tree_ref` | + | | * :doc:`cubical_complex_ref` | + | | * :doc:`periodic_cubical_complex_ref` | + +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/persistent_cohomology_user.rst b/src/python/doc/persistent_cohomology_user.rst new file mode 100644 index 00000000..de83cda1 --- /dev/null +++ b/src/python/doc/persistent_cohomology_user.rst @@ -0,0 +1,120 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Persistent cohomology user manual +================================= +Definition +---------- +===================================== ===================================== ===================================== +:Author: Clément Maria :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3 +===================================== ===================================== ===================================== + ++-----------------------------------------------------------------+-----------------------------------------------------------------------+ +| :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence | +| | feature for reference: | +| | | +| | * :doc:`simplex_tree_ref` | +| | * :doc:`cubical_complex_ref` | +| | * :doc:`periodic_cubical_complex_ref` | ++-----------------------------------------------------------------+-----------------------------------------------------------------------+ + + +Computation of persistent cohomology using the algorithm of :cite:`DBLP:journals/dcg/SilvaMV11` and +:cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed Annotation Matrix implementation of +:cite:`DBLP:conf/esa/BoissonnatDM13`. + +The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing global +topological features like connected components, holes, cavities, etc. Persistent homology studies the evolution -- +birth, life and death -- of these features when the topological space is changing. 
Consequently, the theory is +essentially composed of three elements: + +* topological spaces +* their homology groups +* an evolution scheme. + +Topological Spaces +------------------ + +Topological spaces are represented by simplicial complexes. +Let :math:`V = \{1, \cdots ,|V|\}` be a set of *vertices*. +A *simplex* :math:`\sigma` is a subset of vertices :math:`\sigma \subseteq V`. +A *simplicial complex* :math:`\mathbf{K}` on :math:`V` is a collection of simplices :math:`\{\sigma\}`, +:math:`\sigma \subseteq V`, such that :math:`\tau \subseteq \sigma \in \mathbf{K} \Rightarrow \tau \in \mathbf{K}`. +The dimension :math:`n=|\sigma|-1` of :math:`\sigma` is its number of elements minus 1. +A *filtration* of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying +:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`. + +Homology +-------- + +For a ring :math:`\mathcal{R}`, the group of *n-chains*, denoted :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})`, of +:math:`\mathbf{K}` is the group of formal sums of n-simplices with :math:`\mathcal{R}` coefficients. The +*boundary operator* is a linear operator +:math:`\partial_n: \mathbf{C}_n(\mathbf{K},\mathcal{R}) \rightarrow \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R})` +such that :math:`\partial_n \sigma = \partial_n [v_0, \cdots , v_n] = \sum_{i=0}^n (-1)^{i}[v_0,\cdots ,\widehat{v_i}, \cdots,v_n]`, +where :math:`\widehat{v_i}` means :math:`v_i` is omitted from the list. The chain groups form a sequence: + +.. math:: + + \cdots \ \ \mathbf{C}_n(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_n\ } + \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R}) \xrightarrow{\partial_{n-1}} \cdots \xrightarrow{\ \partial_2 \ } + \mathbf{C}_1(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_1 \ } \mathbf{C}_0(\mathbf{K},\mathcal{R}) + +of finitely many groups :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})` and homomorphisms :math:`\partial_n`, indexed by +the dimension :math:`n \geq 0`. The boundary operators satisfy the property :math:`\partial_n \circ \partial_{n+1}=0` +for every :math:`n > 0` and we define the homology groups: + +.. math:: + + \mathbf{H}_n(\mathbf{K},\mathcal{R}) = \ker \partial_n / \mathrm{im} \ \partial_{n+1} + +We refer to :cite:`Munkres-elementsalgtop1984` for an introduction to homology +theory and to :cite:`DBLP:books/daglib/0025666` for an introduction to persistent homology. + +Indexing Scheme +--------------- + +"Changing" a simplicial complex consists in applying a simplicial map. An *indexing scheme* is a directed graph +together with a traversal order, such that two consecutive nodes in the graph are connected by an arrow (either forward +or backward). +The nodes represent simplicial complexes and the directed edges simplicial maps. + +From the computational point of view, there are two types of indexing schemes of interest in persistent homology: + +* linear ones + :math:`\bullet \longrightarrow \bullet \longrightarrow \cdots \longrightarrow \bullet \longrightarrow \bullet` + in persistent homology :cite:`DBLP:journals/dcg/ZomorodianC05`, +* zigzag ones + :math:`\bullet \longrightarrow \bullet \longleftarrow \cdots \longrightarrow \bullet \longleftarrow \bullet` + in zigzag persistent homology :cite:`DBLP:journals/focm/CarlssonS10`. + +These indexing schemes have a natural left-to-right traversal order, and we describe them with ranges and iterators. +In the current release of the Gudhi library, only the linear case is implemented. 
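+
+As a minimal illustration of the linear case (a small sketch, not taken from the examples listed below), one can
+build a tiny filtered complex with a :doc:`Simplex_tree <simplex_tree_ref>` and ask for its persistence; the
+filtration order of the simplices is exactly a linear indexing scheme:
+
+.. code-block:: python
+
+    import gudhi
+
+    st = gudhi.SimplexTree()
+    st.insert([0], filtration=0.0)
+    st.insert([1], filtration=0.0)
+    st.insert([0, 1], filtration=1.0)
+    # The filtration orders the simplices linearly: [0], [1], then [0, 1].
+    # One connected component survives forever, the other dies when the edge appears.
+    print(st.persistence())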
+ +In the following, we consider the case where the indexing scheme is induced by a filtration. + +Ordering the simplices by increasing filtration values (breaking ties so as a simplex appears after its subsimplices of +same filtration value) provides an indexing scheme. + +Examples +-------- + +We provide several example files: run these examples with -h for details on their use. + +.. only:: builder_html + + * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>` + * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>` + * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>` + * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>` + * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>` + * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>` + * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>` + +Bibliography +============ + +.. bibliography:: ../../biblio/bibliography.bib + :filter: docnames + :style: unsrt diff --git a/src/python/doc/python3-sphinx-build.py b/src/python/doc/python3-sphinx-build.py new file mode 100755 index 00000000..84d158cf --- /dev/null +++ b/src/python/doc/python3-sphinx-build.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 + +""" +Emulate sphinx-build for python3 +""" + +from sys import exit, argv +from sphinx import main + +if __name__ == '__main__': + exit(main(argv)) diff --git a/src/python/doc/reader_utils_ref.rst b/src/python/doc/reader_utils_ref.rst new file mode 100644 index 00000000..f3ecebad --- /dev/null +++ b/src/python/doc/reader_utils_ref.rst @@ -0,0 +1,15 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +============================= +Reader utils reference manual +============================= + +.. autofunction:: gudhi.read_off + +.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file + +.. autofunction:: gudhi.read_persistence_intervals_grouped_by_dimension + +.. autofunction:: gudhi.read_persistence_intervals_in_dimension diff --git a/src/python/doc/rips_complex_ref.rst b/src/python/doc/rips_complex_ref.rst new file mode 100644 index 00000000..22b5616c --- /dev/null +++ b/src/python/doc/rips_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +============================= +Rips complex reference manual +============================= + +.. autoclass:: gudhi.RipsComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.RipsComplex.__init__ diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc new file mode 100644 index 00000000..857c6893 --- /dev/null +++ b/src/python/doc/rips_complex_sum.inc @@ -0,0 +1,16 @@ +.. table:: + :widths: 30 50 20 + + +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ + | .. 
figure:: | Rips complex is a simplicial complex constructed from a one skeleton | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse | + | ../../doc/Rips_complex/rips_complex_representation.png | graph. | | + | :figclass: align-center | | :Introduced in: GUDHI 2.0.0 | + | | The filtration value of each edge is computed from a user-given | | + | | distance function and is inserted until a user-given threshold | :Copyright: MIT | + | | value. | | + | | | | + | | This complex can be built from a point cloud and a distance function, | | + | | or from a distance matrix. | | + +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ + | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` | + +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst new file mode 100644 index 00000000..1d340dbe --- /dev/null +++ b/src/python/doc/rips_complex_user.rst @@ -0,0 +1,345 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Rips complex user manual +========================= +Definition +---------- + +==================================================================== ================================ ====================== +:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3 +==================================================================== ================================ ====================== + ++-------------------------------------------+----------------------------------------------------------------------+ +| :doc:`rips_complex_user` | :doc:`rips_complex_ref` | ++-------------------------------------------+----------------------------------------------------------------------+ + +The `Rips complex `_ is a simplicial complex that +generalizes proximity (:math:`\varepsilon`-ball) graphs to higher dimensions. The vertices correspond to the input +points, and a simplex is present if and only if its diameter is smaller than some parameter α. Considering all +parameters α defines a filtered simplicial complex, where the filtration value of a simplex is its diameter. +The filtration can be restricted to values α smaller than some threshold, to reduce its size. + +The input discrete metric space can be provided as a point cloud plus a distance function, or as a distance matrix. + +When creating a simplicial complex from the graph, :doc:`RipsComplex ` first builds the graph and +inserts it into the data structure. It then expands the simplicial complex (adds the simplices corresponding to cliques) +when required. The expansion can be stopped at dimension `max_dimension`, by default 1. + +A vertex name corresponds to the index of the point in the given range (aka. the point cloud). + +.. figure:: + ../../doc/Rips_complex/rips_complex_representation.png + :align: center + + Rips-complex one skeleton graph representation + +On this example, as edges (4,5), (4,6) and (5,6) are in the complex, simplex (4,5,6) is added with the filtration value +set with :math:`max(filtration(4,5), filtration(4,6), filtration(5,6))`. And so on for simplex (0,1,2,3). 
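+
+As a small side illustration of this rule (a sketch, not part of the manual's examples; it reuses the seven points of
+the point cloud example below), asking for an expansion up to dimension 2 makes the triangle appear with the maximum
+of its edge filtration values:
+
+.. code-block:: python
+
+    import gudhi
+
+    rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]],
+                                     max_edge_length=12.0)
+    # Expansion up to dimension 2 adds a triangle for every 3-clique of the one skeleton graph
+    simplex_tree = rips_complex.create_simplex_tree(max_dimension=2)
+    # filtration([4, 5, 6]) is max(filtration(4, 5), filtration(4, 6), filtration(5, 6)),
+    # i.e. the length of the longest edge of the triangle (about 9.49 with these points)
+    print(simplex_tree.filtration([4, 5, 6]))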
+ +If the `RipsComplex` interfaces are not detailed enough for your need, please refer to rips_persistence_step_by_step.cpp +C++ example, where the graph construction over the Simplex_tree is more detailed. + +A Rips complex can easily become huge, even if we limit the length of the edges +and the dimension of the simplices. One easy trick, before building a Rips +complex on a point cloud, is to call `sparsify_point_set` which removes points +that are too close to each other. This does not change its persistence diagram +by more than the length used to define "too close". + +A more general technique is to use a sparse approximation of the Rips +introduced by Don Sheehy :cite:`sheehy13linear`. We are using the version +described in :cite:`buchet16efficient` (except that we multiply all filtration +values by 2, to match the usual Rips complex). :cite:`cavanna15geometric` proves +a :math:`\frac{1}{1-\varepsilon}`-interleaving, although in practice the +error is usually smaller. A more intuitive presentation of the idea is +available in :cite:`cavanna15geometric`, and in a video +:cite:`cavanna15visualizing`. Passing an extra argument `sparse=0.3` at the +construction of a `RipsComplex` object asks it to build a sparse Rips with +parameter :math:`\varepsilon=0.3`, while the default `sparse=None` builds the +regular Rips complex. + + +Point cloud +----------- + +Example from a point cloud +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This example builds the neighborhood graph from the given points, up to max_edge_length. +Then it creates a :doc:`Simplex_tree ` with it. + +Finally, it is asked to display information about the simplicial complex. + +.. testcode:: + + import gudhi + rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]], + max_edge_length=12.0) + + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +When launching (Rips maximal distance between 2 points is 12.0, is expanded +until dimension 1 - one skeleton graph in other words), the output is: + +.. testoutput:: + + Rips complex is of dimension 1 - 18 simplices - 7 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [5] -> 0.00 + [6] -> 0.00 + [2, 3] -> 5.00 + [4, 5] -> 5.39 + [0, 2] -> 5.83 + [0, 1] -> 6.08 + [1, 3] -> 6.32 + [1, 2] -> 6.71 + [5, 6] -> 7.28 + [2, 4] -> 8.94 + [0, 3] -> 9.43 + [4, 6] -> 9.49 + [3, 6] -> 11.00 + +Notice that if we use + +.. code-block:: python + + rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]], + max_edge_length=12.0, sparse=2) + +asking for a very sparse version (theory only gives some guarantee on the meaning of the output if `sparse<1`), +2 to 5 edges disappear, depending on the random vertex used to start the sparsification. + +Example from OFF file +^^^^^^^^^^^^^^^^^^^^^ + +This example builds the :doc:`RipsComplex ` from the given +points in an OFF file, and max_edge_length value. +Then it creates a :doc:`Simplex_tree ` with it. + +Finally, it is asked to display information about the Rips complex. + + +.. 
testcode:: + + import gudhi + point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off') + rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=12.0) + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +the program output is: + +.. testoutput:: + + Rips complex is of dimension 1 - 18 simplices - 7 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [5] -> 0.00 + [6] -> 0.00 + [2, 3] -> 5.00 + [4, 5] -> 5.39 + [0, 2] -> 5.83 + [0, 1] -> 6.08 + [1, 3] -> 6.32 + [1, 2] -> 6.71 + [5, 6] -> 7.28 + [2, 4] -> 8.94 + [0, 3] -> 9.43 + [4, 6] -> 9.49 + [3, 6] -> 11.00 + +Distance matrix +--------------- + +Example from a distance matrix +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This example builds the one skeleton graph from the given distance matrix, and max_edge_length value. +Then it creates a :doc:`Simplex_tree ` with it. + +Finally, it is asked to display information about the simplicial complex. + +.. testcode:: + + import gudhi + rips_complex = gudhi.RipsComplex(distance_matrix=[[], + [6.0827625303], + [5.8309518948, 6.7082039325], + [9.4339811321, 6.3245553203, 5], + [13.0384048104, 15.6524758425, 8.94427191, 12.0415945788], + [18.0277563773, 19.6468827044, 13.152946438, 14.7648230602, 5.3851648071], + [17.88854382, 17.1172427686, 12.0830459736, 11, 9.4868329805, 7.2801098893]], + max_edge_length=12.0) + + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +When launching (Rips maximal distance between 2 points is 12.0, is expanded +until dimension 1 - one skeleton graph in other words), the output is: + +.. testoutput:: + + Rips complex is of dimension 1 - 18 simplices - 7 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [5] -> 0.00 + [6] -> 0.00 + [2, 3] -> 5.00 + [4, 5] -> 5.39 + [0, 2] -> 5.83 + [0, 1] -> 6.08 + [1, 3] -> 6.32 + [1, 2] -> 6.71 + [5, 6] -> 7.28 + [2, 4] -> 8.94 + [0, 3] -> 9.43 + [4, 6] -> 9.49 + [3, 6] -> 11.00 + +Example from csv file +^^^^^^^^^^^^^^^^^^^^^ + +This example builds the :doc:`RipsComplex ` from the given +distance matrix in a csv file, and max_edge_length value. +Then it creates a :doc:`Simplex_tree ` with it. + +Finally, it is asked to display information about the Rips complex. + + +.. testcode:: + + import gudhi + distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=gudhi.__root_source_dir__ + \ + '/data/distance_matrix/full_square_distance_matrix.csv') + rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=12.0) + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' 
+ print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +the program output is: + +.. testoutput:: + + Rips complex is of dimension 1 - 18 simplices - 7 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [5] -> 0.00 + [6] -> 0.00 + [2, 3] -> 5.00 + [4, 5] -> 5.39 + [0, 2] -> 5.83 + [0, 1] -> 6.08 + [1, 3] -> 6.32 + [1, 2] -> 6.71 + [5, 6] -> 7.28 + [2, 4] -> 8.94 + [0, 3] -> 9.43 + [4, 6] -> 9.49 + [3, 6] -> 11.00 + +Correlation matrix +------------------ + +Example from a correlation matrix +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Analogously to the case of distance matrix, Rips complexes can be also constructed based on correlation matrix. +Given a correlation matrix M, comportment-wise 1-M is a distance matrix. +This example builds the one skeleton graph from the given corelation matrix and threshold value. +Then it creates a :doc:`Simplex_tree ` with it. + +Finally, it is asked to display information about the simplicial complex. + +.. testcode:: + + import gudhi + import numpy as np + + # User defined correlation matrix is: + # |1 0.06 0.23 0.01 0.89| + # |0.06 1 0.74 0.01 0.61| + # |0.23 0.74 1 0.72 0.03| + # |0.01 0.01 0.72 1 0.7 | + # |0.89 0.61 0.03 0.7 1 | + correlation_matrix=np.array([[1., 0.06, 0.23, 0.01, 0.89], + [0.06, 1., 0.74, 0.01, 0.61], + [0.23, 0.74, 1., 0.72, 0.03], + [0.01, 0.01, 0.72, 1., 0.7], + [0.89, 0.61, 0.03, 0.7, 1.]], float) + + distance_matrix = np.ones((correlation_matrix.shape),float) - correlation_matrix + rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0) + + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \ + repr(simplex_tree.num_simplices()) + ' simplices - ' + \ + repr(simplex_tree.num_vertices()) + ' vertices.' + print(result_str) + fmt = '%s -> %.2f' + for filtered_value in simplex_tree.get_filtration(): + print(fmt % tuple(filtered_value)) + +When launching (Rips maximal distance between 2 points is 12.0, is expanded +until dimension 1 - one skeleton graph in other words), the output is: + +.. testoutput:: + + Rips complex is of dimension 1 - 15 simplices - 5 vertices. + [0] -> 0.00 + [1] -> 0.00 + [2] -> 0.00 + [3] -> 0.00 + [4] -> 0.00 + [0, 4] -> 0.11 + [1, 2] -> 0.26 + [2, 3] -> 0.28 + [3, 4] -> 0.30 + [1, 4] -> 0.39 + [0, 2] -> 0.77 + [0, 1] -> 0.94 + [2, 4] -> 0.97 + [0, 3] -> 0.99 + [1, 3] -> 0.99 + +.. note:: + As persistence diagrams points will be under the diagonal, + bottleneck distance and persistence graphical tool will not work properly, + this is a known issue. diff --git a/src/python/doc/simplex_tree_ref.rst b/src/python/doc/simplex_tree_ref.rst new file mode 100644 index 00000000..9eb8c199 --- /dev/null +++ b/src/python/doc/simplex_tree_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +============================= +Simplex tree reference manual +============================= + +.. autoclass:: gudhi.SimplexTree + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.SimplexTree.__init__ diff --git a/src/python/doc/simplex_tree_sum.inc b/src/python/doc/simplex_tree_sum.inc new file mode 100644 index 00000000..5ba58d2b --- /dev/null +++ b/src/python/doc/simplex_tree_sum.inc @@ -0,0 +1,13 @@ +.. 
table:: + :widths: 30 50 20 + + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ + | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria | + | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | | + | :alt: Simplex tree representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: MIT | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ + | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` | + +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/simplex_tree_user.rst b/src/python/doc/simplex_tree_user.rst new file mode 100644 index 00000000..aebeb29f --- /dev/null +++ b/src/python/doc/simplex_tree_user.rst @@ -0,0 +1,72 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Simplex tree user manual +======================== +Definition +---------- + +.. include:: simplex_tree_sum.inc + +A simplicial complex :math:`\mathbf{K}` on a set of vertices :math:`V = \{1, \cdots ,|V|\}` is a collection of +simplices :math:`\{\sigma\}`, :math:`\sigma \subseteq V` such that +:math:`\tau \subseteq \sigma \in \mathbf{K} \rightarrow \tau \in \mathbf{K}`. The dimension :math:`n=|\sigma|-1` of +:math:`\sigma` is its number of elements minus `1`. + +A filtration of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying +:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`. Ordering the simplices by increasing filtration +values (breaking ties so as a simplex appears after its subsimplices of same filtration value) provides an indexing +scheme. + + +Implementation +-------------- + +There are two implementation of complexes. The first on is the Simplex_tree data structure. +The simplex tree is an efficient and flexible data structure for representing general (filtered) simplicial complexes. +The data structure is described in :cite`boissonnatmariasimplextreealgorithmica`. + +The second one is the Hasse_complex. The Hasse complex is a data structure representing explicitly all co-dimension 1 +incidence relations in a complex. It is consequently faster when accessing the boundary of a simplex, but is less +compact and harder to construct from scratch. + +Example +------- + +.. testcode:: + + import gudhi + st = gudhi.SimplexTree() + if st.insert([0, 1]): + print("[0, 1] inserted") + if st.insert([0, 1, 2], filtration=4.0): + print("[0, 1, 2] inserted") + if st.find([0, 1]): + print("[0, 1] found") + result_str = 'num_vertices=' + repr(st.num_vertices()) + print(result_str) + result_str = 'num_simplices=' + repr(st.num_simplices()) + print(result_str) + print("skeleton(2) =") + for sk_value in st.get_skeleton(2): + print(sk_value) + + +The output is: + +.. 
testoutput:: + + [0, 1] inserted + [0, 1, 2] inserted + [0, 1] found + num_vertices=3 + num_simplices=7 + skeleton(2) = + ([0, 1, 2], 4.0) + ([0, 1], 0.0) + ([0, 2], 4.0) + ([0], 0.0) + ([1, 2], 4.0) + ([1], 0.0) + ([2], 4.0) diff --git a/src/python/doc/strong_witness_complex_ref.rst b/src/python/doc/strong_witness_complex_ref.rst new file mode 100644 index 00000000..d624d711 --- /dev/null +++ b/src/python/doc/strong_witness_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +======================================= +Strong witness complex reference manual +======================================= + +.. autoclass:: gudhi.StrongWitnessComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.StrongWitnessComplex.__init__ diff --git a/src/python/doc/tangential_complex_ref.rst b/src/python/doc/tangential_complex_ref.rst new file mode 100644 index 00000000..cdfda082 --- /dev/null +++ b/src/python/doc/tangential_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +=================================== +Tangential complex reference manual +=================================== + +.. autoclass:: gudhi.TangentialComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.TangentialComplex.__init__ diff --git a/src/python/doc/tangential_complex_sum.inc b/src/python/doc/tangential_complex_sum.inc new file mode 100644 index 00000000..c8bc1177 --- /dev/null +++ b/src/python/doc/tangential_complex_sum.inc @@ -0,0 +1,14 @@ +.. table:: + :widths: 30 50 20 + + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | + | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | + | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 | + | | an unknown manifold. The running time depends only linearly on the | | + | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 `_) | + | | dimension :math:`k`. | | + | | | :Requires: `CGAL `__ :math:`\geq` 4.11.0 | + +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+ + | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` | + +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/tangential_complex_user.rst b/src/python/doc/tangential_complex_user.rst new file mode 100644 index 00000000..ebfe1e29 --- /dev/null +++ b/src/python/doc/tangential_complex_user.rst @@ -0,0 +1,204 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Tangential complex user manual +============================== +.. 
include:: tangential_complex_sum.inc + +Definition +---------- + +A Tangential Delaunay complex is a simplicial complex designed to reconstruct a +:math:`k`-dimensional smooth manifold embedded in :math:`d`-dimensional +Euclidean space. The input is a point sample coming from an unknown manifold, +which means that the points lie close to a structure of "small" intrinsic +dimension. The running time depends only linearly on the extrinsic dimension +:math:`d` and exponentially on the intrinsic dimension :math:`k`. + +An extensive description of the Tangential complex can be found in +:cite:`tangentialcomplex2014`. + +What is a Tangential Complex? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Let us start with the description of the Tangential complex of a simple +example, with :math:`k = 1` and :math:`d = 2`. The point set +:math:`\mathscr P` is located on a closed curve embedded in 2D. +Only 4 points will be displayed (more are required for PCA) to simplify the +figures. + +.. figure:: ../../doc/Tangential_complex/tc_example_01.png + :alt: The input + :figclass: align-center + + The input + +For each point :math:`P`, estimate its tangent subspace :math:`T_P` using PCA. + +.. figure:: ../../doc/Tangential_complex/tc_example_02.png + :alt: The estimated normals + :figclass: align-center + + The estimated normals + + +Let us add the Voronoi diagram of the points in orange. For each point +:math:`P`, construct its star in the Delaunay triangulation of +:math:`\mathscr P` restricted to :math:`T_P`. + +.. figure:: ../../doc/Tangential_complex/tc_example_03.png + :alt: The Voronoi diagram + :figclass: align-center + + The Voronoi diagram + +The Tangential Delaunay complex is the union of those stars. + +In practice, neither the ambient Voronoi diagram nor the ambient Delaunay +triangulation is computed. Instead, local :math:`k`-dimensional regular +triangulations are computed with a limited number of points as we only need the +star of each point. More details can be found in :cite:`tangentialcomplex2014`. + +Inconsistencies +^^^^^^^^^^^^^^^ +Inconsistencies between the stars can occur. An inconsistency occurs when a +simplex is not in the star of all its vertices. + +Let us take the same example. + +.. figure:: ../../doc/Tangential_complex/tc_example_07_before.png + :alt: Before + :figclass: align-center + + Before + +Let us slightly move the tangent subspace :math:`T_Q` + +.. figure:: ../../doc/Tangential_complex/tc_example_07_after.png + :alt: After + :figclass: align-center + + After + +Now, the star of :math:`Q` contains :math:`QP`, but the star of :math:`P` does +not contain :math:`QP`. We have an inconsistency. + +.. figure:: ../../doc/Tangential_complex/tc_example_08.png + :alt: After + :figclass: align-center + + After + +One way to solve inconsistencies is to randomly perturb the positions of the +points involved in an inconsistency. In the current implementation, this +perturbation is done in the tangent subspace of each point. The maximum +perturbation radius is given as a parameter to the constructor. + +In most cases, we recommend to provide a point set where the minimum distance +between any two points is not too small. This can be achieved using the +functions provided by the Subsampling module. Then, a good value to start with +for the maximum perturbation radius would be around half the minimum distance +between any two points. The Example with perturbation below shows an example of +such a process. 
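+
+A possible way to choose such a starting radius is sketched below (this is only an illustration with numpy on a
+random point cloud, not part of the Tangential complex interface):
+
+.. code-block:: python
+
+    import numpy as np
+
+    points = np.random.rand(100, 3)   # illustrative point cloud, one point per row
+    distances = np.linalg.norm(points[:, None, :] - points[None, :, :], axis=-1)
+    np.fill_diagonal(distances, np.inf)   # ignore the zero distance of a point to itself
+    # half the minimum pairwise distance, to be used as the maximum perturbation radius
+    max_perturb = 0.5 * distances.min()
+
+The value obtained this way can then be used as the maximum perturbation radius in the example with perturbation
+below.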
+ +In most cases, this process is able to dramatically reduce the number of +inconsistencies, but is not guaranteed to succeed. + +Output +^^^^^^ +The result of the computation is exported as a Simplex_tree. It is the union of +the stars of all the input points. A vertex in the Simplex Tree is the index of +the point in the range provided by the user. The point corresponding to a +vertex can also be obtained through the Tangential_complex::get_point function. +Note that even if the positions of the points are perturbed, their original +positions are kept (e.g. Tangential_complex::get_point returns the original +position of the point). + +The result can be obtained after the computation of the Tangential complex +itself and/or after the perturbation process. + + +Simple example +-------------- + +This example builds the Tangential complex of point set read in an OFF file. + +.. testcode:: + + import gudhi + tc = gudhi.TangentialComplex(intrisic_dim = 1, + off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off') + tc.compute_tangential_complex() + result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \ + ' simplices - ' + repr(tc.num_vertices()) + ' vertices.' + print(result_str) + + st = tc.create_simplex_tree() + result_str = 'Simplex tree is of dimension ' + repr(st.dimension()) + \ + ' - ' + repr(st.num_simplices()) + ' simplices - ' + \ + repr(st.num_vertices()) + ' vertices.' + print(result_str) + for filtered_value in st.get_filtration(): + print(filtered_value[0]) + +The output is: + +.. testoutput:: + + Tangential contains 12 simplices - 7 vertices. + Simplex tree is of dimension 1 - 15 simplices - 7 vertices. + [0] + [1] + [0, 1] + [2] + [0, 2] + [1, 2] + [3] + [1, 3] + [4] + [2, 4] + [5] + [4, 5] + [6] + [3, 6] + [5, 6] + + +Example with perturbation +------------------------- + +This example builds the Tangential complex of a point set, then tries to solve +inconsistencies by perturbing the positions of points involved in inconsistent +simplices. + +.. testcode:: + + import gudhi + tc = gudhi.TangentialComplex(intrisic_dim = 1, + points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]) + tc.compute_tangential_complex() + result_str = 'Tangential contains ' + repr(tc.num_vertices()) + ' vertices.' + print(result_str) + + if tc.num_inconsistent_simplices() > 0: + print('Tangential contains inconsistencies.') + + tc.fix_inconsistencies_using_perturbation(10, 60) + if tc.num_inconsistent_simplices() == 0: + print('Inconsistencies has been fixed.') + +The output is: + +.. testoutput:: + + Tangential contains 4 vertices. + Inconsistencies has been fixed. + + +Bibliography +============ + +.. bibliography:: ../../biblio/bibliography.bib + :filter: docnames + :style: unsrt diff --git a/src/python/doc/todos.rst b/src/python/doc/todos.rst new file mode 100644 index 00000000..ca274ced --- /dev/null +++ b/src/python/doc/todos.rst @@ -0,0 +1,9 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +========== +To be done +========== + +.. todolist:: diff --git a/src/python/doc/witness_complex_ref.rst b/src/python/doc/witness_complex_ref.rst new file mode 100644 index 00000000..9987d3fd --- /dev/null +++ b/src/python/doc/witness_complex_ref.rst @@ -0,0 +1,14 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +================================ +Witness complex reference manual +================================ + +.. autoclass:: gudhi.WitnessComplex + :members: + :undoc-members: + :show-inheritance: + + .. 
automethod:: gudhi.WitnessComplex.__init__ diff --git a/src/python/doc/witness_complex_sum.inc b/src/python/doc/witness_complex_sum.inc new file mode 100644 index 00000000..2be8b220 --- /dev/null +++ b/src/python/doc/witness_complex_sum.inc @@ -0,0 +1,18 @@ +.. table:: + :widths: 30 50 20 + + +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ + | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich | + | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | | + | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 `_ for Euclidean versions only) | + | | | | + | | | :Requires: `Eigen3 `__ and `CGAL `__ :math:`\geq` 4.11.0 for Euclidean versions only | + +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+ + | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` | + | | * :doc:`strong_witness_complex_ref` | + | | * :doc:`euclidean_witness_complex_ref` | + | | * :doc:`euclidean_strong_witness_complex_ref` | + +-------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + diff --git a/src/python/doc/witness_complex_user.rst b/src/python/doc/witness_complex_user.rst new file mode 100644 index 00000000..40e94134 --- /dev/null +++ b/src/python/doc/witness_complex_user.rst @@ -0,0 +1,135 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +Witness complex user manual +=========================== + +.. include:: witness_complex_sum.inc + +Definitions +----------- + +Witness complex is a simplicial complex defined on two sets of points in :math:`\mathbb{R}^D`: + +- :math:`W` set of **witnesses** and +- :math:`L` set of **landmarks**. + +Even though often the set of landmarks :math:`L` is a subset of the set of witnesses :math:`W`, it is not a requirement +for the current implementation. + +Landmarks are the vertices of the simplicial complex and witnesses help to decide on which simplices are inserted via a +predicate "is witnessed". + +De Silva and Carlsson in their paper :cite:`de2004topological` differentiate **weak witnessing** and +**strong witnessing**: + +- *weak*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l) \leq d(w,l')` +- *strong*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l) \leq d(w,l')` + +where :math:`d(.,.)` is a distance function. 
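+
+Spelled out on point coordinates, the weak predicate can be read as follows (a small numpy sketch, purely
+illustrative and not part of the GUDHI interface; the relaxed versions below simply compare squared distances shifted
+by :math:`\alpha^2`):
+
+.. code-block:: python
+
+    import numpy as np
+
+    def is_weakly_witnessed(w, sigma, landmarks):
+        # sigma is a list of indices into the landmarks array
+        dist = np.linalg.norm(landmarks - w, axis=1)           # d(w, l) for every landmark l
+        others = np.setdiff1d(np.arange(len(landmarks)), sigma)
+        if others.size == 0:                                   # sigma uses every landmark
+            return True
+        # every vertex of sigma must be at least as close to w as any landmark outside sigma
+        return dist[sigma].max() <= dist[others].min()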
+
+Both definitions can be relaxed by a real value :math:`\alpha`:
+
+- *weak*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2`
+- *strong*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2`
+
+which leads to the definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and
+**strong relaxed witness complex** respectively.
+
+.. figure:: ../../doc/Witness_complex/swit.svg
+   :alt: Strongly witnessed simplex
+   :figclass: align-center
+
+   Strongly witnessed simplex
+
+
+In the particular case of 0-relaxation, the weak complex corresponds to the **witness complex** introduced in
+:cite:`de2004topological`, whereas the 0-relaxed strong witness complex consists of just the vertices and is not very
+interesting. Hence, for small relaxations, the weak version is preferable.
+However, to capture the homotopy type (for example using Gudhi::persistent_cohomology::Persistent_cohomology) it is
+often necessary to work with higher filtration values. In this case the strong relaxed witness complex is faster to
+compute and offers similar results.
+
+Implementation
+--------------
+
+The two complexes described above are implemented in the corresponding classes
+
+- :doc:`witness_complex_ref`
+- :doc:`strong_witness_complex_ref`
+- :doc:`euclidean_witness_complex_ref`
+- :doc:`euclidean_strong_witness_complex_ref`
+
+The construction of the Euclidean versions of the complexes follows the same scheme:
+
+1. Construct a search tree on the landmarks.
+2. Construct lists of nearest landmarks for each witness.
+3. Construct the witness complex from the nearest landmark lists.
+
+In the non-Euclidean classes, the lists of nearest landmarks are supposed to be given as input.
+
+The constructors perform steps 1 and 2, while the function 'create_complex' executes step 3.
+
+Constructing weak relaxed witness complex from an OFF file
+----------------------------------------------------------
+
+Let's start with a simple example, which reads an OFF point file and computes a weak witness complex.
+
+..
code-block:: python + + import gudhi + import argparse + + parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from ' + 'points read in a OFF file.', + epilog='Example: ' + 'example/witness_complex_diagram_persistence_from_off_file_example.py ' + '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2' + '- Constructs a alpha complex with the ' + 'points from the given OFF file.') + parser.add_argument("-f", "--file", type=str, required=True) + parser.add_argument("-a", "--max_alpha_square", type=float, required=True) + parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) + parser.add_argument("-d", "--limit_dimension", type=int, required=True) + + args = parser.parse_args() + + with open(args.file, 'r') as f: + first_line = f.readline() + if (first_line == 'OFF\n') or (first_line == 'nOFF\n'): + print("#####################################################################") + print("EuclideanWitnessComplex creation from points read in a OFF file") + + witnesses = gudhi.read_off(off_file=args.file) + landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks) + + message = "EuclideanWitnessComplex with max_edge_length=" + repr(args.max_alpha_square) + \ + " - Number of landmarks=" + repr(args.number_of_landmarks) + print(message) + + witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks) + simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square, + limit_dimension=args.limit_dimension) + + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) + print(message) + else: + print(args.file, "is not a valid OFF file") + + f.close() + + +Example2: Computing persistence using strong relaxed witness complex +-------------------------------------------------------------------- + +Here is an example of constructing a strong witness complex filtration and computing persistence on it: + +* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>` + +Bibliography +============ + +.. bibliography:: ../../biblio/bibliography.bib + :filter: docnames + :style: unsrt diff --git a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py new file mode 100755 index 00000000..b8f283b3 --- /dev/null +++ b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="AlphaComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/alpha_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 0.6" + "- Constructs a alpha complex with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +with open(args.file, "r") as f: + first_line = f.readline() + if (first_line == "OFF\n") or (first_line == "nOFF\n"): + print("#####################################################################") + print("AlphaComplex creation from points read in a OFF file") + + message = "AlphaComplex with max_edge_length=" + repr(args.max_alpha_square) + print(message) + + alpha_complex = gudhi.AlphaComplex(off_file=args.file) + simplex_tree = alpha_complex.create_simplex_tree( + max_alpha_square=args.max_alpha_square + ) + + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) + print(message) + + diag = simplex_tree.persistence() + + print("betti_numbers()=") + print(simplex_tree.betti_numbers()) + + if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(diag, band=args.band) + pplot.show() + else: + print(args.file, "is not a valid OFF file") + + f.close() diff --git a/src/python/example/alpha_complex_from_points_example.py b/src/python/example/alpha_complex_from_points_example.py new file mode 100755 index 00000000..a746998c --- /dev/null +++ b/src/python/example/alpha_complex_from_points_example.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +from gudhi import AlphaComplex, SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("AlphaComplex creation from points") +alpha_complex = AlphaComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]]) +simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0) + +if simplex_tree.find([0, 1]): + print("[0, 1] Found !!") +else: + print("[0, 1] Not found...") + +if simplex_tree.find([4]): + print("[4] Found !!") +else: + print("[4] Not found...") + +if simplex_tree.insert([0, 1, 2], filtration=4.0): + print("[0, 1, 2] Inserted !!") +else: + print("[0, 1, 2] Not inserted...") + +if simplex_tree.insert([0, 1, 4], filtration=4.0): + print("[0, 1, 4] Inserted !!") +else: + print("[0, 1, 4] Not inserted...") + +if simplex_tree.find([4]): + print("[4] Found !!") +else: + print("[4] Not found...") + +print("dimension=", simplex_tree.dimension()) +print("filtrations=", simplex_tree.get_filtration()) +print("star([0])=", simplex_tree.get_star([0])) +print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1)) + +print("point[0]=", alpha_complex.get_point(0)) +print("point[5]=", alpha_complex.get_point(5)) diff --git a/src/python/example/alpha_rips_persistence_bottleneck_distance.py b/src/python/example/alpha_rips_persistence_bottleneck_distance.py new file mode 100755 index 00000000..086307ee --- /dev/null +++ b/src/python/example/alpha_rips_persistence_bottleneck_distance.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python + +import gudhi +import argparse +import math + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="AlphaComplex and RipsComplex " + "persistence creation from points read in " + "a OFF file. 
Bottleneck distance computation" + " on each dimension", + epilog="Example: " + "example/alpha_rips_persistence_bottleneck_distance.py " + "-f ../data/points/tore3D_1307.off -t 0.15 -d 3", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-t", "--threshold", type=float, default=0.5) +parser.add_argument("-d", "--max_dimension", type=int, default=1) + +args = parser.parse_args() +with open(args.file, "r") as f: + first_line = f.readline() + if (first_line == "OFF\n") or (first_line == "nOFF\n"): + point_cloud = gudhi.read_off(off_file=args.file) + print("#####################################################################") + print("RipsComplex creation from points read in a OFF file") + + message = "RipsComplex with max_edge_length=" + repr(args.threshold) + print(message) + + rips_complex = gudhi.RipsComplex( + points=point_cloud, max_edge_length=args.threshold + ) + + rips_stree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) + + message = "Number of simplices=" + repr(rips_stree.num_simplices()) + print(message) + + rips_diag = rips_stree.persistence() + + print("#####################################################################") + print("AlphaComplex creation from points read in a OFF file") + + message = "AlphaComplex with max_edge_length=" + repr(args.threshold) + print(message) + + alpha_complex = gudhi.AlphaComplex(points=point_cloud) + alpha_stree = alpha_complex.create_simplex_tree( + max_alpha_square=(args.threshold * args.threshold) + ) + + message = "Number of simplices=" + repr(alpha_stree.num_simplices()) + print(message) + + alpha_diag = alpha_stree.persistence() + + max_b_distance = 0.0 + for dim in range(args.max_dimension): + # Alpha persistence values needs to be transform because filtration + # values are alpha square values + funcs = [math.sqrt, math.sqrt] + alpha_intervals = [] + for interval in alpha_stree.persistence_intervals_in_dimension(dim): + alpha_intervals.append( + map(lambda func, value: func(value), funcs, interval) + ) + + rips_intervals = rips_stree.persistence_intervals_in_dimension(dim) + bottleneck_distance = gudhi.bottleneck_distance( + rips_intervals, alpha_intervals + ) + message = ( + "In dimension " + + repr(dim) + + ", bottleneck distance = " + + repr(bottleneck_distance) + ) + print(message) + max_b_distance = max(bottleneck_distance, max_b_distance) + + print( + "================================================================================" + ) + message = "Bottleneck distance is " + repr(max_b_distance) + print(message) + + else: + print(args.file, "is not a valid OFF file") + + f.close() diff --git a/src/python/example/bottleneck_basic_example.py b/src/python/example/bottleneck_basic_example.py new file mode 100755 index 00000000..392d2a6e --- /dev/null +++ b/src/python/example/bottleneck_basic_example.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
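[Editor's sketch, not part of the patch] A condensed illustration of the rescaling step used in alpha_rips_persistence_bottleneck_distance.py above: alpha filtration values are squared circumradii, so their square roots are what should be compared with Rips filtration values. The diagrams below are hand-made toy values, not computed from any file.

import math
import gudhi

# toy alpha diagram: birth/death stored as squared radii (illustrative values only)
alpha_intervals = [(0.25, 1.0), (0.04, 0.36)]
rescaled_alpha = [(math.sqrt(b), math.sqrt(d)) for (b, d) in alpha_intervals]

# toy Rips diagram: birth/death already on the edge-length scale
rips_intervals = [(0.5, 1.0), (0.2, 0.6)]

print(gudhi.bottleneck_distance(rips_intervals, rescaled_alpha))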
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Francois Godi, Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] + +diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] + +message = "diag1=" + repr(diag1) +print(message) + +message = "diag2=" + repr(diag2) +print(message) + +message = "Bottleneck distance approximation=" + repr( + gudhi.bottleneck_distance(diag1, diag2, 0.1) +) +print(message) + +message = "Bottleneck distance exact value=" + repr( + gudhi.bottleneck_distance(diag1, diag2) +) +print(message) diff --git a/src/python/example/coordinate_graph_induced_complex.py b/src/python/example/coordinate_graph_induced_complex.py new file mode 100755 index 00000000..e32141b4 --- /dev/null +++ b/src/python/example/coordinate_graph_induced_complex.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="Coordinate GIC " "from points read in a OFF file.", + epilog="Example: " + "example/coordinate_graph_induced_complex.py " + "-f ../data/points/KleinBottle5D.off -c 0 -v" + "- Constructs the coordinate GIC with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-c", "--coordinate", type=int, default=0) +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) + +args = parser.parse_args() + +nerve_complex = gudhi.CoverComplex() +nerve_complex.set_verbose(args.verbose) + +if nerve_complex.read_point_cloud(args.file): + nerve_complex.set_type("GIC") + nerve_complex.set_color_from_coordinate(args.coordinate) + nerve_complex.set_function_from_coordinate(args.coordinate) + nerve_complex.set_graph_from_automatic_rips() + nerve_complex.set_automatic_resolution() + nerve_complex.set_gain() + nerve_complex.set_cover_from_function() + nerve_complex.find_simplices() + nerve_complex.plot_dot() + simplex_tree = nerve_complex.create_simplex_tree() + nerve_complex.compute_PD() + if args.verbose: + print("Iterator on coordinate GIC simplices") + result_str = ( + "Coordinate GIC is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." 
+ ) + print(result_str) + for filtered_value in simplex_tree.get_filtration(): + print(filtered_value[0]) diff --git a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py new file mode 100755 index 00000000..610ba44f --- /dev/null +++ b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="EuclideanStrongWitnessComplex creation from " + "points read in a OFF file.", + epilog="Example: " + "example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2" + "- Constructs a strong witness complex with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-a", "--max_alpha_square", type=float, required=True) +parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) +parser.add_argument("-d", "--limit_dimension", type=int, required=True) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +with open(args.file, "r") as f: + first_line = f.readline() + if (first_line == "OFF\n") or (first_line == "nOFF\n"): + print("#####################################################################") + print("EuclideanStrongWitnessComplex creation from points read in a OFF file") + + witnesses = gudhi.read_off(off_file=args.file) + landmarks = gudhi.pick_n_random_points( + points=witnesses, nb_points=args.number_of_landmarks + ) + + message = ( + "EuclideanStrongWitnessComplex with max_edge_length=" + + repr(args.max_alpha_square) + + " - Number of landmarks=" + + repr(args.number_of_landmarks) + ) + print(message) + + witness_complex = gudhi.EuclideanStrongWitnessComplex( + witnesses=witnesses, landmarks=landmarks + ) + simplex_tree = witness_complex.create_simplex_tree( + max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension + ) + + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) + print(message) + + diag = simplex_tree.persistence() + + print("betti_numbers()=") + print(simplex_tree.betti_numbers()) + + if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(diag, band=args.band) + pplot.show() + else: + print(args.file, "is not a valid OFF file") + + f.close() diff --git a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py new file mode 100755 index 00000000..7587b732 --- /dev/null +++ b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" 
This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="EuclideanWitnessComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2" + "- Constructs a weak witness complex with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-a", "--max_alpha_square", type=float, required=True) +parser.add_argument("-n", "--number_of_landmarks", type=int, required=True) +parser.add_argument("-d", "--limit_dimension", type=int, required=True) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +with open(args.file, "r") as f: + first_line = f.readline() + if (first_line == "OFF\n") or (first_line == "nOFF\n"): + print("#####################################################################") + print("EuclideanWitnessComplex creation from points read in a OFF file") + + witnesses = gudhi.read_off(off_file=args.file) + landmarks = gudhi.pick_n_random_points( + points=witnesses, nb_points=args.number_of_landmarks + ) + + message = ( + "EuclideanWitnessComplex with max_edge_length=" + + repr(args.max_alpha_square) + + " - Number of landmarks=" + + repr(args.number_of_landmarks) + ) + print(message) + + witness_complex = gudhi.EuclideanWitnessComplex( + witnesses=witnesses, landmarks=landmarks + ) + simplex_tree = witness_complex.create_simplex_tree( + max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension + ) + + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) + print(message) + + diag = simplex_tree.persistence() + + print("betti_numbers()=") + print(simplex_tree.betti_numbers()) + + if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(diag, band=args.band) + pplot.show() + else: + print(args.file, "is not a valid OFF file") + + f.close() diff --git a/src/python/example/functional_graph_induced_complex.py b/src/python/example/functional_graph_induced_complex.py new file mode 100755 index 00000000..8b645040 --- /dev/null +++ b/src/python/example/functional_graph_induced_complex.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="Functional GIC " "from points read in a OFF file.", + epilog="Example: " + "example/functional_graph_induced_complex.py " + "-o ../data/points/COIL_database/lucky_cat.off " + "-f ../data/points/COIL_database/lucky_cat_PCA1" + "- Constructs the functional GIC with the " + "points from the given OFF and function files.", +) +parser.add_argument("-o", "--off-file", type=str, required=True) +parser.add_argument("-f", "--function-file", type=str, required=True) +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) + +args = parser.parse_args() + +nerve_complex = gudhi.CoverComplex() +nerve_complex.set_verbose(args.verbose) + +if nerve_complex.read_point_cloud(args.off_file): + nerve_complex.set_type("GIC") + nerve_complex.set_color_from_file(args.function_file) + nerve_complex.set_function_from_file(args.function_file) + nerve_complex.set_graph_from_automatic_rips() + nerve_complex.set_automatic_resolution() + nerve_complex.set_gain() + nerve_complex.set_cover_from_function() + nerve_complex.find_simplices() + nerve_complex.plot_dot() + simplex_tree = nerve_complex.create_simplex_tree() + nerve_complex.compute_PD() + if args.verbose: + print("Iterator on functional GIC simplices") + result_str = ( + "Functional GIC is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." + ) + print(result_str) + for filtered_value in simplex_tree.get_filtration(): + print(filtered_value[0]) diff --git a/src/python/example/gudhi_graphical_tools_example.py b/src/python/example/gudhi_graphical_tools_example.py new file mode 100755 index 00000000..3b0ca54d --- /dev/null +++ b/src/python/example/gudhi_graphical_tools_example.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("Show barcode persistence example") + +persistence = [ + (2, (1.0, float("inf"))), + (1, (1.4142135623730951, float("inf"))), + (1, (1.4142135623730951, float("inf"))), + (0, (0.0, float("inf"))), + (0, (0.0, 1.0)), + (0, (0.0, 1.0)), + (0, (0.0, 1.0)), +] +gudhi.plot_persistence_barcode(persistence) + +print("#####################################################################") +print("Show diagram persistence example") + +pplot = gudhi.plot_persistence_diagram(persistence) +pplot.show() + +print("#####################################################################") +print("Show diagram persistence example with a confidence band") + +pplot = gudhi.plot_persistence_diagram(persistence, band=0.2) +pplot.show() diff --git a/src/python/example/nerve_of_a_covering.py b/src/python/example/nerve_of_a_covering.py new file mode 100755 index 00000000..3c8e0f90 --- /dev/null +++ b/src/python/example/nerve_of_a_covering.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="Nerve of a covering creation " "from points read in a OFF file.", + epilog="Example: " + "example/nerve_of_a_covering.py " + "-f ../data/points/human.off -c 2 -r 10 -g 0.3" + "- Constructs Nerve of a covering with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-c", "--coordinate", type=int, default=0) +parser.add_argument("-r", "--resolution", type=int, default=10) +parser.add_argument("-g", "--gain", type=float, default=0.3) +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) + +args = parser.parse_args() + +nerve_complex = gudhi.CoverComplex() +nerve_complex.set_verbose(args.verbose) + +if nerve_complex.read_point_cloud(args.file): + nerve_complex.set_type("Nerve") + nerve_complex.set_color_from_coordinate(args.coordinate) + nerve_complex.set_function_from_coordinate(args.coordinate) + nerve_complex.set_graph_from_OFF() + nerve_complex.set_resolution_with_interval_number(args.resolution) + nerve_complex.set_gain(args.gain) + nerve_complex.set_cover_from_function() + nerve_complex.find_simplices() + nerve_complex.write_info() + simplex_tree = nerve_complex.create_simplex_tree() + nerve_complex.compute_PD() + if args.verbose: + print("Iterator on graph induced complex simplices") + result_str = ( + "Nerve is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." 
+ ) + print(result_str) + for filtered_value in simplex_tree.get_filtration(): + print(filtered_value[0]) diff --git a/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py new file mode 100755 index 00000000..9cb855cd --- /dev/null +++ b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def is_file_perseus(file): + num_lines = open(file).read().count("\n") + try: + f = open(file) + num_dim = int(f.readline()) + coeff = 1 + for dim in range(0, num_dim): + try: + line = int(f.readline()) + coeff *= abs(line) + except ValueError: + return False + if num_lines == (1 + num_dim + coeff): + return True + else: + return False + except ValueError: + return False + + +parser = argparse.ArgumentParser( + description="Periodic cubical complex from a " "Perseus-style file name.", + epilog="Example: " + "./periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py" + " -f ../data/bitmap/CubicalTwoSphere.txt", +) + +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument( + "--no-barcode", + default=False, + action="store_true", + help="Flag for not to display the barcodes", +) + +args = parser.parse_args() + +if is_file_perseus(args.file): + print("#####################################################################") + print("PeriodicCubicalComplex creation") + periodic_cubical_complex = gudhi.PeriodicCubicalComplex(perseus_file=args.file) + + print("persistence(homology_coeff_field=3, min_persistence=0)=") + diag = periodic_cubical_complex.persistence( + homology_coeff_field=3, min_persistence=0 + ) + print(diag) + + print("betti_numbers()=") + print(periodic_cubical_complex.betti_numbers()) + if args.no_barcode == False: + gudhi.plot_persistence_barcode(diag) +else: + print(args.file, "is not a valid perseus style file") diff --git a/src/python/example/random_cubical_complex_persistence_example.py b/src/python/example/random_cubical_complex_persistence_example.py new file mode 100755 index 00000000..da0eb177 --- /dev/null +++ b/src/python/example/random_cubical_complex_persistence_example.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +import gudhi +import numpy +from functools import reduce +import argparse +import operator + + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="Random cubical complex.", + epilog="Example: " + "./random_cubical_complex_persistence_example.py" + " 10 10 10 - Constructs a random cubical " + "complex in a dimension [10, 10, 10] (aka. 
" + "1000 random top dimensional cells).", +) +parser.add_argument("dimension", type=int, nargs="*", help="Cubical complex dimensions") + +args = parser.parse_args() +dimension_multiplication = reduce(operator.mul, args.dimension, 1) + +if dimension_multiplication > 1: + print("#####################################################################") + print("CubicalComplex creation") + cubical_complex = gudhi.CubicalComplex( + dimensions=args.dimension, + top_dimensional_cells=numpy.random.rand(dimension_multiplication), + ) + + print("persistence(homology_coeff_field=2, min_persistence=0)=") + print(cubical_complex.persistence(homology_coeff_field=2, min_persistence=0)) + + print("betti_numbers()=") + print(cubical_complex.betti_numbers()) diff --git a/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py new file mode 100755 index 00000000..3571580b --- /dev/null +++ b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python + +import gudhi +import sys +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2017 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2017 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="RipsComplex creation from " "a correlation matrix read in a csv file.", + epilog="Example: " + "example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py " + "-f ../data/correlation_matrix/lower_triangular_correlation_matrix.csv -e 12.0 -d 3" + "- Constructs a Rips complex with the " + "correlation matrix from the given csv file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-c", "--min_edge_correlation", type=float, default=0.5) +parser.add_argument("-d", "--max_dimension", type=int, default=1) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +if not (-1.0 < args.min_edge_correlation < 1.0): + print("Wrong value of the treshold corelation (should be between -1 and 1).") + sys.exit(1) + +print("#####################################################################") +print("Caution: as persistence diagrams points will be under the diagonal,") +print("bottleneck distance and persistence graphical tool will not work") +print("properly, this is a known issue.") + +print("#####################################################################") +print("RipsComplex creation from correlation matrix read in a csv file") + +message = "RipsComplex with min_edge_correlation=" + repr(args.min_edge_correlation) +print(message) + +correlation_matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file=args.file +) +# Given a correlation matrix M, we compute component-wise M'[i,j] = 1-M[i,j] to get a distance matrix: +distance_matrix = [ + [1.0 - correlation_matrix[i][j] for j in range(len(correlation_matrix[i]))] + for i in range(len(correlation_matrix)) +] + +rips_complex = gudhi.RipsComplex( + 
distance_matrix=distance_matrix, max_edge_length=1.0 - args.min_edge_correlation +) +simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) + +message = "Number of simplices=" + repr(simplex_tree.num_simplices()) +print(message) + +diag = simplex_tree.persistence() + +print("betti_numbers()=") +print(simplex_tree.betti_numbers()) + +# invert the persistence diagram +invert_diag = [ + (diag[pers][0], (1.0 - diag[pers][1][0], 1.0 - diag[pers][1][1])) + for pers in range(len(diag)) +] + +if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(invert_diag, band=args.band) + pplot.show() diff --git a/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py new file mode 100755 index 00000000..0b9a9ba9 --- /dev/null +++ b/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="RipsComplex creation from " "a distance matrix read in a csv file.", + epilog="Example: " + "example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py " + "-f ../data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3" + "- Constructs a Rips complex with the " + "distance matrix from the given csv file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-e", "--max_edge_length", type=float, default=0.5) +parser.add_argument("-d", "--max_dimension", type=int, default=1) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +print("#####################################################################") +print("RipsComplex creation from distance matrix read in a csv file") + +message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length) +print(message) + +distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=args.file) +rips_complex = gudhi.RipsComplex( + distance_matrix=distance_matrix, max_edge_length=args.max_edge_length +) +simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension) + +message = "Number of simplices=" + repr(simplex_tree.num_simplices()) +print(message) + +diag = simplex_tree.persistence() + +print("betti_numbers()=") +print(simplex_tree.betti_numbers()) + +if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(diag, band=args.band) + pplot.show() diff --git a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py new file mode 100755 index 00000000..2b335bba --- /dev/null +++ b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the 
Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="RipsComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/rips_complex_diagram_persistence_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -a 0.6" + "- Constructs a Rips complex with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-e", "--max_edge_length", type=float, default=0.5) +parser.add_argument("-d", "--max_dimension", type=int, default=1) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +with open(args.file, "r") as f: + first_line = f.readline() + if (first_line == "OFF\n") or (first_line == "nOFF\n"): + print("#####################################################################") + print("RipsComplex creation from points read in a OFF file") + + message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length) + print(message) + + point_cloud = gudhi.read_off(off_file=args.file) + rips_complex = gudhi.RipsComplex( + points=point_cloud, max_edge_length=args.max_edge_length + ) + simplex_tree = rips_complex.create_simplex_tree( + max_dimension=args.max_dimension + ) + + message = "Number of simplices=" + repr(simplex_tree.num_simplices()) + print(message) + + diag = simplex_tree.persistence() + + print("betti_numbers()=") + print(simplex_tree.betti_numbers()) + + if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(diag, band=args.band) + pplot.show() + else: + print(args.file, "is not a valid OFF file") + + f.close() diff --git a/src/python/example/rips_complex_from_points_example.py b/src/python/example/rips_complex_from_points_example.py new file mode 100755 index 00000000..59d8a261 --- /dev/null +++ b/src/python/example/rips_complex_from_points_example.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python + +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
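[Editor's sketch, not part of the patch] A condensed version of the correlation-matrix workflow from rips_complex_diagram_persistence_from_correlation_matrix_file_example.py above, using a tiny hand-made lower-triangular matrix instead of a csv file; it keeps only the 1 - correlation distance transform and the mapping of persistence values back to the correlation scale.

import gudhi

# tiny hand-made lower-triangular correlation matrix (3 variables), illustrative only
correlation_matrix = [[], [0.9], [0.5, 0.2]]
distance_matrix = [[1.0 - c for c in row] for row in correlation_matrix]

rips = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0)
st = rips.create_simplex_tree(max_dimension=1)

# map birth/death back from the 1 - correlation scale to correlations
diag = [(dim, (1.0 - b, 1.0 - d)) for (dim, (b, d)) in st.persistence()]
print(diag)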
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("RipsComplex creation from points") +rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42) + +simplex_tree = rips.create_simplex_tree(max_dimension=1) + +print("filtrations=", simplex_tree.get_filtration()) +print("star([0])=", simplex_tree.get_star([0])) +print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1)) diff --git a/src/python/example/rips_persistence_diagram.py b/src/python/example/rips_persistence_diagram.py new file mode 100755 index 00000000..f5897d7b --- /dev/null +++ b/src/python/example/rips_persistence_diagram.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python + +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Marc Glisse" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("RipsComplex creation from points") +rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42) + +simplex_tree = rips.create_simplex_tree(max_dimension=1) + + +diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0) +print("diag=", diag) + +pplot = gudhi.plot_persistence_diagram(diag) +pplot.show() diff --git a/src/python/example/simplex_tree_example.py b/src/python/example/simplex_tree_example.py new file mode 100755 index 00000000..30de00da --- /dev/null +++ b/src/python/example/simplex_tree_example.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("SimplexTree creation from insertion") + +st = gudhi.SimplexTree() + +if st.insert([0, 1]): + print("Inserted !!") +else: + print("Not inserted...") + +if st.find([0, 1]): + print("Found !!") +else: + print("Not found...") + +if st.insert([0, 1, 2], filtration=4.0): + print("Inserted !!") +else: + print("Not inserted...") + +print("dimension=", st.dimension()) + +st.initialize_filtration() +print("filtration=", st.get_filtration()) +print("filtration[1, 2]=", st.filtration([1, 2])) +print("filtration[4, 2]=", st.filtration([4, 2])) + +print("num_simplices=", st.num_simplices()) +print("num_vertices=", st.num_vertices()) + +print("skeleton[2]=", st.get_skeleton(2)) +print("skeleton[1]=", st.get_skeleton(1)) +print("skeleton[0]=", st.get_skeleton(0)) diff --git a/src/python/example/sparse_rips_persistence_diagram.py b/src/python/example/sparse_rips_persistence_diagram.py new file mode 100755 index 00000000..671d5e34 --- /dev/null +++ b/src/python/example/sparse_rips_persistence_diagram.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Marc Glisse" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("Sparse RipsComplex creation from points") +rips = gudhi.RipsComplex( + points=[[0, 0], [0, 0.1], [1, 0], [0, 1], [1, 1]], max_edge_length=42, sparse=0.5 +) + +simplex_tree = rips.create_simplex_tree(max_dimension=2) + + +diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0) +print("diag=", diag) + +pplot = gudhi.plot_persistence_diagram(diag) +pplot.show() diff --git a/src/python/example/tangential_complex_plain_homology_from_off_file_example.py b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py new file mode 100755 index 00000000..456bc9eb --- /dev/null +++ b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="TangentialComplex creation from " "points read in a OFF file.", + epilog="Example: " + "example/tangential_complex_plain_homology_from_off_file_example.py " + "-f ../data/points/tore3D_300.off -i 3" + "- Constructs a tangential complex with the " + "points from the given OFF file", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-i", "--intrisic_dim", type=int, required=True) +parser.add_argument("-b", "--band", type=float, default=0.0) +parser.add_argument( + "--no-diagram", + default=False, + action="store_true", + help="Flag for not to display the diagrams", +) + +args = parser.parse_args() + +with open(args.file, "r") as f: + first_line = f.readline() + if (first_line == "OFF\n") or (first_line == "nOFF\n"): + print("#####################################################################") + print("TangentialComplex creation from points read in a OFF file") + + tc = gudhi.TangentialComplex(intrisic_dim=args.intrisic_dim, off_file=args.file) + tc.compute_tangential_complex() + st = tc.create_simplex_tree() + + message = "Number of simplices=" + repr(st.num_simplices()) + print(message) + + diag = st.persistence(persistence_dim_max=True) + + print("betti_numbers()=") + print(st.betti_numbers()) + + if args.no_diagram == False: + pplot = gudhi.plot_persistence_diagram(diag, band=args.band) + pplot.show() + else: + print(args.file, "is not a valid OFF file") + + f.close() diff --git a/src/python/example/voronoi_graph_induced_complex.py b/src/python/example/voronoi_graph_induced_complex.py new file mode 100755 index 00000000..38be6c92 --- /dev/null +++ b/src/python/example/voronoi_graph_induced_complex.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +import gudhi +import argparse + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "MIT" + +parser = argparse.ArgumentParser( + description="Voronoi GIC " "from points read in a OFF file.", + epilog="Example: " + "example/voronoi_graph_induced_complex.py " + "-f ../data/points/human.off -n 700 -v" + "- Constructs the Voronoi GIC with the " + "points from the given OFF file.", +) +parser.add_argument("-f", "--file", type=str, required=True) +parser.add_argument("-n", "--subsample-nb-points", type=int, default=100) +parser.add_argument( + "-v", + "--verbose", + default=False, + action="store_true", + help="Flag for program verbosity", +) + +args = parser.parse_args() + +nerve_complex = gudhi.CoverComplex() +nerve_complex.set_verbose(args.verbose) + +if nerve_complex.read_point_cloud(args.file): + nerve_complex.set_type("GIC") + nerve_complex.set_color_from_coordinate() + nerve_complex.set_graph_from_OFF() + nerve_complex.set_cover_from_Voronoi(args.subsample_nb_points) + nerve_complex.find_simplices() + nerve_complex.plot_off() + simplex_tree = nerve_complex.create_simplex_tree() + nerve_complex.compute_PD() + if args.verbose: + print("Iterator on graph induced complex simplices") + result_str = ( + "Graph induced complex is of dimension " + + repr(simplex_tree.dimension()) + + " - " + + repr(simplex_tree.num_simplices()) + + " simplices - " + + repr(simplex_tree.num_vertices()) + + " vertices." + ) + print(result_str) + for filtered_value in simplex_tree.get_filtration(): + print(filtered_value[0]) diff --git a/src/python/example/witness_complex_from_nearest_landmark_table.py b/src/python/example/witness_complex_from_nearest_landmark_table.py new file mode 100755 index 00000000..c04a82b2 --- /dev/null +++ b/src/python/example/witness_complex_from_nearest_landmark_table.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +from gudhi import StrongWitnessComplex, SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +print("#####################################################################") +print("WitnessComplex creation from nearest landmark table") +nearest_landmark_table = [ + [[0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]], + [[1, 0.0], [2, 0.1], [3, 0.2], [4, 0.3], [0, 0.4]], + [[2, 0.0], [3, 0.1], [4, 0.2], [0, 0.3], [1, 0.4]], + [[3, 0.0], [4, 0.1], [0, 0.2], [1, 0.3], [2, 0.4]], + [[4, 0.0], [0, 0.1], [1, 0.2], [2, 0.3], [3, 0.4]], +] + +witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table) +simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=0.41) + +message = "Number of simplices: " + repr(simplex_tree.num_simplices()) +print(message) + +diag = simplex_tree.persistence(min_persistence=-0.1, homology_coeff_field=11) +print(diag) diff --git a/src/python/gudhi/__init__.py b/src/python/gudhi/__init__.py new file mode 100644 index 00000000..fde749eb --- /dev/null +++ b/src/python/gudhi/__init__.py @@ -0,0 +1 @@ +# Fake empty __init__.py for cython to accept this directory as a Python package diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in new file mode 100644 index 00000000..947aa3c9 --- /dev/null +++ b/src/python/gudhi/__init__.py.in @@ -0,0 +1,40 @@ +from importlib import import_module + +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "GUDHI Editorial Board" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "https://gudhi.inria.fr/licensing/" +__version__ = "@GUDHI_VERSION@" +# This variable is used by doctest to find files +__root_source_dir__ = "@CMAKE_SOURCE_DIR@" +__debug_info__ = @GUDHI_PYTHON_DEBUG_INFO@ + +from sys import exc_info +from importlib import import_module + +__all__ = [@GUDHI_PYTHON_MODULES@] + +__available_modules__ = '' +__missing_modules__ = '' + +# try to import * from gudhi.__module_name__ +for __module_name__ in __all__: + try: + __module__ = import_module('gudhi.' + __module_name__) + try: + __to_import__ = __module__.__all__ + except AttributeError: + __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')] + globals().update({name: __module__.__dict__[name] for name in __to_import__}) + __available_modules__ += __module_name__ + ";" + except: + __missing_modules__ += __module_name__ + ";" diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx new file mode 100644 index 00000000..6d6309db --- /dev/null +++ b/src/python/gudhi/alpha_complex.pyx @@ -0,0 +1,116 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +from libc.stdint cimport intptr_t +import os + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
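[Editor's sketch, not part of the patch] The generated __init__.py above records which optional extensions could actually be imported. A minimal check a user could run to see what was built, for instance when CGAL or Eigen were missing at compile time:

import gudhi

# both variables are filled by the generated __init__.py shown above
print("available modules:", gudhi.__available_modules__)
print("missing modules:  ", gudhi.__missing_modules__)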
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": + cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": + Alpha_complex_interface(vector[vector[double]] points) + # bool from_file is a workaround for cython to find the correct signature + Alpha_complex_interface(string off_file, bool from_file) + vector[double] get_point(int vertex) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + +# AlphaComplex python interface +cdef class AlphaComplex: + """AlphaComplex is a simplicial complex constructed from the finite cells + of a Delaunay Triangulation. + + The filtration value of each simplex is computed as the square of the + circumradius of the simplex if the circumsphere is empty (the simplex is + then said to be Gabriel), and as the minimum of the filtration values of + the codimension 1 cofaces that make it not Gabriel otherwise. + + All simplices that have a filtration value strictly greater than a given + alpha squared value are not inserted into the complex. + + .. note:: + + When Alpha_complex is constructed with an infinite value of alpha, the + complex is a Delaunay complex. + + """ + + cdef Alpha_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, points=None, off_file=''): + """AlphaComplex constructor. + + :param points: A list of points in d-Dimension. + :type points: list of list of double + + Or + + :param off_file: An OFF file style name. + :type off_file: string + """ + + # The real cython constructor + def __cinit__(self, points=None, off_file=''): + if off_file is not '': + if os.path.isfile(off_file): + self.thisptr = new Alpha_complex_interface(str.encode(off_file), True) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty Alpha construction + points=[] + self.thisptr = new Alpha_complex_interface(points) + + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if AlphaComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int + :rtype: list of float + :returns: the point. + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + + def create_simplex_tree(self, max_alpha_square=float('inf')): + """ + :param max_alpha_square: The maximum alpha square threshold the + simplices shall not exceed. Default is set to infinity, and + there is very little point using anything else since it does + not save time. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. 
+ :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + self.thisptr.create_simplex_tree(stree_int_ptr, max_alpha_square) + return stree diff --git a/src/python/gudhi/bottleneck.pyx b/src/python/gudhi/bottleneck.pyx new file mode 100644 index 00000000..4b378cbc --- /dev/null +++ b/src/python/gudhi/bottleneck.pyx @@ -0,0 +1,49 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram": + double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double) + double bottleneck(vector[pair[double, double]], vector[pair[double, double]]) + +def bottleneck_distance(diagram_1, diagram_2, e=None): + """This function returns the point corresponding to a given vertex. + + :param diagram_1: The first diagram. + :type diagram_1: vector[pair[double, double]] + :param diagram_2: The second diagram. + :type diagram_2: vector[pair[double, double]] + :param e: If `e` is 0, this uses an expensive algorithm to compute the + exact distance. + If `e` is not 0, it asks for an additive `e`-approximation, and + currently also allows a small multiplicative error (the last 2 or 3 + bits of the mantissa may be wrong). This version of the algorithm takes + advantage of the limited precision of `double` and is usually a lot + faster to compute, whatever the value of `e`. + + Thus, by default, `e` is the smallest positive double. + :type e: float + :rtype: float + :returns: the bottleneck distance. + """ + if e is None: + # Default value is the smallest double value (not 0, 0 is for exact version) + return bottleneck(diagram_1, diagram_2) + else: + # Can be 0 for exact version + return bottleneck(diagram_1, diagram_2, e) diff --git a/src/python/gudhi/cubical_complex.pyx b/src/python/gudhi/cubical_complex.pyx new file mode 100644 index 00000000..0dc133d1 --- /dev/null +++ b/src/python/gudhi/cubical_complex.pyx @@ -0,0 +1,188 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +import os + +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
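[Editor's sketch, not part of the patch] A minimal usage example for the AlphaComplex binding defined above: four 2D points, a squared-alpha cut-off, and a dump of the resulting filtration.

import gudhi

# illustrative points; any list of d-dimensional points works the same way
ac = gudhi.AlphaComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
st = ac.create_simplex_tree(max_alpha_square=2.0)

print("dimension =", st.dimension())
print("point of vertex 0 =", ac.get_point(0))
for simplex, alpha_square in st.get_filtration():
    print(simplex, "->", alpha_square)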
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": + cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>": + Bitmap_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells) + Bitmap_cubical_complex_base_interface(string perseus_file) + int num_simplices() + int dimension() + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>": + Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + +# CubicalComplex python interface +cdef class CubicalComplex: + """The CubicalComplex is an example of a structured complex useful in + computational mathematics (specially rigorous numerics) and image + analysis. + """ + cdef Bitmap_cubical_complex_base_interface * thisptr + + cdef Cubical_complex_persistence_interface * pcohptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, dimensions=None, top_dimensional_cells=None, + perseus_file=''): + """CubicalComplex constructor from dimensions and + top_dimensional_cells or from a Perseus-style file name. + + :param dimensions: A list of number of top dimensional cells. + :type dimensions: list of int + :param top_dimensional_cells: A list of cells filtration values. + :type top_dimensional_cells: list of double + + Or + + :param perseus_file: A Perseus-style file name. + :type perseus_file: string + """ + + # The real cython constructor + def __cinit__(self, dimensions=None, top_dimensional_cells=None, + perseus_file=''): + if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file is ''): + self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells) + elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file is not ''): + if os.path.isfile(perseus_file): + self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file)) + else: + print("file " + perseus_file + " not found.") + else: + print("CubicalComplex can be constructed from dimensions and " + "top_dimensional_cells or from a Perseus-style file name.") + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + if self.pcohptr != NULL: + del self.pcohptr + + def __is_defined(self): + """Returns true if CubicalComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def __is_persistence_defined(self): + """Returns true if Persistence pointer is not NULL. + """ + return self.pcohptr != NULL + + def num_simplices(self): + """This function returns the number of all cubes in the complex. + + :returns: int -- the number of all cubes in the complex. + """ + return self.thisptr.num_simplices() + + def dimension(self): + """This function returns the dimension of the complex. + + :returns: int -- the complex dimension. 
+ """ + return self.thisptr.dimension() + + def persistence(self, homology_coeff_field=11, min_persistence=0): + """This function returns the persistence of the complex. + + :param homology_coeff_field: The homology coefficient field. Must be a + prime number + :type homology_coeff_field: int. + :param min_persistence: The minimum persistence value to take into + account (strictly greater than min_persistence). Default value is + 0.0. + Sets min_persistence to -1.0 to see all values. + :type min_persistence: float. + :returns: list of pairs(dimension, pair(birth, death)) -- the + persistence of the complex. + """ + if self.pcohptr != NULL: + del self.pcohptr + if self.thisptr != NULL: + self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True) + cdef vector[pair[int, pair[double, double]]] persistence_result + if self.pcohptr != NULL: + persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) + return persistence_result + + def betti_numbers(self): + """This function returns the Betti numbers of the complex. + + :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). + + :note: betti_numbers function requires persistence function to be + launched first. + + :note: betti_numbers function always returns [1, 0, 0, ...] as infinity + filtration cubes are not removed from the complex. + """ + cdef vector[int] bn_result + if self.pcohptr != NULL: + bn_result = self.pcohptr.betti_numbers() + return bn_result + + def persistent_betti_numbers(self, from_value, to_value): + """This function returns the persistent Betti numbers of the complex. + + :param from_value: The persistence birth limit to be added in the + numbers (persistent birth <= from_value). + :type from_value: float. + :param to_value: The persistence death limit to be added in the + numbers (persistent death > to_value). + :type to_value: float. + + :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., + Bn]). + + :note: persistent_betti_numbers function requires persistence + function to be launched first. + """ + cdef vector[int] pbn_result + if self.pcohptr != NULL: + pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) + return pbn_result + + def persistence_intervals_in_dimension(self, dimension): + """This function returns the persistence intervals of the complex in a + specific dimension. + + :param dimension: The specific dimension. + :type dimension: int. + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + + :note: intervals_in_dim function requires persistence function to be + launched first. + """ + cdef vector[pair[double,double]] intervals_result + if self.pcohptr != NULL: + intervals_result = self.pcohptr.intervals_in_dimension(dimension) + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") + return np_array(intervals_result) diff --git a/src/python/gudhi/euclidean_strong_witness_complex.pyx b/src/python/gudhi/euclidean_strong_witness_complex.pyx new file mode 100644 index 00000000..5d6e4fb9 --- /dev/null +++ b/src/python/gudhi/euclidean_strong_witness_complex.pyx @@ -0,0 +1,92 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. 
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface": + Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + vector[double] get_point(unsigned vertex) + +# EuclideanStrongWitnessComplex python interface +cdef class EuclideanStrongWitnessComplex: + """Constructs strong witness complex for given sets of witnesses and + landmarks in Euclidean space. + """ + + cdef Euclidean_strong_witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, landmarks=None, witnesses=None): + """WitnessComplex constructor. + + :param landmarks: A list of landmarks (in the point cloud). + :type landmarks: list of list of double + + :param witnesses: The point cloud. + :type witnesses: list of list of double + """ + + # The real cython constructor + def __cinit__(self, landmarks=None, witnesses=None): + if landmarks is not None and witnesses is not None: + self.thisptr = new Euclidean_strong_witness_complex_interface(landmarks, witnesses) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if WitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): + """ + :param max_alpha_square: The maximum alpha square threshold the + simplices shall not exceed. Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) + return stree + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int. + :returns: The point. + :rtype: list of float + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + diff --git a/src/python/gudhi/euclidean_witness_complex.pyx b/src/python/gudhi/euclidean_witness_complex.pyx new file mode 100644 index 00000000..2531919b --- /dev/null +++ b/src/python/gudhi/euclidean_witness_complex.pyx @@ -0,0 +1,92 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface": + Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + vector[double] get_point(unsigned vertex) + +# EuclideanWitnessComplex python interface +cdef class EuclideanWitnessComplex: + """Constructs (weak) witness complex for given sets of witnesses and + landmarks in Euclidean space. + """ + + cdef Euclidean_witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, landmarks=None, witnesses=None): + """WitnessComplex constructor. + + :param landmarks: A list of landmarks (in the point cloud). + :type landmarks: list of list of double + + :param witnesses: The point cloud. + :type witnesses: list of list of double + """ + + # The real cython constructor + def __cinit__(self, landmarks=None, witnesses=None): + if landmarks is not None and witnesses is not None: + self.thisptr = new Euclidean_witness_complex_interface(landmarks, witnesses) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if WitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square, limit_dimension = -1): + """ + :param max_alpha_square: The maximum alpha square threshold the + simplices shall not exceed. Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) + return stree + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int. + :returns: The point. + :rtype: list of float + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + diff --git a/src/python/gudhi/nerve_gic.pyx b/src/python/gudhi/nerve_gic.pyx new file mode 100644 index 00000000..2b230b8c --- /dev/null +++ b/src/python/gudhi/nerve_gic.pyx @@ -0,0 +1,412 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +import os +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "GPL v3" + +cdef extern from "Nerve_gic_interface.h" namespace "Gudhi": + cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface": + Nerve_gic_interface() + double compute_confidence_level_from_distance(double distance) + double compute_distance_from_confidence_level(double alpha) + void compute_distribution(int N) + double compute_p_value() + vector[pair[double, double]] compute_PD() + void find_simplices() + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) + bool read_point_cloud(string off_file_name) + double set_automatic_resolution() + void set_color_from_coordinate(int k) + void set_color_from_file(string color_file_name) + void set_color_from_range(vector[double] color) + void set_cover_from_file(string cover_file_name) + void set_cover_from_function() + void set_cover_from_Euclidean_Voronoi(int m) + void set_function_from_coordinate(int k) + void set_function_from_file(string func_file_name) + void set_function_from_range(vector[double] function) + void set_gain(double g) + double set_graph_from_automatic_euclidean_rips(int N) + void set_graph_from_file(string graph_file_name) + void set_graph_from_OFF() + void set_graph_from_euclidean_rips(double threshold) + void set_mask(int nodemask) + void set_resolution_with_interval_length(double resolution) + void set_resolution_with_interval_number(int resolution) + void set_subsampling(double constant, double power) + void set_type(string type) + void set_verbose(bool verbose) + vector[int] subpopulation(int c) + void write_info() + void plot_DOT() + void plot_OFF() + void set_point_cloud_from_range(vector[vector[double]] cloud) + void set_distances_from_range(vector[vector[double]] distance_matrix) + +# CoverComplex python interface +cdef class CoverComplex: + """Cover complex data structure. + + The data structure is a simplicial complex, representing a Graph Induced + simplicial Complex (GIC) or a Nerve, and whose simplices are computed with + a cover C of a point cloud P, which often comes from the preimages of + intervals covering the image of a function f defined on P. These intervals + are parameterized by their resolution (either their length or their number) + and their gain (percentage of overlap). To compute a GIC, one also needs a + graph G built on top of P, whose cliques with vertices belonging to + different elements of C correspond to the simplices of the GIC. + """ + + cdef Nerve_gic_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self): + """CoverComplex constructor. + """ + + # The real cython constructor + def __cinit__(self): + self.thisptr = new Nerve_gic_interface() + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if CoverComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def set_point_cloud_from_range(self, cloud): + """ Reads and stores the input point cloud from a vector stored in memory. + + :param cloud: Input vector containing the point cloud. + :type cloud: vector[vector[double]] + """ + return self.thisptr.set_point_cloud_from_range(cloud) + + def set_distances_from_range(self, distance_matrix): + """ Reads and stores the input distance matrix from a vector stored in memory. 
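+
+ For instance, with a small symmetric toy matrix (the values are arbitrary,
+ and the example assumes the complex was created as gudhi.CoverComplex()):
+
+ .. code-block:: python
+
+    import gudhi
+
+    cover_complex = gudhi.CoverComplex()
+    # 3 x 3 symmetric distance matrix with a zero diagonal
+    cover_complex.set_distances_from_range([[0., 1., 2.],
+                                            [1., 0., 1.],
+                                            [2., 1., 0.]])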
+ + :param distance_matrix: Input vector containing the distance matrix. + :type distance_matrix: vector[vector[double]] + """ + return self.thisptr.set_distances_from_range(distance_matrix) + + def compute_confidence_level_from_distance(self, distance): + """Computes the confidence level of a specific bottleneck distance + threshold. + + :param distance: Bottleneck distance. + :type distance: double + :rtype: double + :returns: Confidence level. + """ + return self.thisptr.compute_confidence_level_from_distance(distance) + + def compute_distance_from_confidence_level(self, alpha): + """Computes the bottleneck distance threshold corresponding to a + specific confidence level. + + :param alpha: Confidence level. + :type alpha: double + :rtype: double + :returns: Bottleneck distance. + """ + return self.thisptr.compute_distance_from_confidence_level(alpha) + + def compute_distribution(self, N=100): + """Computes bootstrapped distances distribution. + + :param N: Loop number (default value is 100). + :type alpha: int + """ + self.thisptr.compute_distribution(N) + + def compute_p_value(self): + """Computes the p-value, i.e. the opposite of the confidence level of + the largest bottleneck distance preserving the points in the + persistence diagram of the output simplicial complex. + + :rtype: double + :returns: p-value. + """ + return self.thisptr.compute_p_value() + + def compute_PD(self): + """Computes the extended persistence diagram of the complex. + """ + return self.thisptr.compute_PD() + + def create_simplex_tree(self): + """ + :returns: A simplex tree created from the Cover complex. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + self.thisptr.create_simplex_tree(stree_int_ptr) + return stree + + def find_simplices(self): + """Computes the simplices of the simplicial complex. + """ + self.thisptr.find_simplices() + + def read_point_cloud(self, off_file): + """Reads and stores the input point cloud from .(n)OFF file. + + :param off_file: Name of the input .OFF or .nOFF file. + :type off_file: string + :rtype: bool + :returns: Read file status. + """ + if os.path.isfile(off_file): + return self.thisptr.read_point_cloud(str.encode(off_file)) + else: + print("file " + off_file + " not found.") + return False + + def set_automatic_resolution(self): + """Computes the optimal length of intervals (i.e. the smallest interval + length avoiding discretization artifacts—see :cite:`Carriere17c`) for a + functional cover. + + :rtype: double + :returns: reso interval length used to compute the cover. + """ + return self.thisptr.set_automatic_resolution() + + def set_color_from_coordinate(self, k=0): + """Computes the function used to color the nodes of the simplicial + complex from the k-th coordinate. + + :param k: Coordinate to use (start at 0). Default value is 0. + :type k: int + """ + return self.thisptr.set_color_from_coordinate(k) + + def set_color_from_file(self, color_file_name): + """Computes the function used to color the nodes of the simplicial + complex from a file containing the function values. + + :param color_file_name: Name of the input color file. + :type color_file_name: string + """ + if os.path.isfile(color_file_name): + self.thisptr.set_color_from_file(str.encode(color_file_name)) + else: + print("file " + color_file_name + " not found.") + + def set_color_from_range(self, color): + """Computes the function used to color the nodes of the simplicial + complex from a vector stored in memory. + + :param color: Input vector of values. 
+ :type color: vector[double] + """ + self.thisptr.set_color_from_range(color) + + def set_cover_from_file(self, cover_file_name): + """Creates the cover C from a file containing the cover elements of + each point (the order has to be the same as in the input file!). + + :param cover_file_name: Name of the input cover file. + :type cover_file_name: string + """ + if os.path.isfile(cover_file_name): + self.thisptr.set_cover_from_file(str.encode(cover_file_name)) + else: + print("file " + cover_file_name + " not found.") + + def set_cover_from_function(self): + """Creates a cover C from the preimages of the function f. + """ + self.thisptr.set_cover_from_function() + + def set_cover_from_Voronoi(self, m=100): + """Creates the cover C from the Voronoï cells of a subsampling of the + point cloud. + + :param m: Number of points in the subsample. Default value is 100. + :type m: int + """ + self.thisptr.set_cover_from_Euclidean_Voronoi(m) + + def set_function_from_coordinate(self, k): + """Creates the function f from the k-th coordinate of the point cloud. + + :param k: Coordinate to use (start at 0). + :type k: int + """ + self.thisptr.set_function_from_coordinate(k) + + def set_function_from_file(self, func_file_name): + """Creates the function f from a file containing the function values. + + :param func_file_name: Name of the input function file. + :type func_file_name: string + """ + if os.path.isfile(func_file_name): + self.thisptr.set_function_from_file(str.encode(func_file_name)) + else: + print("file " + func_file_name + " not found.") + + def set_function_from_range(self, function): + """Creates the function f from a vector stored in memory. + + :param function: Input vector of values. + :type function: vector[double] + """ + self.thisptr.set_function_from_range(function) + + def set_gain(self, g = 0.3): + """Sets a gain from a value stored in memory. + + :param g: Gain (default value is 0.3). + :type g: double + """ + self.thisptr.set_gain(g) + + def set_graph_from_automatic_rips(self, N=100): + """Creates a graph G from a Rips complex whose threshold value is + automatically tuned with subsampling—see. + + :param N: Number of subsampling iteration (the default reasonable value + is 100, but there is no guarantee on how to choose it). + :type N: int + :rtype: double + :returns: Delta threshold used for computing the Rips complex. + """ + return self.thisptr.set_graph_from_automatic_euclidean_rips(N) + + def set_graph_from_file(self, graph_file_name): + """Creates a graph G from a file containing the edges. + + :param graph_file_name: Name of the input graph file. The graph file + contains one edge per line, each edge being represented by the IDs of + its two nodes. + :type graph_file_name: string + """ + if os.path.isfile(graph_file_name): + self.thisptr.set_graph_from_file(str.encode(graph_file_name)) + else: + print("file " + graph_file_name + " not found.") + + def set_graph_from_OFF(self): + """Creates a graph G from the triangulation given by the input OFF + file. + """ + self.thisptr.set_graph_from_OFF() + + def set_graph_from_rips(self, threshold): + """Creates a graph G from a Rips complex. + + :param threshold: Threshold value for the Rips complex. + :type threshold: double + """ + self.thisptr.set_graph_from_euclidean_rips(threshold) + + def set_mask(self, nodemask): + """Sets the mask, which is a threshold integer such that nodes in the + complex that contain a number of data points which is less than or + equal to this threshold are not displayed. 
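+
+ For example, assuming cover_complex is an already configured CoverComplex,
+ the following hides every node covering at most two data points:
+
+ .. code-block:: python
+
+    cover_complex.set_mask(2)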
+ + :param nodemask: Threshold. + :type nodemask: int + """ + self.thisptr.set_mask(nodemask) + + def set_resolution_with_interval_length(self, resolution): + """Sets a length of intervals from a value stored in memory. + + :param resolution: Length of intervals. + :type resolution: double + """ + self.thisptr.set_resolution_with_interval_length(resolution) + + def set_resolution_with_interval_number(self, resolution): + """Sets a number of intervals from a value stored in memory. + + :param resolution: Number of intervals. + :type resolution: int + """ + self.thisptr.set_resolution_with_interval_number(resolution) + + def set_subsampling(self, constant, power): + """Sets the constants used to subsample the data set. These constants + are explained in :cite:`Carriere17c`. + + :param constant: Constant. + :type constant: double + :param power: Power. + :type resolution: double + """ + self.thisptr.set_subsampling(constant, power) + + def set_type(self, type): + """Specifies whether the type of the output simplicial complex. + + :param type: either "GIC" or "Nerve". + :type type: string + """ + self.thisptr.set_type(str.encode(type)) + + def set_verbose(self, verbose): + """Specifies whether the program should display information or not. + + :param verbose: true = display info, false = do not display info. + :type verbose: boolean + """ + self.thisptr.set_verbose(verbose) + + def subpopulation(self, c): + """Returns the data subset corresponding to a specific node of the + created complex. + + :param c: ID of the node. + :type c: int + :rtype: vector[int] + :returns: Vector of IDs of data points. + """ + return self.thisptr.subpopulation(c) + + def write_info(self): + """Creates a .txt file called SC.txt describing the 1-skeleton, which can + then be plotted with e.g. KeplerMapper. + """ + return self.thisptr.write_info() + + def plot_dot(self): + """Creates a .dot file called SC.dot for neato (part of the graphviz + package) once the simplicial complex is computed to get a visualization of + its 1-skeleton in a .pdf file. + """ + return self.thisptr.plot_DOT() + + def plot_off(self): + """Creates a .off file called SC.off for 3D visualization, which contains + the 2-skeleton of the GIC. This function assumes that the cover has been + computed with Voronoi. If data points are in 1D or 2D, the remaining + coordinates of the points embedded in 3D are set to 0. + """ + return self.thisptr.plot_OFF() diff --git a/src/python/gudhi/off_reader.pyx b/src/python/gudhi/off_reader.pyx new file mode 100644 index 00000000..9efd97ff --- /dev/null +++ b/src/python/gudhi/off_reader.pyx @@ -0,0 +1,38 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.string cimport string +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Off_reader_interface.h" namespace "Gudhi": + vector[vector[double]] read_points_from_OFF_file(string off_file) + +def read_off(off_file=''): + """Read points from OFF file. + + :param off_file: An OFF file style name. + :type off_file: string + + :returns: The point set. 
+ :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + return read_points_from_OFF_file(str.encode(off_file)) + else: + print("file " + off_file + " not found.") + return [] + diff --git a/src/python/gudhi/periodic_cubical_complex.pyx b/src/python/gudhi/periodic_cubical_complex.pyx new file mode 100644 index 00000000..724fadd4 --- /dev/null +++ b/src/python/gudhi/periodic_cubical_complex.pyx @@ -0,0 +1,190 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +import os + +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Cubical_complex_interface.h" namespace "Gudhi": + cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface>": + Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells, vector[bool] periodic_dimensions) + Periodic_cubical_complex_base_interface(string perseus_file) + int num_simplices() + int dimension() + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface>>": + Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + +# PeriodicCubicalComplex python interface +cdef class PeriodicCubicalComplex: + """The PeriodicCubicalComplex is an example of a structured complex useful + in computational mathematics (specially rigorous numerics) and image + analysis. + """ + cdef Periodic_cubical_complex_base_interface * thisptr + + cdef Periodic_cubical_complex_persistence_interface * pcohptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, dimensions=None, top_dimensional_cells=None, + periodic_dimensions=None, perseus_file=''): + """PeriodicCubicalComplex constructor from dimensions and + top_dimensional_cells or from a Perseus-style file name. + + :param dimensions: A list of number of top dimensional cells. + :type dimensions: list of int + :param top_dimensional_cells: A list of cells filtration values. + :type top_dimensional_cells: list of double + :param periodic_dimensions: A list of top dimensional cells periodicity value. + :type periodic_dimensions: list of boolean + + Or + + :param perseus_file: A Perseus-style file name. 
+ :type perseus_file: string + """ + + # The real cython constructor + def __cinit__(self, dimensions=None, top_dimensional_cells=None, + periodic_dimensions=None, perseus_file=''): + if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file is ''): + self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions) + elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file is not ''): + if os.path.isfile(perseus_file): + self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file)) + else: + print("file " + perseus_file + " not found.") + else: + print("CubicalComplex can be constructed from dimensions and " + "top_dimensional_cells or from a Perseus-style file name.") + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + if self.pcohptr != NULL: + del self.pcohptr + + def __is_defined(self): + """Returns true if PeriodicCubicalComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def __is_persistence_defined(self): + """Returns true if Persistence pointer is not NULL. + """ + return self.pcohptr != NULL + + def num_simplices(self): + """This function returns the number of all cubes in the complex. + + :returns: int -- the number of all cubes in the complex. + """ + return self.thisptr.num_simplices() + + def dimension(self): + """This function returns the dimension of the complex. + + :returns: int -- the complex dimension. + """ + return self.thisptr.dimension() + + def persistence(self, homology_coeff_field=11, min_persistence=0): + """This function returns the persistence of the complex. + + :param homology_coeff_field: The homology coefficient field. Must be a + prime number + :type homology_coeff_field: int. + :param min_persistence: The minimum persistence value to take into + account (strictly greater than min_persistence). Default value is + 0.0. + Sets min_persistence to -1.0 to see all values. + :type min_persistence: float. + :returns: list of pairs(dimension, pair(birth, death)) -- the + persistence of the complex. + """ + if self.pcohptr != NULL: + del self.pcohptr + if self.thisptr != NULL: + self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True) + cdef vector[pair[int, pair[double, double]]] persistence_result + if self.pcohptr != NULL: + persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) + return persistence_result + + def betti_numbers(self): + """This function returns the Betti numbers of the complex. + + :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]). + + :note: betti_numbers function requires persistence function to be + launched first. + + :note: betti_numbers function always returns [1, 0, 0, ...] as infinity + filtration cubes are not removed from the complex. + """ + cdef vector[int] bn_result + if self.pcohptr != NULL: + bn_result = self.pcohptr.betti_numbers() + return bn_result + + def persistent_betti_numbers(self, from_value, to_value): + """This function returns the persistent Betti numbers of the complex. + + :param from_value: The persistence birth limit to be added in the + numbers (persistent birth <= from_value). + :type from_value: float. + :param to_value: The persistence death limit to be added in the + numbers (persistent death > to_value). + :type to_value: float. 
+ + :returns: list of int -- The persistent Betti numbers ([B0, B1, ..., + Bn]). + + :note: persistent_betti_numbers function requires persistence + function to be launched first. + """ + cdef vector[int] pbn_result + if self.pcohptr != NULL: + pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) + return pbn_result + + def persistence_intervals_in_dimension(self, dimension): + """This function returns the persistence intervals of the complex in a + specific dimension. + + :param dimension: The specific dimension. + :type dimension: int. + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + + :note: intervals_in_dim function requires persistence function to be + launched first. + """ + cdef vector[pair[double,double]] intervals_result + if self.pcohptr != NULL: + intervals_result = self.pcohptr.intervals_in_dimension(dimension) + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") + return np_array(intervals_result) diff --git a/src/python/gudhi/persistence_graphical_tools.py b/src/python/gudhi/persistence_graphical_tools.py new file mode 100644 index 00000000..181bc8ea --- /dev/null +++ b/src/python/gudhi/persistence_graphical_tools.py @@ -0,0 +1,423 @@ +from os import path +from math import isfinite +import numpy as np + +from gudhi.reader_utils import read_persistence_intervals_in_dimension +from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau, Bertrand Michel + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau, Bertrand Michel" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def __min_birth_max_death(persistence, band=0.0): + """This function returns (min_birth, max_death) from the persistence. + + :param persistence: The persistence to plot. + :type persistence: list of tuples(dimension, tuple(birth, death)). + :param band: band + :type band: float. + :returns: (float, float) -- (min_birth, max_death). + """ + # Look for minimum birth date and maximum death date for plot optimisation + max_death = 0 + min_birth = persistence[0][1][0] + for interval in reversed(persistence): + if float(interval[1][1]) != float("inf"): + if float(interval[1][1]) > max_death: + max_death = float(interval[1][1]) + if float(interval[1][0]) > max_death: + max_death = float(interval[1][0]) + if float(interval[1][0]) < min_birth: + min_birth = float(interval[1][0]) + if band > 0.0: + max_death += band + return (min_birth, max_death) + + +""" +Only 13 colors for the palette +""" +palette = [ + "#ff0000", + "#00ff00", + "#0000ff", + "#00ffff", + "#ff00ff", + "#ffff00", + "#000000", + "#880000", + "#008800", + "#000088", + "#888800", + "#880088", + "#008888", +] + + +def plot_persistence_barcode( + persistence=[], + persistence_file="", + alpha=0.6, + max_intervals=1000, + max_barcodes=1000, + inf_delta=0.1, + legend=False, +): + """This function plots the persistence bar code from persistence values list + or from a :doc:`persistence file `. + + :param persistence: Persistence intervals values list grouped by dimension. + :type persistence: list of tuples(dimension, tuple(birth, death)). 
+ :param persistence_file: A :doc:`persistence file ` style name + (reset persistence if both are set). + :type persistence_file: string + :param alpha: barcode transparency value (0.0 transparent through 1.0 + opaque - default is 0.6). + :type alpha: float. + :param max_intervals: maximal number of intervals to display. + Selected intervals are those with the longest life time. Set it + to 0 to see all. Default value is 1000. + :type max_intervals: int. + :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x + inf_delta)` above :code:`max_death` value. A reasonable value is + between 0.05 and 0.5 - default is 0.1. + :type inf_delta: float. + :param legend: Display the dimension color legend (default is False). + :type legend: boolean. + :returns: A matplotlib object containing horizontal bar plot of persistence + (launch `show()` method on it to display it). + """ + try: + import matplotlib.pyplot as plt + import matplotlib.patches as mpatches + + if persistence_file is not "": + if path.isfile(persistence_file): + # Reset persistence + persistence = [] + diag = read_persistence_intervals_grouped_by_dimension( + persistence_file=persistence_file + ) + for key in diag.keys(): + for persistence_interval in diag[key]: + persistence.append((key, persistence_interval)) + else: + print("file " + persistence_file + " not found.") + return None + + if max_barcodes is not 1000: + print("Deprecated parameter. It has been replaced by max_intervals") + max_intervals = max_barcodes + + if max_intervals > 0 and max_intervals < len(persistence): + # Sort by life time, then takes only the max_intervals elements + persistence = sorted( + persistence, + key=lambda life_time: life_time[1][1] - life_time[1][0], + reverse=True, + )[:max_intervals] + + persistence = sorted(persistence, key=lambda birth: birth[1][0]) + + (min_birth, max_death) = __min_birth_max_death(persistence) + ind = 0 + delta = (max_death - min_birth) * inf_delta + # Replace infinity values with max_death + delta for bar code to be more + # readable + infinity = max_death + delta + axis_start = min_birth - delta + # Draw horizontal bars in loop + for interval in reversed(persistence): + if float(interval[1][1]) != float("inf"): + # Finite death case + plt.barh( + ind, + (interval[1][1] - interval[1][0]), + height=0.8, + left=interval[1][0], + alpha=alpha, + color=palette[interval[0]], + linewidth=0, + ) + else: + # Infinite death case for diagram to be nicer + plt.barh( + ind, + (infinity - interval[1][0]), + height=0.8, + left=interval[1][0], + alpha=alpha, + color=palette[interval[0]], + linewidth=0, + ) + ind = ind + 1 + + if legend: + dimensions = list(set(item[0] for item in persistence)) + plt.legend( + handles=[ + mpatches.Patch(color=palette[dim], label=str(dim)) + for dim in dimensions + ], + loc="lower right", + ) + plt.title("Persistence barcode") + # Ends plot on infinity value and starts a little bit before min_birth + plt.axis([axis_start, infinity, 0, ind]) + return plt + + except ImportError: + print("This function is not available, you may be missing matplotlib.") + + +def plot_persistence_diagram( + persistence=[], + persistence_file="", + alpha=0.6, + band=0.0, + max_intervals=1000, + max_plots=1000, + inf_delta=0.1, + legend=False, +): + """This function plots the persistence diagram from persistence values + list or from a :doc:`persistence file `. + + :param persistence: Persistence intervals values list grouped by dimension. + :type persistence: list of tuples(dimension, tuple(birth, death)). 
+ :param persistence_file: A :doc:`persistence file ` style name + (reset persistence if both are set). + :type persistence_file: string + :param alpha: plot transparency value (0.0 transparent through 1.0 + opaque - default is 0.6). + :type alpha: float. + :param band: band (not displayed if :math:`\leq` 0. - default is 0.) + :type band: float. + :param max_intervals: maximal number of intervals to display. + Selected intervals are those with the longest life time. Set it + to 0 to see all. Default value is 1000. + :type max_intervals: int. + :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x + inf_delta)` above :code:`max_death` value. A reasonable value is + between 0.05 and 0.5 - default is 0.1. + :type inf_delta: float. + :param legend: Display the dimension color legend (default is False). + :type legend: boolean. + :returns: A matplotlib object containing diagram plot of persistence + (launch `show()` method on it to display it). + """ + try: + import matplotlib.pyplot as plt + import matplotlib.patches as mpatches + + if persistence_file is not "": + if path.isfile(persistence_file): + # Reset persistence + persistence = [] + diag = read_persistence_intervals_grouped_by_dimension( + persistence_file=persistence_file + ) + for key in diag.keys(): + for persistence_interval in diag[key]: + persistence.append((key, persistence_interval)) + else: + print("file " + persistence_file + " not found.") + return None + + if max_plots is not 1000: + print("Deprecated parameter. It has been replaced by max_intervals") + max_intervals = max_plots + + if max_intervals > 0 and max_intervals < len(persistence): + # Sort by life time, then takes only the max_intervals elements + persistence = sorted( + persistence, + key=lambda life_time: life_time[1][1] - life_time[1][0], + reverse=True, + )[:max_intervals] + + (min_birth, max_death) = __min_birth_max_death(persistence, band) + delta = (max_death - min_birth) * inf_delta + # Replace infinity values with max_death + delta for diagram to be more + # readable + infinity = max_death + delta + axis_start = min_birth - delta + + # line display of equation : birth = death + x = np.linspace(axis_start, infinity, 1000) + # infinity line and text + plt.plot(x, x, color="k", linewidth=1.0) + plt.plot(x, [infinity] * len(x), linewidth=1.0, color="k", alpha=alpha) + plt.text(axis_start, infinity, r"$\infty$", color="k", alpha=alpha) + # bootstrap band + if band > 0.0: + plt.fill_between(x, x, x + band, alpha=alpha, facecolor="red") + + # Draw points in loop + for interval in reversed(persistence): + if float(interval[1][1]) != float("inf"): + # Finite death case + plt.scatter( + interval[1][0], + interval[1][1], + alpha=alpha, + color=palette[interval[0]], + ) + else: + # Infinite death case for diagram to be nicer + plt.scatter( + interval[1][0], infinity, alpha=alpha, color=palette[interval[0]] + ) + + if legend: + dimensions = list(set(item[0] for item in persistence)) + plt.legend( + handles=[ + mpatches.Patch(color=palette[dim], label=str(dim)) + for dim in dimensions + ] + ) + + plt.title("Persistence diagram") + plt.xlabel("Birth") + plt.ylabel("Death") + # Ends plot on infinity value and starts a little bit before min_birth + plt.axis([axis_start, infinity, axis_start, infinity + delta]) + return plt + + except ImportError: + print("This function is not available, you may be missing matplotlib.") + + +def plot_persistence_density( + persistence=[], + persistence_file="", + nbins=300, + bw_method=None, + max_intervals=1000, + 
dimension=None, + cmap=None, + legend=False, +): + """This function plots the persistence density from persistence + values list or from a :doc:`persistence file `. Be + aware that this function does not distinguish the dimension, it is + up to you to select the required one. This function also does not handle + degenerate data set (scipy correlation matrix inversion can fail). + + :param persistence: Persistence intervals values list grouped by dimension. + :type persistence: list of tuples(dimension, tuple(birth, death)). + :param persistence_file: A :doc:`persistence file ` + style name (reset persistence if both are set). + :type persistence_file: string + :param nbins: Evaluate a gaussian kde on a regular grid of nbins x + nbins over data extents (default is 300) + :type nbins: int. + :param bw_method: The method used to calculate the estimator + bandwidth. This can be 'scott', 'silverman', a scalar constant + or a callable. If a scalar, this will be used directly as + kde.factor. If a callable, it should take a gaussian_kde + instance as only parameter and return a scalar. If None + (default), 'scott' is used. See + `scipy.stats.gaussian_kde documentation + `_ + for more details. + :type bw_method: str, scalar or callable, optional. + :param max_intervals: maximal number of points used in the density + estimation. + Selected intervals are those with the longest life time. Set it + to 0 to see all. Default value is 1000. + :type max_intervals: int. + :param dimension: the dimension to be selected in the intervals + (default is None to mix all dimensions). + :type dimension: int. + :param cmap: A matplotlib colormap (default is + matplotlib.pyplot.cm.hot_r). + :type cmap: cf. matplotlib colormap. + :param legend: Display the color bar values (default is False). + :type legend: boolean. + :returns: A matplotlib object containing diagram plot of persistence + (launch `show()` method on it to display it). 
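+
+ A minimal usage sketch (the dimension-0 intervals below are made up purely
+ for illustration; matplotlib and scipy must be available):
+
+ .. code-block:: python
+
+    from gudhi.persistence_graphical_tools import plot_persistence_density
+
+    diag = [(0, (0.0, 1.0)), (0, (0.1, 0.4)), (0, (0.2, 0.8)),
+            (0, (0.3, 0.5)), (0, (0.4, 0.9))]
+    plot = plot_persistence_density(persistence=diag, dimension=0,
+                                    legend=True)
+    plot.show()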
+ """ + try: + import matplotlib.pyplot as plt + from scipy.stats import kde + + if persistence_file is not "": + if dimension is None: + # All dimension case + dimension = -1 + if path.isfile(persistence_file): + persistence_dim = read_persistence_intervals_in_dimension( + persistence_file=persistence_file, only_this_dim=dimension + ) + print(persistence_dim) + else: + print("file " + persistence_file + " not found.") + return None + + if len(persistence) > 0: + persistence_dim = np.array( + [ + (dim_interval[1][0], dim_interval[1][1]) + for dim_interval in persistence + if (dim_interval[0] == dimension) or (dimension is None) + ] + ) + + persistence_dim = persistence_dim[np.isfinite(persistence_dim[:, 1])] + if max_intervals > 0 and max_intervals < len(persistence_dim): + # Sort by life time, then takes only the max_intervals elements + persistence_dim = np.array( + sorted( + persistence_dim, + key=lambda life_time: life_time[1] - life_time[0], + reverse=True, + )[:max_intervals] + ) + + # Set as numpy array birth and death (remove undefined values - inf and NaN) + birth = persistence_dim[:, 0] + death = persistence_dim[:, 1] + + # line display of equation : birth = death + x = np.linspace(death.min(), birth.max(), 1000) + plt.plot(x, x, color="k", linewidth=1.0) + + # Evaluate a gaussian kde on a regular grid of nbins x nbins over data extents + k = kde.gaussian_kde([birth, death], bw_method=bw_method) + xi, yi = np.mgrid[ + birth.min() : birth.max() : nbins * 1j, + death.min() : death.max() : nbins * 1j, + ] + zi = k(np.vstack([xi.flatten(), yi.flatten()])) + + # default cmap value cannot be done at argument definition level as matplotlib is not yet defined. + if cmap is None: + cmap = plt.cm.hot_r + # Make the plot + plt.pcolormesh(xi, yi, zi.reshape(xi.shape), cmap=cmap) + + if legend: + plt.colorbar() + + plt.title("Persistence density") + plt.xlabel("Birth") + plt.ylabel("Death") + return plt + + except ImportError: + print( + "This function is not available, you may be missing matplotlib and/or scipy." + ) diff --git a/src/python/gudhi/reader_utils.pyx b/src/python/gudhi/reader_utils.pyx new file mode 100644 index 00000000..147fae71 --- /dev/null +++ b/src/python/gudhi/reader_utils.pyx @@ -0,0 +1,87 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.string cimport string +from libcpp.map cimport map +from libcpp.pair cimport pair + +from os import path +from numpy import array as np_array + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2017 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2017 Inria" +__license__ = "MIT" + +cdef extern from "Reader_utils_interface.h" namespace "Gudhi": + vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator) + map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename) + vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim) + +def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'): + """Read lower triangular matrix from a CSV style file. + + :param csv_file: A CSV file style name. + :type csv_file: string + :param separator: The value separator in the CSV file. 
Default value is ';' + :type separator: char + + :returns: The lower triangular matrix. + :rtype: vector[vector[double]] + """ + if csv_file is not '': + if path.isfile(csv_file): + return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0])) + print("file " + csv_file + " not set or not found.") + return [] + +def read_persistence_intervals_grouped_by_dimension(persistence_file=''): + """Reads a file containing persistence intervals. + Each line might contain 2, 3 or 4 values: [[field] dimension] birth death + The return value is an `map[dim, vector[pair[birth, death]]]` + where `dim` is an `int`, `birth` a `double`, and `death` a `double`. + Note: the function does not check that birth <= death. + + :param persistence_file: A persistence file style name. + :type persistence_file: string + + :returns: The persistence pairs grouped by dimension. + :rtype: map[int, vector[pair[double, double]]] + """ + if persistence_file is not '': + if path.isfile(persistence_file): + return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file)) + print("file " + persistence_file + " not set or not found.") + return [] + +def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1): + """Reads a file containing persistence intervals. + Each line of persistence_file might contain 2, 3 or 4 values: + [[field] dimension] birth death + Note: the function does not check that birth <= death. + + :param persistence_file: A persistence file style name. + :type persistence_file: string + :param only_this_dim: The specific dimension. Default value is -1. + If `only_this_dim` = -1, dimension is ignored and all lines are returned. + If `only_this_dim` is >= 0, only the lines where dimension = + `only_this_dim` (or where dimension is not specified) are returned. + :type only_this_dim: int. + + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + """ + if persistence_file is not '': + if path.isfile(persistence_file): + return np_array(read_pers_intervals_in_dimension(str.encode( + persistence_file), only_this_dim)) + print("file " + persistence_file + " not set or not found.") + return [] diff --git a/src/python/gudhi/rips_complex.pyx b/src/python/gudhi/rips_complex.pyx new file mode 100644 index 00000000..f2cd6a8d --- /dev/null +++ b/src/python/gudhi/rips_complex.pyx @@ -0,0 +1,103 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Rips_complex_interface.h" namespace "Gudhi": + cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface": + Rips_complex_interface() + void init_points(vector[vector[double]] values, double threshold) + void init_matrix(vector[vector[double]] values, double threshold) + void init_points_sparse(vector[vector[double]] values, double threshold, double sparse) + void init_matrix_sparse(vector[vector[double]] values, double threshold, double sparse) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max) + +# RipsComplex python interface +cdef class RipsComplex: + """The data structure is a one skeleton graph, or Rips graph, containing + edges when the edge length is less or equal to a given threshold. Edge + length is computed from a user given point cloud with a given distance + function, or a distance matrix. + """ + + cdef Rips_complex_interface thisref + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, points=None, distance_matrix=None, + max_edge_length=float('inf'), sparse=None): + """RipsComplex constructor. + + :param max_edge_length: Rips value. + :type max_edge_length: float + + :param points: A list of points in d-Dimension. + :type points: list of list of double + + Or + + :param distance_matrix: A distance matrix (full square or lower + triangular). + :type points: list of list of double + + And in both cases + + :param sparse: If this is not None, it switches to building a sparse + Rips and represents the approximation parameter epsilon. + :type sparse: float + """ + + # The real cython constructor + def __cinit__(self, points=None, distance_matrix=None, + max_edge_length=float('inf'), sparse=None): + if sparse is not None: + if distance_matrix is not None: + self.thisref.init_matrix_sparse(distance_matrix, + max_edge_length, + sparse) + else: + if points is None: + # Empty Rips construction + points=[] + self.thisref.init_points_sparse(points, max_edge_length, sparse) + else: + if distance_matrix is not None: + self.thisref.init_matrix(distance_matrix, max_edge_length) + else: + if points is None: + # Empty Rips construction + points=[] + self.thisref.init_points(points, max_edge_length) + + + def create_simplex_tree(self, max_dimension=1): + """ + :param max_dimension: graph expansion for rips until this given maximal + dimension. + :type max_dimension: int + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + self.thisref.create_simplex_tree(stree_int_ptr, + max_dimension) + return stree diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd new file mode 100644 index 00000000..5f86cfe2 --- /dev/null +++ b/src/python/gudhi/simplex_tree.pxd @@ -0,0 +1,56 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp cimport bool +from libcpp.string cimport string + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": + cdef cppclass Simplex_tree_options_full_featured: + pass + + cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface": + Simplex_tree() + double simplex_filtration(vector[int] simplex) + void assign_simplex_filtration(vector[int] simplex, double filtration) + void initialize_filtration() + int num_vertices() + int num_simplices() + void set_dimension(int dimension) + int dimension() + int upper_bound_dimension() + bool find_simplex(vector[int] simplex) + bool insert_simplex_and_subfaces(vector[int] simplex, + double filtration) + vector[pair[vector[int], double]] get_filtration() + vector[pair[vector[int], double]] get_skeleton(int dimension) + vector[pair[vector[int], double]] get_star(vector[int] simplex) + vector[pair[vector[int], double]] get_cofaces(vector[int] simplex, + int dimension) + void expansion(int max_dim) + void remove_maximal_simplex(vector[int] simplex) + bool prune_above_filtration(double filtration) + bool make_filtration_non_decreasing() + +cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": + cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": + Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) + vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence) + vector[int] betti_numbers() + vector[int] persistent_betti_numbers(double from_value, double to_value) + vector[pair[double,double]] intervals_in_dimension(int dimension) + void write_output_diagram(string diagram_file_name) + vector[pair[vector[int], vector[int]]] persistence_pairs() diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx new file mode 100644 index 00000000..9f490271 --- /dev/null +++ b/src/python/gudhi/simplex_tree.pyx @@ -0,0 +1,508 @@ +from libc.stdint cimport intptr_t +from numpy import array as np_array +cimport simplex_tree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +# SimplexTree python interface +cdef class SimplexTree: + """The simplex tree is an efficient and flexible data structure for + representing general (filtered) simplicial complexes. The data structure + is described in Jean-Daniel Boissonnat and Clément Maria. The Simplex + Tree: An Efficient Data Structure for General Simplicial Complexes. + Algorithmica, pages 1–22, 2014. + + This class is a filtered, with keys, and non contiguous vertices version + of the simplex tree. 
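+
+ A minimal usage sketch (the simplices and filtration values below are
+ arbitrary):
+
+ .. code-block:: python
+
+    from gudhi import SimplexTree
+
+    st = SimplexTree()
+    st.insert([0, 1, 2], filtration=1.0)  # the triangle and all its faces
+    st.insert([3], filtration=0.5)        # an isolated vertex
+    print(st.num_vertices())   # 4
+    print(st.num_simplices())  # 8: 4 vertices, 3 edges, 1 triangle
+    print(st.dimension())      # 2
+    st.initialize_filtration()
+    for simplex, filt in st.get_filtration():
+        print(simplex, filt)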
+ """ + # unfortunately 'cdef public Simplex_tree_interface_full_featured* thisptr' is not possible + # Use intptr_t instead to cast the pointer + cdef public intptr_t thisptr + + # Get the pointer casted as it should be + cdef Simplex_tree_interface_full_featured* get_ptr(self): + return (self.thisptr) + + cdef Simplex_tree_persistence_interface * pcohptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self): + """SimplexTree constructor. + """ + + # The real cython constructor + def __cinit__(self): + self.thisptr = (new Simplex_tree_interface_full_featured()) + + def __dealloc__(self): + cdef Simplex_tree_interface_full_featured* ptr = self.get_ptr() + if ptr != NULL: + del ptr + if self.pcohptr != NULL: + del self.pcohptr + + def __is_defined(self): + """Returns true if SimplexTree pointer is not NULL. + """ + return self.get_ptr() != NULL + + def __is_persistence_defined(self): + """Returns true if Persistence pointer is not NULL. + """ + return self.pcohptr != NULL + + def filtration(self, simplex): + """This function returns the filtration value for a given N-simplex in + this simplicial complex, or +infinity if it is not in the complex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :returns: The simplicial complex filtration value. + :rtype: float + """ + return self.get_ptr().simplex_filtration(simplex) + + def assign_filtration(self, simplex, filtration): + """This function assigns the simplicial complex filtration value for a + given N-simplex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :param filtration: The simplicial complex filtration value. + :type filtration: float + """ + self.get_ptr().assign_simplex_filtration(simplex, filtration) + + def initialize_filtration(self): + """This function initializes and sorts the simplicial complex + filtration vector. + + .. note:: + + This function must be launched before + :func:`persistence()`, + :func:`betti_numbers()`, + :func:`persistent_betti_numbers()`, + or :func:`get_filtration()` + after :func:`inserting` or + :func:`removing` + simplices. + """ + self.get_ptr().initialize_filtration() + + def num_vertices(self): + """This function returns the number of vertices of the simplicial + complex. + + :returns: The simplicial complex number of vertices. + :rtype: int + """ + return self.get_ptr().num_vertices() + + def num_simplices(self): + """This function returns the number of simplices of the simplicial + complex. + + :returns: the simplicial complex number of simplices. + :rtype: int + """ + return self.get_ptr().num_simplices() + + def dimension(self): + """This function returns the dimension of the simplicial complex. + + :returns: the simplicial complex dimension. + :rtype: int + + .. note:: + + This function is not constant time because it can recompute + dimension if required (can be triggered by + :func:`remove_maximal_simplex()` + or + :func:`prune_above_filtration()` + methods). + """ + return self.get_ptr().dimension() + + def upper_bound_dimension(self): + """This function returns a valid dimension upper bound of the + simplicial complex. + + :returns: an upper bound on the dimension of the simplicial complex. + :rtype: int + """ + return self.get_ptr().upper_bound_dimension() + + def set_dimension(self, dimension): + """This function sets the dimension of the simplicial complex. + + :param dimension: The new dimension value. + :type dimension: int. + + .. 
note:: + + This function must be used with caution because it disables + dimension recomputation when required + (this recomputation can be triggered by + :func:`remove_maximal_simplex()` + or + :func:`prune_above_filtration()` + ). + """ + self.get_ptr().set_dimension(dimension) + + def find(self, simplex): + """This function returns if the N-simplex was found in the simplicial + complex or not. + + :param simplex: The N-simplex to find, represented by a list of vertex. + :type simplex: list of int. + :returns: true if the simplex was found, false otherwise. + :rtype: bool + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + return self.get_ptr().find_simplex(csimplex) + + def insert(self, simplex, filtration=0.0): + """This function inserts the given N-simplex and its subfaces with the + given filtration value (default value is '0.0'). If some of those + simplices are already present with a higher filtration value, their + filtration value is lowered. + + :param simplex: The N-simplex to insert, represented by a list of + vertex. + :type simplex: list of int. + :param filtration: The filtration value of the simplex. + :type filtration: float. + :returns: true if the simplex was not yet in the complex, false + otherwise (whatever its original filtration value). + :rtype: bool + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + return self.get_ptr().insert_simplex_and_subfaces(csimplex, + filtration) + + def get_filtration(self): + """This function returns a list of all simplices with their given + filtration values. + + :returns: The simplices sorted by increasing filtration values. + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[pair[vector[int], double]] filtration \ + = self.get_ptr().get_filtration() + ct = [] + for filtered_complex in filtration: + v = [] + for vertex in filtered_complex.first: + v.append(vertex) + ct.append((v, filtered_complex.second)) + return ct + + def get_skeleton(self, dimension): + """This function returns the (simplices of the) skeleton of a maximum + given dimension. + + :param dimension: The skeleton dimension value. + :type dimension: int. + :returns: The (simplices of the) skeleton of a maximum dimension. + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[pair[vector[int], double]] skeleton \ + = self.get_ptr().get_skeleton(dimension) + ct = [] + for filtered_simplex in skeleton: + v = [] + for vertex in filtered_simplex.first: + v.append(vertex) + ct.append((v, filtered_simplex.second)) + return ct + + def get_star(self, simplex): + """This function returns the star of a given N-simplex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :returns: The (simplices of the) star of a simplex. + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + cdef vector[pair[vector[int], double]] star \ + = self.get_ptr().get_star(csimplex) + ct = [] + for filtered_simplex in star: + v = [] + for vertex in filtered_simplex.first: + v.append(vertex) + ct.append((v, filtered_simplex.second)) + return ct + + def get_cofaces(self, simplex, codimension): + """This function returns the cofaces of a given N-simplex with a + given codimension. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + :param codimension: The codimension. 
If codimension = 0, all cofaces + are returned (equivalent of get_star function) + :type codimension: int. + :returns: The (simplices of the) cofaces of a simplex + :rtype: list of tuples(simplex, filtration) + """ + cdef vector[int] csimplex + for i in simplex: + csimplex.push_back(i) + cdef vector[pair[vector[int], double]] cofaces \ + = self.get_ptr().get_cofaces(csimplex, codimension) + ct = [] + for filtered_simplex in cofaces: + v = [] + for vertex in filtered_simplex.first: + v.append(vertex) + ct.append((v, filtered_simplex.second)) + return ct + + def remove_maximal_simplex(self, simplex): + """This function removes a given maximal N-simplex from the simplicial + complex. + + :param simplex: The N-simplex, represented by a list of vertex. + :type simplex: list of int. + + .. note:: + + Be aware that removing is shifting data in a flat_map + (:func:`initialize_filtration()` to be done). + + .. note:: + + The dimension of the simplicial complex may be lower after calling + remove_maximal_simplex than it was before. However, + :func:`upper_bound_dimension()` + method will return the old value, which + remains a valid upper bound. If you care, you can call + :func:`dimension()` + to recompute the exact dimension. + """ + self.get_ptr().remove_maximal_simplex(simplex) + + def prune_above_filtration(self, filtration): + """Prune above filtration value given as parameter. + + :param filtration: Maximum threshold value. + :type filtration: float. + :returns: The filtration modification information. + :rtype: bool + + + .. note:: + + Some simplex tree functions require the filtration to be valid. + prune_above_filtration function is not launching + :func:`initialize_filtration()` + but returns the filtration modification + information. If the complex has changed , please call + :func:`initialize_filtration()` + to recompute it. + + .. note:: + + Note that the dimension of the simplicial complex may be lower + after calling + :func:`prune_above_filtration()` + than it was before. However, + :func:`upper_bound_dimension()` + will return the old value, which remains a + valid upper bound. If you care, you can call + :func:`dimension()` + method to recompute the exact dimension. + """ + return self.get_ptr().prune_above_filtration(filtration) + + def expansion(self, max_dim): + """Expands the Simplex_tree containing only its one skeleton + until dimension max_dim. + + The expanded simplicial complex until dimension :math:`d` + attached to a graph :math:`G` is the maximal simplicial complex of + dimension at most :math:`d` admitting the graph :math:`G` as + :math:`1`-skeleton. + The filtration value assigned to a simplex is the maximal filtration + value of one of its edges. + + The Simplex_tree must contain no simplex of dimension bigger than + 1 when calling the method. + + :param max_dim: The maximal dimension. + :type max_dim: int. + """ + self.get_ptr().expansion(max_dim) + + def make_filtration_non_decreasing(self): + """This function ensures that each simplex has a higher filtration + value than its faces by increasing the filtration values. + + :returns: True if any filtration value was modified, + False if the filtration was already non-decreasing. + :rtype: bool + + + .. note:: + + Some simplex tree functions require the filtration to be valid. + make_filtration_non_decreasing function is not launching + :func:`initialize_filtration()` + but returns the filtration modification + information. If the complex has changed , please call + :func:`initialize_filtration()` + to recompute it. 
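+
+        For instance, assuming `st` is a SimplexTree whose filtration values
+        have just been modified, a typical sequence would be::
+
+            if st.make_filtration_non_decreasing():
+                st.initialize_filtration()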
+ """ + return self.get_ptr().make_filtration_non_decreasing() + + def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False): + """This function returns the persistence of the simplicial complex. + + :param homology_coeff_field: The homology coefficient field. Must be a + prime number. Default value is 11. + :type homology_coeff_field: int. + :param min_persistence: The minimum persistence value to take into + account (strictly greater than min_persistence). Default value is + 0.0. + Sets min_persistence to -1.0 to see all values. + :type min_persistence: float. + :param persistence_dim_max: If true, the persistent homology for the + maximal dimension in the complex is computed. If false, it is + ignored. Default is false. + :type persistence_dim_max: bool + :returns: The persistence of the simplicial complex. + :rtype: list of pairs(dimension, pair(birth, death)) + """ + if self.pcohptr != NULL: + del self.pcohptr + self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), persistence_dim_max) + cdef vector[pair[int, pair[double, double]]] persistence_result + if self.pcohptr != NULL: + persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence) + return persistence_result + + def betti_numbers(self): + """This function returns the Betti numbers of the simplicial complex. + + :returns: The Betti numbers ([B0, B1, ..., Bn]). + :rtype: list of int + + :note: betti_numbers function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[int] bn_result + if self.pcohptr != NULL: + bn_result = self.pcohptr.betti_numbers() + else: + print("betti_numbers function requires persistence function" + " to be launched first.") + return bn_result + + def persistent_betti_numbers(self, from_value, to_value): + """This function returns the persistent Betti numbers of the + simplicial complex. + + :param from_value: The persistence birth limit to be added in the + numbers (persistent birth <= from_value). + :type from_value: float. + :param to_value: The persistence death limit to be added in the + numbers (persistent death > to_value). + :type to_value: float. + + :returns: The persistent Betti numbers ([B0, B1, ..., Bn]). + :rtype: list of int + + :note: persistent_betti_numbers function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[int] pbn_result + if self.pcohptr != NULL: + pbn_result = self.pcohptr.persistent_betti_numbers(from_value, to_value) + else: + print("persistent_betti_numbers function requires persistence function" + " to be launched first.") + return pbn_result + + def persistence_intervals_in_dimension(self, dimension): + """This function returns the persistence intervals of the simplicial + complex in a specific dimension. + + :param dimension: The specific dimension. + :type dimension: int. + :returns: The persistence intervals. + :rtype: numpy array of dimension 2 + + :note: intervals_in_dim function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[pair[double,double]] intervals_result + if self.pcohptr != NULL: + intervals_result = self.pcohptr.intervals_in_dimension(dimension) + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") + return np_array(intervals_result) + + def persistence_pairs(self): + """This function returns a list of persistence birth and death simplices pairs. + + :returns: A list of persistence simplices intervals. 
+ :rtype: list of pair of list of int + + :note: persistence_pairs function requires + :func:`persistence()` + function to be launched first. + """ + cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result + if self.pcohptr != NULL: + persistence_pairs_result = self.pcohptr.persistence_pairs() + else: + print("persistence_pairs function requires persistence function" + " to be launched first.") + return persistence_pairs_result + + def write_persistence_diagram(self, persistence_file=''): + """This function writes the persistence intervals of the simplicial + complex in a user given file name. + + :param persistence_file: The specific dimension. + :type persistence_file: string. + + :note: intervals_in_dim function requires + :func:`persistence()` + function to be launched first. + """ + if self.pcohptr != NULL: + if persistence_file != '': + self.pcohptr.write_output_diagram(str.encode(persistence_file)) + else: + print("persistence_file must be specified") + else: + print("intervals_in_dim function requires persistence function" + " to be launched first.") diff --git a/src/python/gudhi/strong_witness_complex.pyx b/src/python/gudhi/strong_witness_complex.pyx new file mode 100644 index 00000000..e757abea --- /dev/null +++ b/src/python/gudhi/strong_witness_complex.pyx @@ -0,0 +1,78 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface": + Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + +# StrongWitnessComplex python interface +cdef class StrongWitnessComplex: + """Constructs (strong) witness complex for a given table of nearest + landmarks with respect to witnesses. + """ + + cdef Strong_witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, nearest_landmark_table=None): + """StrongWitnessComplex constructor. + + :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. + `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to + witness w, and d is the (squared) distance between l and w. 
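+            For instance, a hypothetical table for three witnesses and two
+            landmarks could be
+            `[[(0, 0.0), (1, 0.1)], [(1, 0.0), (0, 0.2)], [(0, 0.0), (1, 0.3)]]`.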
+ :type nearest_landmark_table: list of list of pair of int and float + """ + + # The real cython constructor + def __cinit__(self, nearest_landmark_table=None): + if nearest_landmark_table is not None: + self.thisptr = new Strong_witness_complex_interface(nearest_landmark_table) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if StrongWitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): + """ + :param max_alpha_square: The maximum relaxation parameter. + Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) + return stree diff --git a/src/python/gudhi/subsampling.pyx b/src/python/gudhi/subsampling.pyx new file mode 100644 index 00000000..1135c1fb --- /dev/null +++ b/src/python/gudhi/subsampling.pyx @@ -0,0 +1,130 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.string cimport string +from libcpp cimport bool +import os + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling": + vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points) + vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point) + vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points) + vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point) + vector[vector[double]] subsampling_n_random_points(vector[vector[double]] points, unsigned nb_points) + vector[vector[double]] subsampling_n_random_points_from_file(string off_file, unsigned nb_points) + vector[vector[double]] subsampling_sparsify_points(vector[vector[double]] points, double min_squared_dist) + vector[vector[double]] subsampling_sparsify_points_from_file(string off_file, double min_squared_dist) + +def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_point = ''): + """Subsample by a greedy strategy of iteratively adding the farthest point + from the current chosen point set to the subsampling. + The iteration starts with the landmark `starting point`. + + :param points: The input point set. + :type points: vector[vector[double]]. + + Or + + :param off_file: An OFF file style name. + :type off_file: string + + :param nb_points: Number of points of the subsample. + :type nb_points: unsigned. + :param starting_point: The iteration starts with the landmark `starting \ + point`,which is the index of the poit to start with. If not set, this \ + index is choosen randomly. + :type starting_point: unsigned. 
+ :returns: The subsample point set. + :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + if starting_point is '': + return subsampling_n_farthest_points_from_file(str.encode(off_file), + nb_points) + else: + return subsampling_n_farthest_points_from_file(str.encode(off_file), + nb_points, + starting_point) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty points + points=[] + if starting_point is '': + return subsampling_n_farthest_points(points, nb_points) + else: + return subsampling_n_farthest_points(points, nb_points, + starting_point) + +def pick_n_random_points(points=None, off_file='', nb_points=0): + """Subsample a point set by picking random vertices. + + :param points: The input point set. + :type points: vector[vector[double]]. + + Or + + :param off_file: An OFF file style name. + :type off_file: string + + :param nb_points: Number of points of the subsample. + :type nb_points: unsigned. + :returns: The subsample point set. + :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + return subsampling_n_random_points_from_file(str.encode(off_file), + nb_points) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty points + points=[] + return subsampling_n_random_points(points, nb_points) + +def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0): + """Outputs a subset of the input points so that the squared distance + between any two points is greater than or equal to min_squared_dist. + + :param points: The input point set. + :type points: vector[vector[double]]. + + Or + + :param off_file: An OFF file style name. + :type off_file: string + + :param min_squared_dist: Minimum squared distance separating the output \ + points. + :type min_squared_dist: float. + :returns: The subsample point set. + :rtype: vector[vector[double]] + """ + if off_file is not '': + if os.path.isfile(off_file): + return subsampling_sparsify_points_from_file(str.encode(off_file), + min_squared_dist) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty points + points=[] + return subsampling_sparsify_points(points, min_squared_dist) diff --git a/src/python/gudhi/tangential_complex.pyx b/src/python/gudhi/tangential_complex.pyx new file mode 100644 index 00000000..3a945fe2 --- /dev/null +++ b/src/python/gudhi/tangential_complex.pyx @@ -0,0 +1,173 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libcpp.string cimport string +from libcpp cimport bool +from libc.stdint cimport intptr_t +import os + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "GPL v3" + +cdef extern from "Tangential_complex_interface.h" namespace "Gudhi": + cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface": + Tangential_complex_interface(int intrisic_dim, vector[vector[double]] points) + # bool from_file is a workaround for cython to find the correct signature + Tangential_complex_interface(int intrisic_dim, string off_file, bool from_file) + void compute_tangential_complex() except + + vector[double] get_point(unsigned vertex) + unsigned number_of_vertices() + unsigned number_of_simplices() + unsigned number_of_inconsistent_simplices() + unsigned number_of_inconsistent_stars() + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree) + void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) + void set_max_squared_edge_length(double max_squared_edge_length) + +# TangentialComplex python interface +cdef class TangentialComplex: + """The class Tangential_complex represents a tangential complex. After the + computation of the complex, an optional post-processing called perturbation + can be run to attempt to remove inconsistencies. + """ + + cdef Tangential_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, intrisic_dim, points=None, off_file=''): + """TangentialComplex constructor. + + :param intrisic_dim: Intrinsic dimension of the manifold. + :type intrisic_dim: integer + + :param points: A list of points in d-Dimension. + :type points: list of list of double + + Or + + :param off_file: An OFF file style name. + :type off_file: string + """ + + # The real cython constructor + def __cinit__(self, intrisic_dim, points=None, off_file=''): + if off_file is not '': + if os.path.isfile(off_file): + self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True) + else: + print("file " + off_file + " not found.") + else: + if points is None: + # Empty tangential construction + points=[] + self.thisptr = new Tangential_complex_interface(intrisic_dim, points) + + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if TangentialComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def compute_tangential_complex(self): + """This function computes the tangential complex. + + Raises: + ValueError: In debug mode, if the computed star dimension is too + low. Try to set a bigger maximal edge length value with + :func:`~gudhi.Tangential_complex.set_max_squared_edge_length` + if this happens. + """ + self.thisptr.compute_tangential_complex() + + def get_point(self, vertex): + """This function returns the point corresponding to a given vertex. + + :param vertex: The vertex. + :type vertex: int. + :returns: The point. + :rtype: list of float + """ + cdef vector[double] point = self.thisptr.get_point(vertex) + return point + + def num_vertices(self): + """ + :returns: The number of vertices. + :rtype: unsigned + """ + return self.thisptr.number_of_vertices() + + def num_simplices(self): + """ + :returns: Total number of simplices in stars (including duplicates that appear in several stars). 
+ :rtype: unsigned + """ + return self.thisptr.number_of_simplices() + + def num_inconsistent_simplices(self): + """ + :returns: The number of inconsistent simplices. + :rtype: unsigned + """ + return self.thisptr.number_of_inconsistent_simplices() + + def num_inconsistent_stars(self): + """ + :returns: The number of stars containing at least one inconsistent simplex. + :rtype: unsigned + """ + return self.thisptr.number_of_inconsistent_stars() + + def create_simplex_tree(self): + """Exports the complex into a simplex tree. + + :returns: A simplex tree created from the complex. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + self.thisptr.create_simplex_tree(stree_int_ptr) + return stree + + def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0): + """Attempts to fix inconsistencies by perturbing the point positions. + + :param max_perturb: Maximum length of the translations used by the + perturbation. + :type max_perturb: double + :param time_limit: Time limit in seconds. If -1, no time limit is set. + :type time_limit: double + """ + self.thisptr.fix_inconsistencies_using_perturbation(max_perturb, + time_limit) + + def set_max_squared_edge_length(self, max_squared_edge_length): + """Sets the maximal possible squared edge length for the edges in the + triangulations. + + :param max_squared_edge_length: Maximal possible squared edge length. + :type max_squared_edge_length: double + + If the maximal edge length value is too low + :func:`~gudhi.Tangential_complex.compute_tangential_complex` + will throw an exception in debug mode. + """ + self.thisptr.set_max_squared_edge_length(max_squared_edge_length) diff --git a/src/python/gudhi/witness_complex.pyx b/src/python/gudhi/witness_complex.pyx new file mode 100644 index 00000000..baa70b7a --- /dev/null +++ b/src/python/gudhi/witness_complex.pyx @@ -0,0 +1,78 @@ +from cython cimport numeric +from libcpp.vector cimport vector +from libcpp.utility cimport pair +from libc.stdint cimport intptr_t + +from gudhi.simplex_tree cimport * +from gudhi.simplex_tree import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +cdef extern from "Witness_complex_interface.h" namespace "Gudhi": + cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface": + Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, + unsigned limit_dimension) + +# WitnessComplex python interface +cdef class WitnessComplex: + """Constructs (weak) witness complex for a given table of nearest landmarks + with respect to witnesses. + """ + + cdef Witness_complex_interface * thisptr + + # Fake constructor that does nothing but documenting the constructor + def __init__(self, nearest_landmark_table=None): + """WitnessComplex constructor. + + :param nearest_landmark_table: A list of lists of nearest landmarks and their distances. 
+ `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to + witness w, and d is the (squared) distance between l and w. + :type nearest_landmark_table: list of list of pair of int and float + """ + + # The real cython constructor + def __cinit__(self, nearest_landmark_table=None): + if nearest_landmark_table is not None: + self.thisptr = new Witness_complex_interface(nearest_landmark_table) + + def __dealloc__(self): + if self.thisptr != NULL: + del self.thisptr + + def __is_defined(self): + """Returns true if WitnessComplex pointer is not NULL. + """ + return self.thisptr != NULL + + def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1): + """ + :param max_alpha_square: The maximum relaxation parameter. + Default is set to infinity. + :type max_alpha_square: float + :returns: A simplex tree created from the Delaunay Triangulation. + :rtype: SimplexTree + """ + stree = SimplexTree() + cdef intptr_t stree_int_ptr=stree.thisptr + if limit_dimension is not -1: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square, limit_dimension) + else: + self.thisptr.create_simplex_tree(stree_int_ptr, + max_alpha_square) + return stree diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h new file mode 100644 index 00000000..1199b741 --- /dev/null +++ b/src/python/include/Alpha_complex_interface.h @@ -0,0 +1,72 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ +#define INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ + +#include +#include +#include + +#include "Simplex_tree_interface.h" + +#include +#include +#include + +namespace Gudhi { + +namespace alpha_complex { + +class Alpha_complex_interface { + using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + using Point_d = Dynamic_kernel::Point_d; + + public: + Alpha_complex_interface(const std::vector>& points) { + alpha_complex_ = new Alpha_complex(points); + } + + Alpha_complex_interface(const std::string& off_file_name, bool from_file = true) { + alpha_complex_ = new Alpha_complex(off_file_name); + } + + ~Alpha_complex_interface() { + delete alpha_complex_; + } + + std::vector get_point(int vh) { + std::vector vd; + try { + Point_d ph = alpha_complex_->get_point(vh); + for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) + vd.push_back(*coord); + } catch (std::out_of_range const&) { + // std::out_of_range is thrown in case not found. Other exceptions must be re-thrown + } + return vd; + } + + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square) { + alpha_complex_->create_complex(*simplex_tree, max_alpha_square); + simplex_tree->initialize_filtration(); + } + + private: + Alpha_complex* alpha_complex_; +}; + +} // namespace alpha_complex + +} // namespace Gudhi + +#endif // INCLUDE_ALPHA_COMPLEX_INTERFACE_H_ diff --git a/src/python/include/Bottleneck_distance_interface.h b/src/python/include/Bottleneck_distance_interface.h new file mode 100644 index 00000000..22c9a97a --- /dev/null +++ b/src/python/include/Bottleneck_distance_interface.h @@ -0,0 +1,43 @@ +/* This file is part of the Gudhi Library. 
The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ +#define INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ + +#include + +#include +#include +#include // for std::pair + +namespace Gudhi { + +namespace persistence_diagram { + + // bottleneck_distance function renamed for the python function can be called bottleneck_dstance + double bottleneck(const std::vector>& diag1, + const std::vector>& diag2, + double e) { + return bottleneck_distance(diag1, diag2, e); + } + + double bottleneck(const std::vector>& diag1, + const std::vector>& diag2) { + return bottleneck_distance(diag1, diag2); + } + +} // namespace persistence_diagram + +} // namespace Gudhi + + +#endif // INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_ diff --git a/src/python/include/Cubical_complex_interface.h b/src/python/include/Cubical_complex_interface.h new file mode 100644 index 00000000..7d32914c --- /dev/null +++ b/src/python/include/Cubical_complex_interface.h @@ -0,0 +1,52 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ +#define INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ + +#include +#include +#include + +#include +#include +#include + +namespace Gudhi { + +namespace cubical_complex { + +template> +class Cubical_complex_interface : public Bitmap_cubical_complex { + public: + Cubical_complex_interface(const std::vector& dimensions, + const std::vector& top_dimensional_cells) + : Bitmap_cubical_complex(dimensions, top_dimensional_cells) { + } + + Cubical_complex_interface(const std::vector& dimensions, + const std::vector& top_dimensional_cells, + const std::vector& periodic_dimensions) + : Bitmap_cubical_complex(dimensions, top_dimensional_cells, periodic_dimensions) { + } + + Cubical_complex_interface(const std::string& perseus_file) + : Bitmap_cubical_complex(perseus_file.c_str()) { + } +}; + +} // namespace cubical_complex + +} // namespace Gudhi + +#endif // INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_ + diff --git a/src/python/include/Euclidean_strong_witness_complex_interface.h b/src/python/include/Euclidean_strong_witness_complex_interface.h new file mode 100644 index 00000000..90bd54ac --- /dev/null +++ b/src/python/include/Euclidean_strong_witness_complex_interface.h @@ -0,0 +1,83 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ +#define INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ + +#include +#include + +#include "Simplex_tree_interface.h" + +#include + +#include +#include // std::pair +#include +#include + +namespace Gudhi { + +namespace witness_complex { + + +class Euclidean_strong_witness_complex_interface { + using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + using Point_d = Dynamic_kernel::Point_d; + + typedef typename Simplex_tree<>::Simplex_key Simplex_key; + + public: + Euclidean_strong_witness_complex_interface(const std::vector>& landmarks, + const std::vector>& witnesses) { + landmarks_.reserve(landmarks.size()); + for (auto& landmark : landmarks) + landmarks_.emplace_back(landmark.begin(), landmark.end()); + witness_complex_ = new Euclidean_strong_witness_complex(landmarks_, witnesses); + } + + ~Euclidean_strong_witness_complex_interface() { + delete witness_complex_; + } + + void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, + std::size_t limit_dimension) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); + simplex_tree->initialize_filtration(); + } + + void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square); + simplex_tree->initialize_filtration(); + } + + std::vector get_point(unsigned vh) { + std::vector vd; + if (vh < landmarks_.size()) { + Point_d ph = witness_complex_->get_point(vh); + for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) + vd.push_back(*coord); + } + return vd; + } + + private: + std::vector landmarks_; + Euclidean_strong_witness_complex* witness_complex_; +}; + +} // namespace witness_complex + +} // namespace Gudhi + +#endif // INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_ + diff --git a/src/python/include/Euclidean_witness_complex_interface.h b/src/python/include/Euclidean_witness_complex_interface.h new file mode 100644 index 00000000..0c01a741 --- /dev/null +++ b/src/python/include/Euclidean_witness_complex_interface.h @@ -0,0 +1,82 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ +#define INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ + +#include +#include + +#include "Simplex_tree_interface.h" + +#include + +#include +#include // std::pair +#include +#include + +namespace Gudhi { + +namespace witness_complex { + + +class Euclidean_witness_complex_interface { + using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + using Point_d = Dynamic_kernel::Point_d; + + typedef typename Simplex_tree<>::Simplex_key Simplex_key; + + public: + Euclidean_witness_complex_interface(const std::vector>& landmarks, + const std::vector>& witnesses) { + landmarks_.reserve(landmarks.size()); + for (auto& landmark : landmarks) + landmarks_.emplace_back(landmark.begin(), landmark.end()); + witness_complex_ = new Euclidean_witness_complex(landmarks_, witnesses); + } + + ~Euclidean_witness_complex_interface() { + delete witness_complex_; + } + + void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, std::size_t limit_dimension) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); + simplex_tree->initialize_filtration(); + } + + void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square); + simplex_tree->initialize_filtration(); + } + + std::vector get_point(unsigned vh) { + std::vector vd; + if (vh < landmarks_.size()) { + Point_d ph = witness_complex_->get_point(vh); + for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) + vd.push_back(*coord); + } + return vd; + } + + private: + std::vector landmarks_; + Euclidean_witness_complex* witness_complex_; +}; + +} // namespace witness_complex + +} // namespace Gudhi + +#endif // INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_ + diff --git a/src/python/include/Nerve_gic_interface.h b/src/python/include/Nerve_gic_interface.h new file mode 100644 index 00000000..729b39fb --- /dev/null +++ b/src/python/include/Nerve_gic_interface.h @@ -0,0 +1,51 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2018 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_NERVE_GIC_INTERFACE_H_ +#define INCLUDE_NERVE_GIC_INTERFACE_H_ + +#include +#include +#include + +#include "Simplex_tree_interface.h" + +#include +#include +#include + +namespace Gudhi { + +namespace cover_complex { + +class Nerve_gic_interface : public Cover_complex> { + public: + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree) { + create_complex(*simplex_tree); + simplex_tree->initialize_filtration(); + } + void set_cover_from_Euclidean_Voronoi(int m) { + set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m); + } + double set_graph_from_automatic_euclidean_rips(int N) { + return set_graph_from_automatic_rips(Gudhi::Euclidean_distance(), N); + } + void set_graph_from_euclidean_rips(double threshold) { + set_graph_from_rips(threshold, Gudhi::Euclidean_distance()); + } +}; + +} // namespace cover_complex + +} // namespace Gudhi + +#endif // INCLUDE_NERVE_GIC_INTERFACE_H_ diff --git a/src/python/include/Off_reader_interface.h b/src/python/include/Off_reader_interface.h new file mode 100644 index 00000000..4b3643be --- /dev/null +++ b/src/python/include/Off_reader_interface.h @@ -0,0 +1,32 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_OFF_READER_INTERFACE_H_ +#define INCLUDE_OFF_READER_INTERFACE_H_ + +#include + +#include +#include +#include + +namespace Gudhi { + +std::vector> read_points_from_OFF_file(const std::string& off_file) { + Gudhi::Points_off_reader> off_reader(off_file); + return off_reader.get_point_cloud(); +} + +} // namespace Gudhi + +#endif // INCLUDE_OFF_READER_INTERFACE_H_ + diff --git a/src/python/include/Persistent_cohomology_interface.h b/src/python/include/Persistent_cohomology_interface.h new file mode 100644 index 00000000..64e2ddc8 --- /dev/null +++ b/src/python/include/Persistent_cohomology_interface.h @@ -0,0 +1,111 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ +#define INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ + +#include + +#include +#include // for std::pair +#include // for sort + +namespace Gudhi { + +template +class Persistent_cohomology_interface : public +persistent_cohomology::Persistent_cohomology { + private: + /* + * Compare two intervals by dimension, then by length. 
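+   * Intervals of higher dimension come first; within the same dimension,
+   * longer intervals (death - birth) come first. For example, the dimension 1
+   * interval (0.0, 1.0) sorts before the dimension 1 interval (0.0, 0.5), and
+   * both sort before any dimension 0 interval.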
+ */ + struct cmp_intervals_by_dim_then_length { + explicit cmp_intervals_by_dim_then_length(FilteredComplex * sc) + : sc_(sc) { } + + template + bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) { + if (sc_->dimension(get < 0 > (p1)) == sc_->dimension(get < 0 > (p2))) + return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1)) + > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2))); + else + return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2))); + } + FilteredComplex* sc_; + }; + + public: + Persistent_cohomology_interface(FilteredComplex* stptr) + : persistent_cohomology::Persistent_cohomology(*stptr), + stptr_(stptr) { } + + Persistent_cohomology_interface(FilteredComplex* stptr, bool persistence_dim_max) + : persistent_cohomology::Persistent_cohomology(*stptr, persistence_dim_max), + stptr_(stptr) { } + + std::vector>> get_persistence(int homology_coeff_field, + double min_persistence) { + persistent_cohomology::Persistent_cohomology::init_coefficients(homology_coeff_field); + persistent_cohomology::Persistent_cohomology::compute_persistent_cohomology(min_persistence); + + // Custom sort and output persistence + cmp_intervals_by_dim_then_length cmp(stptr_); + auto persistent_pairs = persistent_cohomology::Persistent_cohomology::get_persistent_pairs(); + std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp); + + std::vector>> persistence; + for (auto pair : persistent_pairs) { + persistence.push_back(std::make_pair(stptr_->dimension(get<0>(pair)), + std::make_pair(stptr_->filtration(get<0>(pair)), + stptr_->filtration(get<1>(pair))))); + } + return persistence; + } + + std::vector, std::vector>> persistence_pairs() { + auto pairs = persistent_cohomology::Persistent_cohomology::get_persistent_pairs(); + + std::vector, std::vector>> persistence_pairs; + persistence_pairs.reserve(pairs.size()); + for (auto pair : pairs) { + std::vector birth; + if (get<0>(pair) != stptr_->null_simplex()) { + for (auto vertex : stptr_->simplex_vertex_range(get<0>(pair))) { + birth.push_back(vertex); + } + } + + std::vector death; + if (get<1>(pair) != stptr_->null_simplex()) { + for (auto vertex : stptr_->simplex_vertex_range(get<1>(pair))) { + death.push_back(vertex); + } + } + + persistence_pairs.push_back(std::make_pair(birth, death)); + } + return persistence_pairs; + } + + private: + // A copy + FilteredComplex* stptr_; +}; + +} // namespace Gudhi + +#endif // INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ diff --git a/src/python/include/Reader_utils_interface.h b/src/python/include/Reader_utils_interface.h new file mode 100644 index 00000000..5bddf9ce --- /dev/null +++ b/src/python/include/Reader_utils_interface.h @@ -0,0 +1,46 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2017 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_READER_UTILS_INTERFACE_H_ +#define INCLUDE_READER_UTILS_INTERFACE_H_ + +#include + +#include +#include +#include +#include +#include // for pair<> + +namespace Gudhi { + +// Redefine functions with a different name in order the original name can be used in the Python version. 
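+// For example, read_matrix_from_csv_file below simply forwards to
+// read_lower_triangular_matrix_from_csv_file; the original name then stays
+// available for the corresponding Python function.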
+std::vector> read_matrix_from_csv_file(const std::string& filename, + const char separator = ';') { + return read_lower_triangular_matrix_from_csv_file(filename, separator); +} + +inline std::map>> + read_pers_intervals_grouped_by_dimension(std::string const& filename) { + return read_persistence_intervals_grouped_by_dimension(filename); +} + +inline std::vector> + read_pers_intervals_in_dimension(std::string const& filename, int only_this_dim = -1) { + return read_persistence_intervals_in_dimension(filename, only_this_dim); +} + + +} // namespace Gudhi + + +#endif // INCLUDE_READER_UTILS_INTERFACE_H_ diff --git a/src/python/include/Rips_complex_interface.h b/src/python/include/Rips_complex_interface.h new file mode 100644 index 00000000..f818a2ed --- /dev/null +++ b/src/python/include/Rips_complex_interface.h @@ -0,0 +1,72 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_RIPS_COMPLEX_INTERFACE_H_ +#define INCLUDE_RIPS_COMPLEX_INTERFACE_H_ + +#include +#include +#include +#include + +#include + +#include "Simplex_tree_interface.h" + +#include +#include +#include // std::pair +#include + +namespace Gudhi { + +namespace rips_complex { + +class Rips_complex_interface { + using Point_d = std::vector; + using Distance_matrix = std::vector::Filtration_value>>; + + public: + void init_points(const std::vector>& points, double threshold) { + rips_complex_.emplace(points, threshold, Gudhi::Euclidean_distance()); + } + void init_matrix(const std::vector>& matrix, double threshold) { + rips_complex_.emplace(matrix, threshold); + } + + void init_points_sparse(const std::vector>& points, double threshold, double epsilon) { + sparse_rips_complex_.emplace(points, Gudhi::Euclidean_distance(), epsilon, -std::numeric_limits::infinity(), threshold); + } + void init_matrix_sparse(const std::vector>& matrix, double threshold, double epsilon) { + sparse_rips_complex_.emplace(matrix, epsilon, -std::numeric_limits::infinity(), threshold); + } + + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, int dim_max) { + if (rips_complex_) + rips_complex_->create_complex(*simplex_tree, dim_max); + else + sparse_rips_complex_->create_complex(*simplex_tree, dim_max); + simplex_tree->initialize_filtration(); + } + + private: + // std::variant would work, but we don't require C++17 yet, and boost::variant is not super convenient. + // Anyway, storing a graph would make more sense. Or changing the interface completely so there is no such storage. + boost::optional::Filtration_value>> rips_complex_; + boost::optional::Filtration_value>> sparse_rips_complex_; +}; + +} // namespace rips_complex + +} // namespace Gudhi + +#endif // INCLUDE_RIPS_COMPLEX_INTERFACE_H_ diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h new file mode 100644 index 00000000..c15a44a5 --- /dev/null +++ b/src/python/include/Simplex_tree_interface.h @@ -0,0 +1,144 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_SIMPLEX_TREE_INTERFACE_H_ +#define INCLUDE_SIMPLEX_TREE_INTERFACE_H_ + +#include +#include +#include +#include + +#include "Persistent_cohomology_interface.h" + +#include +#include +#include // std::pair + +namespace Gudhi { + +template +class Simplex_tree_interface : public Simplex_tree { + public: + using Base = Simplex_tree; + using Filtration_value = typename Base::Filtration_value; + using Vertex_handle = typename Base::Vertex_handle; + using Simplex_handle = typename Base::Simplex_handle; + using Insertion_result = typename std::pair; + using Simplex = std::vector; + using Filtered_simplices = std::vector>; + + public: + bool find_simplex(const Simplex& vh) { + return (Base::find(vh) != Base::null_simplex()); + } + + void assign_simplex_filtration(const Simplex& vh, Filtration_value filtration) { + Base::assign_filtration(Base::find(vh), filtration); + } + + bool insert(const Simplex& simplex, Filtration_value filtration = 0) { + Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration); + return (result.second); + } + + // Do not interface this function, only used in alpha complex interface for complex creation + bool insert_simplex(const Simplex& simplex, Filtration_value filtration = 0) { + Insertion_result result = Base::insert_simplex(simplex, filtration); + return (result.second); + } + + // Do not interface this function, only used in interface for complex creation + bool insert_simplex_and_subfaces(const Simplex& simplex, Filtration_value filtration = 0) { + Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration); + return (result.second); + } + + // Do not interface this function, only used in strong witness interface for complex creation + bool insert_simplex(const std::vector& simplex, Filtration_value filtration = 0) { + Insertion_result result = Base::insert_simplex(simplex, filtration); + return (result.second); + } + + // Do not interface this function, only used in strong witness interface for complex creation + bool insert_simplex_and_subfaces(const std::vector& simplex, Filtration_value filtration = 0) { + Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration); + return (result.second); + } + + Filtration_value simplex_filtration(const Simplex& simplex) { + return Base::filtration(Base::find(simplex)); + } + + void remove_maximal_simplex(const Simplex& simplex) { + Base::remove_maximal_simplex(Base::find(simplex)); + Base::initialize_filtration(); + } + + Filtered_simplices get_filtration() { + Base::initialize_filtration(); + Filtered_simplices filtrations; + for (auto f_simplex : Base::filtration_simplex_range()) { + Simplex simplex; + for (auto vertex : Base::simplex_vertex_range(f_simplex)) { + simplex.insert(simplex.begin(), vertex); + } + filtrations.push_back(std::make_pair(simplex, Base::filtration(f_simplex))); + } + return filtrations; + } + + Filtered_simplices get_skeleton(int dimension) { + Filtered_simplices skeletons; + for (auto f_simplex : Base::skeleton_simplex_range(dimension)) { + Simplex simplex; + for (auto vertex : Base::simplex_vertex_range(f_simplex)) { + simplex.insert(simplex.begin(), vertex); + } + skeletons.push_back(std::make_pair(simplex, Base::filtration(f_simplex))); + } + return skeletons; + } + + Filtered_simplices get_star(const Simplex& simplex) { + Filtered_simplices star; + for (auto 
f_simplex : Base::star_simplex_range(Base::find(simplex))) { + Simplex simplex_star; + for (auto vertex : Base::simplex_vertex_range(f_simplex)) { + simplex_star.insert(simplex_star.begin(), vertex); + } + star.push_back(std::make_pair(simplex_star, Base::filtration(f_simplex))); + } + return star; + } + + Filtered_simplices get_cofaces(const Simplex& simplex, int dimension) { + Filtered_simplices cofaces; + for (auto f_simplex : Base::cofaces_simplex_range(Base::find(simplex), dimension)) { + Simplex simplex_coface; + for (auto vertex : Base::simplex_vertex_range(f_simplex)) { + simplex_coface.insert(simplex_coface.begin(), vertex); + } + cofaces.push_back(std::make_pair(simplex_coface, Base::filtration(f_simplex))); + } + return cofaces; + } + + void create_persistence(Gudhi::Persistent_cohomology_interface* pcoh) { + Base::initialize_filtration(); + pcoh = new Gudhi::Persistent_cohomology_interface(*this); + } +}; + +} // namespace Gudhi + +#endif // INCLUDE_SIMPLEX_TREE_INTERFACE_H_ diff --git a/src/python/include/Strong_witness_complex_interface.h b/src/python/include/Strong_witness_complex_interface.h new file mode 100644 index 00000000..4c333da8 --- /dev/null +++ b/src/python/include/Strong_witness_complex_interface.h @@ -0,0 +1,63 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ +#define INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ + +#include +#include + +#include "Simplex_tree_interface.h" + +#include +#include // std::pair +#include +#include + +namespace Gudhi { + +namespace witness_complex { + +class Strong_witness_complex_interface { + using Nearest_landmark_range = std::vector>; + using Nearest_landmark_table = std::vector; + + public: + Strong_witness_complex_interface(const Nearest_landmark_table& nlt) { + witness_complex_ = new Strong_witness_complex(nlt); + } + + ~Strong_witness_complex_interface() { + delete witness_complex_; + } + + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, + std::size_t limit_dimension) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); + simplex_tree->initialize_filtration(); + } + + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, + double max_alpha_square) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square); + simplex_tree->initialize_filtration(); + } + + private: + Strong_witness_complex* witness_complex_; +}; + +} // namespace witness_complex + +} // namespace Gudhi + +#endif // INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_ diff --git a/src/python/include/Subsampling_interface.h b/src/python/include/Subsampling_interface.h new file mode 100644 index 00000000..bc390485 --- /dev/null +++ b/src/python/include/Subsampling_interface.h @@ -0,0 +1,109 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_SUBSAMPLING_INTERFACE_H_ +#define INCLUDE_SUBSAMPLING_INTERFACE_H_ + +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace Gudhi { + +namespace subsampling { + +using Subsampling_dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; +using Subsampling_point_d = Subsampling_dynamic_kernel::Point_d; +using Subsampling_ft = Subsampling_dynamic_kernel::FT; + +// ------ choose_n_farthest_points ------ +std::vector> subsampling_n_farthest_points(const std::vector>& points, + unsigned nb_points) { + std::vector> landmarks; + Subsampling_dynamic_kernel k; + choose_n_farthest_points(k, points, nb_points, random_starting_point, std::back_inserter(landmarks)); + + return landmarks; +} + +std::vector> subsampling_n_farthest_points(const std::vector>& points, + unsigned nb_points, unsigned starting_point) { + std::vector> landmarks; + Subsampling_dynamic_kernel k; + choose_n_farthest_points(k, points, nb_points, starting_point, std::back_inserter(landmarks)); + + return landmarks; +} + +std::vector> subsampling_n_farthest_points_from_file(const std::string& off_file, + unsigned nb_points) { + Gudhi::Points_off_reader> off_reader(off_file); + std::vector> points = off_reader.get_point_cloud(); + return subsampling_n_farthest_points(points, nb_points); +} + +std::vector> subsampling_n_farthest_points_from_file(const std::string& off_file, + unsigned nb_points, unsigned starting_point) { + Gudhi::Points_off_reader> off_reader(off_file); + std::vector> points = off_reader.get_point_cloud(); + return subsampling_n_farthest_points(points, nb_points, starting_point); +} + +// ------ pick_n_random_points ------ +std::vector> subsampling_n_random_points(const std::vector>& points, + unsigned nb_points) { + std::vector> landmarks; + pick_n_random_points(points, nb_points, std::back_inserter(landmarks)); + + return landmarks; +} + +std::vector> subsampling_n_random_points_from_file(const std::string& off_file, + unsigned nb_points) { + Gudhi::Points_off_reader> off_reader(off_file); + std::vector> points = off_reader.get_point_cloud(); + return subsampling_n_random_points(points, nb_points); +} + +// ------ sparsify_point_set ------ +std::vector> subsampling_sparsify_points(const std::vector>& points, + double min_squared_dist) { + std::vector input, output; + for (auto point : points) + input.push_back(Subsampling_point_d(point.size(), point.begin(), point.end())); + Subsampling_dynamic_kernel k; + sparsify_point_set(k, input, min_squared_dist, std::back_inserter(output)); + + std::vector> landmarks; + for (auto point : output) + landmarks.push_back(std::vector(point.cartesian_begin(), point.cartesian_end())); + return landmarks; +} + +std::vector> subsampling_sparsify_points_from_file(const std::string& off_file, + double min_squared_dist) { + Gudhi::Points_off_reader> off_reader(off_file); + std::vector> points = off_reader.get_point_cloud(); + return subsampling_sparsify_points(points, min_squared_dist); +} + +} // namespace subsampling + +} // namespace Gudhi + +#endif // INCLUDE_SUBSAMPLING_INTERFACE_H_ diff --git a/src/python/include/Tangential_complex_interface.h b/src/python/include/Tangential_complex_interface.h new file mode 100644 index 00000000..7c3f2789 --- /dev/null +++ b/src/python/include/Tangential_complex_interface.h @@ -0,0 +1,111 @@ +/* This file is part of the Gudhi 
Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ +#define INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ + +#include +#include +#include +#include + +#include "Simplex_tree_interface.h" + +#include +#include // std::pair +#include +#include + +namespace Gudhi { + +namespace tangential_complex { + +class Tangential_complex_interface { + using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + using Point_d = Dynamic_kernel::Point_d; + using TC = Tangential_complex; + + public: + Tangential_complex_interface(int intrisic_dim, const std::vector>& points) { + Dynamic_kernel k; + + tangential_complex_ = new TC(points, intrisic_dim, k); + } + + Tangential_complex_interface(int intrisic_dim, const std::string& off_file_name, bool from_file = true) { + Dynamic_kernel k; + + Gudhi::Points_off_reader off_reader(off_file_name); + std::vector points = off_reader.get_point_cloud(); + + tangential_complex_ = new TC(points, intrisic_dim, k); + } + + ~Tangential_complex_interface() { + delete tangential_complex_; + } + + void compute_tangential_complex() { + tangential_complex_->compute_tangential_complex(); + num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices(); + } + + std::vector get_point(unsigned vh) { + std::vector vd; + if (vh < tangential_complex_->number_of_vertices()) { + Point_d ph = tangential_complex_->get_point(vh); + for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++) + vd.push_back(*coord); + } + return vd; + } + + unsigned number_of_vertices() { + return tangential_complex_->number_of_vertices(); + } + + unsigned number_of_simplices() { + return num_inconsistencies_.num_simplices; + } + + unsigned number_of_inconsistent_simplices() { + return num_inconsistencies_.num_inconsistent_simplices; + } + + unsigned number_of_inconsistent_stars() { + return num_inconsistencies_.num_inconsistent_stars; + } + + void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) { + tangential_complex_->fix_inconsistencies_using_perturbation(max_perturb, time_limit); + num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices(); + } + + void create_simplex_tree(Simplex_tree<>* simplex_tree) { + tangential_complex_->create_complex>(*simplex_tree); + simplex_tree->initialize_filtration(); + } + + void set_max_squared_edge_length(double max_squared_edge_length) { + tangential_complex_->set_max_squared_edge_length(max_squared_edge_length); + } + +private: + TC* tangential_complex_; + TC::Num_inconsistencies num_inconsistencies_; +}; + +} // namespace tangential_complex + +} // namespace Gudhi + +#endif // INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_ diff --git a/src/python/include/Witness_complex_interface.h b/src/python/include/Witness_complex_interface.h new file mode 100644 index 00000000..609277d6 --- /dev/null +++ b/src/python/include/Witness_complex_interface.h @@ -0,0 +1,64 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2016 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ +#define INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ + +#include +#include + +#include "Simplex_tree_interface.h" + +#include +#include // std::pair +#include +#include + +namespace Gudhi { + +namespace witness_complex { + +class Witness_complex_interface { + using Nearest_landmark_range = std::vector>; + using Nearest_landmark_table = std::vector; + + public: + Witness_complex_interface(const Nearest_landmark_table& nlt) { + witness_complex_ = new Witness_complex(nlt); + } + + ~Witness_complex_interface() { + delete witness_complex_; + } + + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, + std::size_t limit_dimension) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension); + simplex_tree->initialize_filtration(); + } + + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, + double max_alpha_square) { + witness_complex_->create_complex(*simplex_tree, max_alpha_square); + simplex_tree->initialize_filtration(); + } + + private: + Witness_complex* witness_complex_; +}; + +} // namespace witness_complex + +} // namespace Gudhi + +#endif // INCLUDE_WITNESS_COMPLEX_INTERFACE_H_ + diff --git a/src/python/setup.py.in b/src/python/setup.py.in new file mode 100644 index 00000000..3f1d4424 --- /dev/null +++ b/src/python/setup.py.in @@ -0,0 +1,53 @@ +from setuptools import setup, Extension +from Cython.Build import cythonize +from numpy import get_include as numpy_get_include + +"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2019 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + +modules = [@GUDHI_PYTHON_MODULES_TO_COMPILE@] + +source_dir='@CMAKE_CURRENT_SOURCE_DIR@/gudhi/' +extra_compile_args=[@GUDHI_PYTHON_EXTRA_COMPILE_ARGS@] +extra_link_args=[@GUDHI_PYTHON_EXTRA_LINK_ARGS@] +libraries=[@GUDHI_PYTHON_LIBRARIES@] +library_dirs=[@GUDHI_PYTHON_LIBRARY_DIRS@] +include_dirs = [numpy_get_include(), '@CMAKE_CURRENT_SOURCE_DIR@/gudhi/', @GUDHI_PYTHON_INCLUDE_DIRS@] +runtime_library_dirs=[@GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS@] + +# Create ext_modules list from module list +ext_modules = [] +for module in modules: + ext_modules.append(Extension( + 'gudhi.' 
+ module, + sources = [source_dir + module + '.pyx',], + language = 'c++', + extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, + libraries=libraries, + library_dirs=library_dirs, + include_dirs=include_dirs, + runtime_library_dirs=runtime_library_dirs,)) + +setup( + name = 'gudhi', + packages=["gudhi",], + author='GUDHI Editorial Board', + author_email='gudhi-contact@lists.gforge.inria.fr', + version='@GUDHI_VERSION@', + url='http://gudhi.gforge.inria.fr/', + ext_modules = cythonize(ext_modules), + install_requires = ['cython','numpy >= 1.9',], + setup_requires = ['numpy >= 1.9',], +) diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py new file mode 100755 index 00000000..24f8bf53 --- /dev/null +++ b/src/python/test/test_alpha_complex.py @@ -0,0 +1,90 @@ +from gudhi import AlphaComplex, SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_empty_alpha(): + alpha_complex = AlphaComplex(points=[[0, 0]]) + assert alpha_complex.__is_defined() == True + + +def test_infinite_alpha(): + point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] + alpha_complex = AlphaComplex(points=point_list) + assert alpha_complex.__is_defined() == True + + simplex_tree = alpha_complex.create_simplex_tree() + assert simplex_tree.__is_persistence_defined() == False + + assert simplex_tree.num_simplices() == 11 + assert simplex_tree.num_vertices() == 4 + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 0.25), + ([0, 2], 0.25), + ([1, 3], 0.25), + ([2, 3], 0.25), + ([1, 2], 0.5), + ([0, 1, 2], 0.5), + ([1, 2, 3], 0.5), + ] + assert simplex_tree.get_star([0]) == [ + ([0], 0.0), + ([0, 1], 0.25), + ([0, 1, 2], 0.5), + ([0, 2], 0.25), + ] + assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] + + assert point_list[0] == alpha_complex.get_point(0) + assert point_list[1] == alpha_complex.get_point(1) + assert point_list[2] == alpha_complex.get_point(2) + assert point_list[3] == alpha_complex.get_point(3) + assert alpha_complex.get_point(4) == [] + assert alpha_complex.get_point(125) == [] + + +def test_filtered_alpha(): + point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] + filtered_alpha = AlphaComplex(points=point_list) + + simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25) + + assert simplex_tree.num_simplices() == 8 + assert simplex_tree.num_vertices() == 4 + + assert point_list[0] == filtered_alpha.get_point(0) + assert point_list[1] == filtered_alpha.get_point(1) + assert point_list[2] == filtered_alpha.get_point(2) + assert point_list[3] == filtered_alpha.get_point(3) + assert filtered_alpha.get_point(4) == [] + assert filtered_alpha.get_point(125) == [] + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 0.25), + ([0, 2], 0.25), + ([1, 3], 0.25), + ([2, 3], 0.25), + ] + assert simplex_tree.get_star([0]) == [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)] + assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] diff --git a/src/python/test/test_bottleneck_distance.py 
b/src/python/test/test_bottleneck_distance.py new file mode 100755 index 00000000..f5f019b9 --- /dev/null +++ b/src/python/test/test_bottleneck_distance.py @@ -0,0 +1,23 @@ +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_basic_bottleneck(): + diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] + diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] + + assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569 + assert gudhi.bottleneck_distance(diag1, diag2) == 0.75 diff --git a/src/python/test/test_cover_complex.py b/src/python/test/test_cover_complex.py new file mode 100755 index 00000000..8cd12272 --- /dev/null +++ b/src/python/test/test_cover_complex.py @@ -0,0 +1,85 @@ +from gudhi import CoverComplex + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2018 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2018 Inria" +__license__ = "MIT" + + +def test_empty_constructor(): + # Try to create an empty CoverComplex + cover = CoverComplex() + assert cover.__is_defined() == True + + +def test_non_existing_file_read(): + # Try to open a non existing file + cover = CoverComplex() + assert cover.read_point_cloud("pouetpouettralala.toubiloubabdou") == False + + +def test_files_creation(): + # Create test file + cloud_file = open("cloud", "w") + cloud_file.write("nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0") + cloud_file.close() + cover_file = open("cover", "w") + cover_file.write("1\n2\n3") + cover_file.close() + graph_file = open("graph", "w") + graph_file.write("0 1\n0 2\n1 2") + graph_file.close() + + +def test_nerve(): + nerve = CoverComplex() + nerve.set_type("Nerve") + assert nerve.read_point_cloud("cloud") == True + nerve.set_color_from_coordinate() + nerve.set_graph_from_file("graph") + nerve.set_cover_from_file("cover") + nerve.find_simplices() + stree = nerve.create_simplex_tree() + + assert stree.num_vertices() == 3 + assert (stree.num_simplices() - stree.num_vertices()) == 0 + assert stree.dimension() == 0 + + +def test_graph_induced_complex(): + gic = CoverComplex() + gic.set_type("GIC") + assert gic.read_point_cloud("cloud") == True + gic.set_color_from_coordinate() + gic.set_graph_from_file("graph") + gic.set_cover_from_file("cover") + gic.find_simplices() + stree = gic.create_simplex_tree() + + assert stree.num_vertices() == 3 + assert (stree.num_simplices() - stree.num_vertices()) == 4 + assert stree.dimension() == 2 + + +def test_voronoi_graph_induced_complex(): + gic = CoverComplex() + gic.set_type("GIC") + assert gic.read_point_cloud("cloud") == True + gic.set_color_from_coordinate() + gic.set_graph_from_file("graph") + gic.set_cover_from_Voronoi(2) + gic.find_simplices() + stree = gic.create_simplex_tree() + + assert stree.num_vertices() == 2 + assert (stree.num_simplices() - stree.num_vertices()) == 1 + assert stree.dimension() == 1 diff --git 
a/src/python/test/test_cubical_complex.py b/src/python/test/test_cubical_complex.py new file mode 100755 index 00000000..68f54fbe --- /dev/null +++ b/src/python/test/test_cubical_complex.py @@ -0,0 +1,98 @@ +from gudhi import CubicalComplex + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_empty_constructor(): + # Try to create an empty CubicalComplex + cub = CubicalComplex() + assert cub.__is_defined() == False + assert cub.__is_persistence_defined() == False + + +def test_non_existing_perseus_file_constructor(): + # Try to open a non existing file + cub = CubicalComplex(perseus_file="pouetpouettralala.toubiloubabdou") + assert cub.__is_defined() == False + assert cub.__is_persistence_defined() == False + + +def test_dimension_or_perseus_file_constructor(): + # Create test file + test_file = open("CubicalOneSphere.txt", "w") + test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n") + test_file.close() + # CubicalComplex can be constructed from dimensions and + # top_dimensional_cells OR from a Perseus-style file name. + cub = CubicalComplex( + dimensions=[3, 3], + top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9], + perseus_file="CubicalOneSphere.txt", + ) + assert cub.__is_defined() == False + assert cub.__is_persistence_defined() == False + + cub = CubicalComplex( + top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9], + perseus_file="CubicalOneSphere.txt", + ) + assert cub.__is_defined() == False + assert cub.__is_persistence_defined() == False + + cub = CubicalComplex(dimensions=[3, 3], perseus_file="CubicalOneSphere.txt") + assert cub.__is_defined() == False + assert cub.__is_persistence_defined() == False + + +def test_dimension_simple_constructor(): + cub = CubicalComplex( + dimensions=[3, 3], top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9] + ) + assert cub.__is_defined() == True + assert cub.__is_persistence_defined() == False + assert cub.persistence() == [(0, (1.0, float("inf")))] + assert cub.__is_persistence_defined() == True + assert cub.betti_numbers() == [1, 0, 0] + assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0] + + +def test_user_case_simple_constructor(): + cub = CubicalComplex( + dimensions=[3, 3], + top_dimensional_cells=[float("inf"), 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + ) + assert cub.__is_defined() == True + assert cub.__is_persistence_defined() == False + assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))] + assert cub.__is_persistence_defined() == True + other_cub = CubicalComplex( + dimensions=[3, 3], + top_dimensional_cells=[1000.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + ) + assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))] + + +def test_dimension_file_constructor(): + # Create test file + test_file = open("CubicalOneSphere.txt", "w") + test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n") + test_file.close() + cub = CubicalComplex(perseus_file="CubicalOneSphere.txt") + assert cub.__is_defined() == True + assert cub.__is_persistence_defined() == False + assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float("inf")))] + assert cub.__is_persistence_defined() == True + assert cub.betti_numbers() == 
[1, 0, 0] + assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0] diff --git a/src/python/test/test_euclidean_witness_complex.py b/src/python/test/test_euclidean_witness_complex.py new file mode 100755 index 00000000..f5eae5fa --- /dev/null +++ b/src/python/test/test_euclidean_witness_complex.py @@ -0,0 +1,95 @@ +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_empty_euclidean_witness_complex(): + euclidean_witness = gudhi.EuclideanWitnessComplex() + assert euclidean_witness.__is_defined() == False + + +def test_witness_complex(): + point_cloud = [ + [1.0, 1.0], + [7.0, 0.0], + [4.0, 6.0], + [9.0, 6.0], + [0.0, 14.0], + [2.0, 19.0], + [9.0, 17.0], + ] + landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]] + euclidean_witness_complex = gudhi.EuclideanWitnessComplex( + landmarks=landmarks, witnesses=point_cloud + ) + simplex_tree = euclidean_witness_complex.create_simplex_tree(max_alpha_square=4.1) + + assert landmarks[0] == euclidean_witness_complex.get_point(0) + assert landmarks[1] == euclidean_witness_complex.get_point(1) + assert landmarks[2] == euclidean_witness_complex.get_point(2) + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([0, 1], 0.0), + ([2], 0.0), + ([0, 2], 0.0), + ([1, 2], 0.0), + ([0, 1, 2], 0.0), + ] + + +def test_empty_euclidean_strong_witness_complex(): + euclidean_strong_witness = gudhi.EuclideanStrongWitnessComplex() + assert euclidean_strong_witness.__is_defined() == False + + +def test_strong_witness_complex(): + point_cloud = [ + [1.0, 1.0], + [7.0, 0.0], + [4.0, 6.0], + [9.0, 6.0], + [0.0, 14.0], + [2.0, 19.0], + [9.0, 17.0], + ] + landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]] + euclidean_strong_witness_complex = gudhi.EuclideanStrongWitnessComplex( + landmarks=landmarks, witnesses=point_cloud + ) + simplex_tree = euclidean_strong_witness_complex.create_simplex_tree( + max_alpha_square=14.9 + ) + + assert landmarks[0] == euclidean_strong_witness_complex.get_point(0) + assert landmarks[1] == euclidean_strong_witness_complex.get_point(1) + assert landmarks[2] == euclidean_strong_witness_complex.get_point(2) + + assert simplex_tree.get_filtration() == [([0], 0.0), ([1], 0.0), ([2], 0.0)] + + simplex_tree = euclidean_strong_witness_complex.create_simplex_tree( + max_alpha_square=100.0 + ) + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([1, 2], 15.0), + ([0, 2], 34.0), + ([0, 1], 37.0), + ([0, 1, 2], 37.0), + ] diff --git a/src/python/test/test_reader_utils.py b/src/python/test/test_reader_utils.py new file mode 100755 index 00000000..4c7b32c2 --- /dev/null +++ b/src/python/test/test_reader_utils.py @@ -0,0 +1,126 @@ +import gudhi +import numpy as np + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2017 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2017 Inria" +__license__ = "MIT" + + +def test_non_existing_csv_file(): + # Try to open a non existing file + matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file="pouetpouettralala.toubiloubabdou" + ) + assert matrix == [] + + +def test_full_square_distance_matrix_csv_file(): + # Create test file + test_file = open("full_square_distance_matrix.csv", "w") + test_file.write("0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;") + test_file.close() + matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file="full_square_distance_matrix.csv" + ) + assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]] + + +def test_lower_triangular_distance_matrix_csv_file(): + # Create test file + test_file = open("lower_triangular_distance_matrix.csv", "w") + test_file.write("\n1,\n2,3,\n4,5,6,\n7,8,9,10,") + test_file.close() + matrix = gudhi.read_lower_triangular_matrix_from_csv_file( + csv_file="lower_triangular_distance_matrix.csv", separator="," + ) + assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]] + + +def test_non_existing_persistence_file(): + # Try to open a non existing file + persistence = gudhi.read_persistence_intervals_grouped_by_dimension( + persistence_file="pouetpouettralala.toubiloubabdou" + ) + assert persistence == [] + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="pouetpouettralala.toubiloubabdou", only_this_dim=1 + ) + np.testing.assert_array_equal(persistence, []) + + +def test_read_persistence_intervals_without_dimension(): + # Create test file + test_file = open("persistence_intervals_without_dimension.pers", "w") + test_file.write( + "# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf" + ) + test_file.close() + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_without_dimension.pers" + ) + np.testing.assert_array_equal( + persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] + ) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=0 + ) + np.testing.assert_array_equal(persistence, []) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=1 + ) + np.testing.assert_array_equal(persistence, []) + persistence = gudhi.read_persistence_intervals_grouped_by_dimension( + persistence_file="persistence_intervals_without_dimension.pers" + ) + assert persistence == { + -1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] + } + + +def test_read_persistence_intervals_with_dimension(): + # Create test file + test_file = open("persistence_intervals_with_dimension.pers", "w") + test_file.write( + "# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. 
inf" + ) + test_file.close() + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers" + ) + np.testing.assert_array_equal( + persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))] + ) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=0 + ) + np.testing.assert_array_equal(persistence, [(2.7, 3.7)]) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=1 + ) + np.testing.assert_array_equal(persistence, [(9.6, 14.0), (3.0, float("Inf"))]) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=2 + ) + np.testing.assert_array_equal(persistence, []) + persistence = gudhi.read_persistence_intervals_in_dimension( + persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=3 + ) + np.testing.assert_array_equal(persistence, [(34.2, 34.974)]) + persistence = gudhi.read_persistence_intervals_grouped_by_dimension( + persistence_file="persistence_intervals_with_dimension.pers" + ) + assert persistence == { + 0: [(2.7, 3.7)], + 1: [(9.6, 14.0), (3.0, float("Inf"))], + 3: [(34.2, 34.974)], + } diff --git a/src/python/test/test_rips_complex.py b/src/python/test/test_rips_complex.py new file mode 100755 index 00000000..d55ae22f --- /dev/null +++ b/src/python/test/test_rips_complex.py @@ -0,0 +1,133 @@ +from gudhi import RipsComplex +from math import sqrt + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_empty_rips(): + rips_complex = RipsComplex() + + +def test_rips_from_points(): + point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] + rips_complex = RipsComplex(points=point_list, max_edge_length=42) + + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + + assert simplex_tree.__is_defined() == True + assert simplex_tree.__is_persistence_defined() == False + + assert simplex_tree.num_simplices() == 10 + assert simplex_tree.num_vertices() == 4 + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([1, 3], 1.0), + ([2, 3], 1.0), + ([1, 2], 1.4142135623730951), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_star([0]) == [ + ([0], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_cofaces([0], 1) == [ + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + + +def test_filtered_rips_from_points(): + point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] + filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0) + + simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) + + assert simplex_tree.__is_defined() == True + assert simplex_tree.__is_persistence_defined() == False + + assert simplex_tree.num_simplices() == 8 + assert simplex_tree.num_vertices() == 4 + + +def test_sparse_filtered_rips_from_points(): + point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] + filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0, sparse=0.001) + + simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) + + assert simplex_tree.__is_defined() == True + assert simplex_tree.__is_persistence_defined() == False + + assert simplex_tree.num_simplices() == 8 + assert simplex_tree.num_vertices() == 4 + + +def test_rips_from_distance_matrix(): + distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] + rips_complex = RipsComplex(distance_matrix=distance_matrix, max_edge_length=42) + + simplex_tree = rips_complex.create_simplex_tree(max_dimension=1) + + assert simplex_tree.__is_defined() == True + assert simplex_tree.__is_persistence_defined() == False + + assert simplex_tree.num_simplices() == 10 + assert simplex_tree.num_vertices() == 4 + + assert simplex_tree.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([1, 3], 1.0), + ([2, 3], 1.0), + ([1, 2], 1.4142135623730951), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_star([0]) == [ + ([0], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + assert simplex_tree.get_cofaces([0], 1) == [ + ([0, 1], 1.0), + ([0, 2], 1.0), + ([0, 3], 1.4142135623730951), + ] + + +def test_filtered_rips_from_distance_matrix(): + distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] + filtered_rips = RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0) + + simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1) + + assert simplex_tree.__is_defined() == True + assert simplex_tree.__is_persistence_defined() == False + + assert simplex_tree.num_simplices() == 8 + assert simplex_tree.num_vertices() == 4 diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py new 
file mode 100755 index 00000000..8d8971c1 --- /dev/null +++ b/src/python/test/test_simplex_tree.py @@ -0,0 +1,250 @@ +from gudhi import SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_insertion(): + st = SimplexTree() + assert st.__is_defined() == True + assert st.__is_persistence_defined() == False + + # insert test + assert st.insert([0, 1]) == True + + assert st.dimension() == 1 + + assert st.insert([0, 1, 2], filtration=4.0) == True + + assert st.dimension() == 2 + + assert st.num_simplices() == 7 + assert st.num_vertices() == 3 + + # find test + assert st.find([0, 1, 2]) == True + assert st.find([0, 1]) == True + assert st.find([0, 2]) == True + assert st.find([0]) == True + assert st.find([1]) == True + assert st.find([2]) == True + assert st.find([3]) == False + assert st.find([0, 3]) == False + assert st.find([1, 3]) == False + assert st.find([2, 3]) == False + + # filtration test + st.initialize_filtration() + assert st.filtration([0, 1, 2]) == 4.0 + assert st.filtration([0, 2]) == 4.0 + assert st.filtration([1, 2]) == 4.0 + assert st.filtration([2]) == 4.0 + assert st.filtration([0, 1]) == 0.0 + assert st.filtration([0]) == 0.0 + assert st.filtration([1]) == 0.0 + + # skeleton test + assert st.get_skeleton(2) == [ + ([0, 1, 2], 4.0), + ([0, 1], 0.0), + ([0, 2], 4.0), + ([0], 0.0), + ([1, 2], 4.0), + ([1], 0.0), + ([2], 4.0), + ] + assert st.get_skeleton(1) == [ + ([0, 1], 0.0), + ([0, 2], 4.0), + ([0], 0.0), + ([1, 2], 4.0), + ([1], 0.0), + ([2], 4.0), + ] + assert st.get_skeleton(0) == [([0], 0.0), ([1], 0.0), ([2], 4.0)] + + # remove_maximal_simplex test + assert st.get_cofaces([0, 1, 2], 1) == [] + st.remove_maximal_simplex([0, 1, 2]) + assert st.get_skeleton(2) == [ + ([0, 1], 0.0), + ([0, 2], 4.0), + ([0], 0.0), + ([1, 2], 4.0), + ([1], 0.0), + ([2], 4.0), + ] + assert st.find([0, 1, 2]) == False + assert st.find([0, 1]) == True + assert st.find([0, 2]) == True + assert st.find([0]) == True + assert st.find([1]) == True + assert st.find([2]) == True + + st.initialize_filtration() + assert st.persistence(persistence_dim_max=True) == [ + (1, (4.0, float("inf"))), + (0, (0.0, float("inf"))), + ] + assert st.__is_persistence_defined() == True + + assert st.betti_numbers() == [1, 1] + assert st.persistent_betti_numbers(-0.1, 10000.0) == [0, 0] + assert st.persistent_betti_numbers(0.0, 10000.0) == [1, 0] + assert st.persistent_betti_numbers(3.9, 10000.0) == [1, 0] + assert st.persistent_betti_numbers(4.0, 10000.0) == [1, 1] + assert st.persistent_betti_numbers(9999.0, 10000.0) == [1, 1] + + +def test_expansion(): + st = SimplexTree() + assert st.__is_defined() == True + assert st.__is_persistence_defined() == False + + # insert test + assert st.insert([3, 2], 0.1) == True + assert st.insert([2, 0], 0.2) == True + assert st.insert([1, 0], 0.3) == True + assert st.insert([3, 1], 0.4) == True + assert st.insert([2, 1], 0.5) == True + assert st.insert([6, 5], 0.6) == True + assert st.insert([4, 2], 0.7) == True + assert st.insert([3, 0], 0.8) == True + assert st.insert([6, 4], 0.9) == True + assert st.insert([6, 3], 1.0) == True + + assert st.num_vertices() == 7 + assert 
st.num_simplices() == 17 + assert st.get_filtration() == [ + ([2], 0.1), + ([3], 0.1), + ([2, 3], 0.1), + ([0], 0.2), + ([0, 2], 0.2), + ([1], 0.3), + ([0, 1], 0.3), + ([1, 3], 0.4), + ([1, 2], 0.5), + ([5], 0.6), + ([6], 0.6), + ([5, 6], 0.6), + ([4], 0.7), + ([2, 4], 0.7), + ([0, 3], 0.8), + ([4, 6], 0.9), + ([3, 6], 1.0), + ] + + st.expansion(3) + assert st.num_vertices() == 7 + assert st.num_simplices() == 22 + st.initialize_filtration() + + assert st.get_filtration() == [ + ([2], 0.1), + ([3], 0.1), + ([2, 3], 0.1), + ([0], 0.2), + ([0, 2], 0.2), + ([1], 0.3), + ([0, 1], 0.3), + ([1, 3], 0.4), + ([1, 2], 0.5), + ([0, 1, 2], 0.5), + ([1, 2, 3], 0.5), + ([5], 0.6), + ([6], 0.6), + ([5, 6], 0.6), + ([4], 0.7), + ([2, 4], 0.7), + ([0, 3], 0.8), + ([0, 1, 3], 0.8), + ([0, 2, 3], 0.8), + ([0, 1, 2, 3], 0.8), + ([4, 6], 0.9), + ([3, 6], 1.0), + ] + + +def test_automatic_dimension(): + st = SimplexTree() + assert st.__is_defined() == True + assert st.__is_persistence_defined() == False + + # insert test + assert st.insert([0, 1, 3], filtration=0.5) == True + assert st.insert([0, 1, 2], filtration=1.0) == True + + assert st.num_vertices() == 4 + assert st.num_simplices() == 11 + + assert st.dimension() == 2 + assert st.upper_bound_dimension() == 2 + + assert st.prune_above_filtration(0.6) == True + assert st.dimension() == 2 + assert st.upper_bound_dimension() == 2 + + st.assign_filtration([0, 1, 3], 0.7) + assert st.filtration([0, 1, 3]) == 0.7 + + st.remove_maximal_simplex([0, 1, 3]) + assert st.upper_bound_dimension() == 2 + assert st.dimension() == 1 + assert st.upper_bound_dimension() == 1 + + +def test_make_filtration_non_decreasing(): + st = SimplexTree() + assert st.__is_defined() == True + assert st.__is_persistence_defined() == False + + # Inserted simplex: + # 1 + # o + # /X\ + # o---o---o---o + # 2 0 3\X/4 + # o + # 5 + assert st.insert([2, 1, 0], filtration=2.0) == True + assert st.insert([3, 0], filtration=2.0) == True + assert st.insert([3, 4, 5], filtration=2.0) == True + + assert st.make_filtration_non_decreasing() == False + + # Because of non decreasing property of simplex tree, { 0 } , { 1 } and + # { 0, 1 } are going to be set from value 2.0 to 1.0 + st.insert([0, 1, 6, 7], filtration=1.0) + + assert st.make_filtration_non_decreasing() == False + + # Modify specific values to test make_filtration_non_decreasing + st.assign_filtration([0, 1, 6, 7], 0.8) + st.assign_filtration([0, 1, 6], 0.9) + st.assign_filtration([0, 6], 0.6) + st.assign_filtration([3, 4, 5], 1.2) + st.assign_filtration([3, 4], 1.1) + st.assign_filtration([4, 5], 1.99) + + assert st.make_filtration_non_decreasing() == True + + assert st.filtration([0, 1, 6, 7]) == 1.0 + assert st.filtration([0, 1, 6]) == 1.0 + assert st.filtration([0, 1]) == 1.0 + assert st.filtration([0]) == 1.0 + assert st.filtration([1]) == 1.0 + assert st.filtration([3, 4, 5]) == 2.0 + assert st.filtration([3, 4]) == 2.0 + assert st.filtration([4, 5]) == 2.0 diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py new file mode 100755 index 00000000..c816e203 --- /dev/null +++ b/src/python/test/test_subsampling.py @@ -0,0 +1,179 @@ +import gudhi + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_write_off_file_for_tests(): + file = open("subsample.off", "w") + file.write("nOFF\n") + file.write("2 7 0 0\n") + file.write("1.0 1.0\n") + file.write("7.0 0.0\n") + file.write("4.0 6.0\n") + file.write("9.0 6.0\n") + file.write("0.0 14.0\n") + file.write("2.0 19.0\n") + file.write("9.0 17.0\n") + file.close() + + +def test_simple_choose_n_farthest_points_with_a_starting_point(): + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] + i = 0 + for point in point_set: + # The iteration starts with the given starting point + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=1, starting_point=i + ) + assert sub_set[0] == point_set[i] + i = i + 1 + + # The iteration finds then the farthest + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=1 + ) + assert sub_set[1] == point_set[3] + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=3 + ) + assert sub_set[1] == point_set[1] + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=0 + ) + assert sub_set[1] == point_set[2] + sub_set = gudhi.choose_n_farthest_points( + points=point_set, nb_points=2, starting_point=2 + ) + assert sub_set[1] == point_set[0] + + # Test the limits + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=0) == [] + ) + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=0) == [] + ) + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=1) == [] + ) + assert ( + gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=1) == [] + ) + + # From off file test + for i in range(0, 7): + assert ( + len( + gudhi.choose_n_farthest_points( + off_file="subsample.off", nb_points=i, starting_point=i + ) + ) + == i + ) + + +def test_simple_choose_n_farthest_points_randomed(): + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] + # Test the limits + assert gudhi.choose_n_farthest_points(points=[], nb_points=0) == [] + assert gudhi.choose_n_farthest_points(points=[], nb_points=1) == [] + assert gudhi.choose_n_farthest_points(points=point_set, nb_points=0) == [] + + # Go furter than point set on purpose + for iter in range(1, 10): + sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=iter) + for sub in sub_set: + found = False + for point in point_set: + if point == sub: + found = True + # Check each sub set point is existing in the point set + assert found == True + + # From off file test + for i in range(0, 7): + assert ( + len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i)) + == i + ) + + +def test_simple_pick_n_random_points(): + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] + # Test the limits + assert gudhi.pick_n_random_points(points=[], nb_points=0) == [] + assert gudhi.pick_n_random_points(points=[], nb_points=1) == [] + assert gudhi.pick_n_random_points(points=point_set, nb_points=0) == [] + + # Go furter than point set on purpose + for iter in range(1, 10): + sub_set = gudhi.pick_n_random_points(points=point_set, nb_points=iter) + print(5) + for sub in sub_set: + found = False + for point in point_set: + if point == sub: + found = True + # Check each sub set point is existing in the point set + assert found == True + + # 
From off file test + for i in range(0, 7): + assert ( + len(gudhi.pick_n_random_points(off_file="subsample.off", nb_points=i)) == i + ) + + +def test_simple_sparsify_points(): + point_set = [[0, 1], [0, 0], [1, 0], [1, 1]] + # Test the limits + # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 0.0) == [] + # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 10.0) == [] + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=0.0) == point_set + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.0) == point_set + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.0) == [ + [0, 1], + [1, 0], + ] + assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.01) == [[0, 1]] + + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0)) + == 7 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=30.0)) + == 5 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.0)) + == 4 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=90.0)) + == 3 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=100.0)) + == 2 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.0)) + == 2 + ) + assert ( + len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.01)) + == 1 + ) diff --git a/src/python/test/test_tangential_complex.py b/src/python/test/test_tangential_complex.py new file mode 100755 index 00000000..0f828d8e --- /dev/null +++ b/src/python/test/test_tangential_complex.py @@ -0,0 +1,55 @@ +from gudhi import TangentialComplex, SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_tangential(): + point_list = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]] + tc = TangentialComplex(intrisic_dim=1, points=point_list) + assert tc.__is_defined() == True + assert tc.num_vertices() == 4 + assert tc.num_simplices() == 0 + assert tc.num_inconsistent_simplices() == 0 + assert tc.num_inconsistent_stars() == 0 + + tc.compute_tangential_complex() + assert tc.num_vertices() == 4 + assert tc.num_simplices() == 4 + assert tc.num_inconsistent_simplices() == 0 + assert tc.num_inconsistent_stars() == 0 + + st = tc.create_simplex_tree() + assert st.__is_defined() == True + assert st.__is_persistence_defined() == False + + assert st.num_simplices() == 6 + assert st.num_vertices() == 4 + + assert st.get_filtration() == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([0, 2], 0.0), + ([3], 0.0), + ([1, 3], 0.0), + ] + assert st.get_cofaces([0], 1) == [([0, 2], 0.0)] + + assert point_list[0] == tc.get_point(0) + assert point_list[1] == tc.get_point(1) + assert point_list[2] == tc.get_point(2) + assert point_list[3] == tc.get_point(3) + assert tc.get_point(4) == [] + assert tc.get_point(125) == [] diff --git a/src/python/test/test_witness_complex.py b/src/python/test/test_witness_complex.py new file mode 100755 index 00000000..36ced635 --- /dev/null +++ b/src/python/test/test_witness_complex.py @@ -0,0 +1,62 @@ +from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree + +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Vincent Rouvreau + + Copyright (C) 2016 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +__author__ = "Vincent Rouvreau" +__copyright__ = "Copyright (C) 2016 Inria" +__license__ = "MIT" + + +def test_empty_witness_complex(): + witness = WitnessComplex() + assert witness.__is_defined() == False + + +def test_witness_complex(): + nearest_landmark_table = [ + [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], + [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], + [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], + [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], + [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]], + ] + + witness_complex = WitnessComplex(nearest_landmark_table=nearest_landmark_table) + simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1) + assert simplex_tree.num_vertices() == 5 + assert simplex_tree.num_simplices() == 31 + simplex_tree = witness_complex.create_simplex_tree( + max_alpha_square=4.1, limit_dimension=2 + ) + assert simplex_tree.num_vertices() == 5 + assert simplex_tree.num_simplices() == 25 + + +def test_strong_witness_complex(): + nearest_landmark_table = [ + [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]], + [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]], + [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]], + [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]], + [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]], + ] + + strong_witness_complex = StrongWitnessComplex( + nearest_landmark_table=nearest_landmark_table + ) + simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1) + assert simplex_tree.num_vertices() == 5 + assert simplex_tree.num_simplices() == 31 + simplex_tree = strong_witness_complex.create_simplex_tree( + max_alpha_square=4.1, limit_dimension=2 + ) + assert simplex_tree.num_vertices() == 5 + assert simplex_tree.num_simplices() == 25 -- cgit v1.2.3 From 02bac3382cdd9cbeb1cff5d8d13cbc010548dfb0 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 9 Sep 2019 16:44:19 +0200 Subject: typo after search and replace --- src/python/doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index 3a853cd9..d8b6f861 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -117,7 +117,7 @@ A complete configuration would be : .. code-block:: none Python version 3.6.5 - python version 0.28.2 + Cython version 0.28.2 Pytest version 3.3.2 Matplotlib version 2.2.2 Numpy version 1.14.5 -- cgit v1.2.3 From c89a9b2fba95296b16b809ac8f99880173852ae3 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 9 Sep 2019 17:32:15 +0200 Subject: Add some debug traces to fix windows fail --- .appveyor.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 4b432277..cf68d0ce 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -54,7 +54,10 @@ build_script: - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( - cd src/python & + cd src & + dir & + cd python & + dir & MSBuild Python.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release ) else ( -- cgit v1.2.3 From 9ff122b98c3d59654ca6c75f50acd79fef9cd982 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 9 Sep 2019 21:21:39 +0200 Subject: Target is Cython on Windows, but was renamed python. 
--- .appveyor.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index cf68d0ce..5f9c4ef9 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -54,11 +54,8 @@ build_script: - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( - cd src & - dir & - cd python & - dir & - MSBuild Python.sln /m /p:Configuration=Release /p:Platform=x64 & + cd src/python & + MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & -- cgit v1.2.3 From 18225f328d286790f0596be1a58a0ab28e4a096c Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 10 Sep 2019 09:38:59 +0200 Subject: Add debug traces --- .appveyor.yml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 5f9c4ef9..94441b67 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,17 +13,17 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: - - target: Examples - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" + #- target: Examples + # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + # PYTHON: "C:\\Python37-x64" - - target: UnitaryTests - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" + #- target: UnitaryTests + # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + # PYTHON: "C:\\Python37-x64" - - target: Utilities - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - PYTHON: "C:\\Python37-x64" + #- target: Utilities + # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF + # PYTHON: "C:\\Python37-x64" - target: Python CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" @@ -55,6 +55,7 @@ build_script: - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. 
- if [%target%]==[Python] ( cd src/python & + dir & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release ) else ( -- cgit v1.2.3 From 2a313b489f1412f7e9d65681cea622d88828cba3 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 10 Sep 2019 10:01:52 +0200 Subject: Remove traces as it seems to work --- .appveyor.yml | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 94441b67..5f9c4ef9 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -13,17 +13,17 @@ environment: APPVEYOR_SAVE_CACHE_ON_ERROR: true matrix: - #- target: Examples - # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - # PYTHON: "C:\\Python37-x64" + - target: Examples + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - #- target: UnitaryTests - # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF - # PYTHON: "C:\\Python37-x64" + - target: UnitaryTests + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - #- target: Utilities - # CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - # PYTHON: "C:\\Python37-x64" + - target: Utilities + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF + PYTHON: "C:\\Python37-x64" - target: Python CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" @@ -55,7 +55,6 @@ build_script: - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. - if [%target%]==[Python] ( cd src/python & - dir & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 -C Release ) else ( -- cgit v1.2.3 From 47cb6bac455e97f38ea509d4b76317a1924c4846 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 10 Sep 2019 16:35:02 +0200 Subject: Rename private variables as some of them are reserved --- src/python/gudhi/__init__.py.in | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in index 947aa3c9..28bab0e1 100644 --- a/src/python/gudhi/__init__.py.in +++ b/src/python/gudhi/__init__.py.in @@ -23,18 +23,18 @@ from importlib import import_module __all__ = [@GUDHI_PYTHON_MODULES@] -__available_modules__ = '' -__missing_modules__ = '' +__available_modules = '' +__missing_modules = '' -# try to import * from gudhi.__module_name__ -for __module_name__ in __all__: +# try to import * from gudhi.__module_name +for __module_name in __all__: try: - __module__ = import_module('gudhi.' + __module_name__) + __module = import_module('gudhi.' 
+ __module_name) try: - __to_import__ = __module__.__all__ + __to_import = __module.__all__ except AttributeError: - __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')] - globals().update({name: __module__.__dict__[name] for name in __to_import__}) - __available_modules__ += __module_name__ + ";" + __to_import = [name for name in __module.__dict__ if not name.startswith('_')] + globals().update({name: __module.__dict__[name] for name in __to_import}) + __available_modules += __module_name + ";" except: - __missing_modules__ += __module_name__ + ";" + __missing_modules += __module_name + ";" -- cgit v1.2.3 From 14e5d696d3b0a6f6bd89d570956296473aba2e51 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 11 Sep 2019 08:48:02 +0200 Subject: New release candidate --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index bc34d9c7..f8927c15 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 0) -set (GUDHI_PATCH_VERSION 0.rc1) +set (GUDHI_PATCH_VERSION 0.rc2) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From db72c88f069ee4be18546c93342fbee5803acd43 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 11 Sep 2019 08:48:31 +0200 Subject: Rename Eigen3 as Eigen (sphinx warning) --- src/python/doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index d8b6f861..5a6ad9f4 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -138,7 +138,7 @@ Documentation To build the documentation, `sphinx-doc `_ and `sphinxcontrib-bibtex `_ are -required. As the documentation is auto-tested, `CGAL`_, `Eigen3`_, +required. As the documentation is auto-tested, `CGAL`_, `Eigen`_, `Matplotlib`_, `NumPy`_ and `SciPy`_ are also mandatory to build the documentation. -- cgit v1.2.3 From f3327ed68935808880f433da72e80be77a6723ee Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 18 Sep 2019 07:41:27 +0200 Subject: Fix GMP path for every job --- .appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index ab943fb7..ade94c0d 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -26,7 +26,7 @@ environment: PYTHON: "C:\\Python37-x64" - target: Python - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON PYTHON: "C:\\Python37-x64" @@ -52,7 +52,7 @@ install: build_script: - mkdir build - cd build - - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. + - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% -DGMP_INCLUDE_DIR="c:/Tools/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/Tools/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/Tools/vcpkg/installed/x64-windows/lib" -DCMAKE_TOOLCHAIN_FILE=c:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake .. 
- if [%target%]==[Python] ( cd src/cython & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & -- cgit v1.2.3 From d21df79601349387c4f7a6a1a3b19483f76f9380 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 20 Sep 2019 10:37:58 +0200 Subject: C++ 14 is the new standard for the project --- src/cmake/modules/GUDHI_compilation_flags.cmake | 2 +- src/python/CMakeLists.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cmake/modules/GUDHI_compilation_flags.cmake b/src/cmake/modules/GUDHI_compilation_flags.cmake index 86cd531b..6cd2614d 100644 --- a/src/cmake/modules/GUDHI_compilation_flags.cmake +++ b/src/cmake/modules/GUDHI_compilation_flags.cmake @@ -38,7 +38,7 @@ function(can_cgal_use_cxx11_thread_local) check_cxx_source_compiles("${CGAL_CAN_USE_CXX11_THREAD_LOCAL}" CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT) endfunction() -set (CMAKE_CXX_STANDARD 11) +set (CMAKE_CXX_STANDARD 14) enable_testing() diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 9e128d30..5508cbc7 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -73,7 +73,7 @@ if(PYTHONINTERP_FOUND) if(MSVC) set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'/fp:strict', ") else(MSVC) - set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-std=c++11', ") + set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-std=c++14', ") endif(MSVC) if(CMAKE_COMPILER_IS_GNUCXX) set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ") -- cgit v1.2.3 From 89d678a78dd2bf05ef3d00e889fe0d9c51fb3b32 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 20 Sep 2019 17:36:57 +0200 Subject: c++14 in installation documentation --- src/common/doc/installation.h | 2 +- src/python/doc/installation.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 54f86573..2e64bef8 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -5,7 +5,7 @@ * Examples of GUDHI headers inclusion can be found in \ref utilities. * * \section compiling Compiling - * The library uses c++11 and requires Boost ≥ 1.56.0 + * The library uses c++14 and requires Boost ≥ 1.56.0 * and CMake ≥ 3.1. * It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015. * diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index 5a6ad9f4..77d9e8b3 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -12,7 +12,7 @@ The easiest way to install the Python version of GUDHI is using Compiling ********* -The library uses c++11 and requires `Boost `_ ≥ 1.56.0, +The library uses c++14 and requires `Boost `_ ≥ 1.56.0, `CMake `_ ≥ 3.1 to generate makefiles, `NumPy `_ and `Cython `_ to compile the GUDHI Python module. 
-- cgit v1.2.3 From de8e4aba94a0d5ecf933ad3ee1c05ccb866288b0 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 23 Sep 2019 17:34:32 +0200 Subject: 3.0.0 release --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index f8927c15..eb2a0666 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 0) -set (GUDHI_PATCH_VERSION 0.rc2) +set (GUDHI_PATCH_VERSION 0) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 2268124c398fdfb83547bec4f9d704dffdd0f673 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 24 Sep 2019 13:28:26 +0200 Subject: Rename Dockerfile for circleci. Add a Dockerfile to test the installation --- Dockerfile_for_circleci_image | 58 ++++++++++++++++++++++++++++++++++++++ Dockerfile_gudhi_installation | 65 +++++++++++++++++++++++++++++++++++++++++++ Dockerfile_ubuntu | 58 -------------------------------------- 3 files changed, 123 insertions(+), 58 deletions(-) create mode 100644 Dockerfile_for_circleci_image create mode 100644 Dockerfile_gudhi_installation delete mode 100644 Dockerfile_ubuntu diff --git a/Dockerfile_for_circleci_image b/Dockerfile_for_circleci_image new file mode 100644 index 00000000..e149a33a --- /dev/null +++ b/Dockerfile_for_circleci_image @@ -0,0 +1,58 @@ +FROM ubuntu:19.04 + +# Update and upgrade distribution +RUN apt-get update && \ + apt-get upgrade -y + +# Tools necessary for installing and configuring Ubuntu +RUN apt-get install -y \ + apt-utils \ + locales \ + tzdata + +# Timezone +RUN echo "Europe/Paris" | tee /etc/timezone && \ + ln -fs /usr/share/zoneinfo/Europe/Paris /etc/localtime && \ + dpkg-reconfigure -f noninteractive tzdata + +# Locale with UTF-8 support +RUN echo en_US.UTF-8 UTF-8 >> /etc/locale.gen && \ + locale-gen && \ + update-locale LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 + +# Required for Gudhi compilation +RUN apt-get install -y make \ + g++ \ + cmake \ + graphviz \ + perl \ + texlive-bibtex-extra \ + biber \ + doxygen \ + libboost-all-dev \ + libeigen3-dev \ + libgmp3-dev \ + libmpfr-dev \ + libtbb-dev \ + libcgal-dev \ + locales \ + python3 \ + python3-pip \ + python3-pytest \ + python3-tk \ + libfreetype6-dev \ + pkg-config + +RUN pip3 install \ + numpy \ + matplotlib \ + scipy \ + Cython \ + sphinx \ + sphinxcontrib-bibtex + +# apt clean up +RUN apt autoremove && rm -rf /var/lib/apt/lists/* diff --git a/Dockerfile_gudhi_installation b/Dockerfile_gudhi_installation new file mode 100644 index 00000000..9fe20730 --- /dev/null +++ b/Dockerfile_gudhi_installation @@ -0,0 +1,65 @@ +FROM ubuntu:19.04 + +# Update and upgrade distribution +RUN apt-get update && \ + apt-get upgrade -y + +# Tools necessary for installing and configuring Ubuntu +RUN apt-get install -y \ + apt-utils \ + locales \ + tzdata + +# Timezone +RUN echo "Europe/Paris" | tee /etc/timezone && \ + ln -fs /usr/share/zoneinfo/Europe/Paris /etc/localtime && \ + dpkg-reconfigure -f noninteractive tzdata + +# Locale with UTF-8 support +RUN echo en_US.UTF-8 UTF-8 >> /etc/locale.gen && \ + locale-gen && \ + update-locale LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 + +# Required for Gudhi compilation +RUN apt-get install -y make \ + g++ \ + cmake \ + graphviz \ + perl \ + 
texlive-bibtex-extra \ + biber \ + libboost-all-dev \ + libeigen3-dev \ + libgmp3-dev \ + libmpfr-dev \ + libtbb-dev \ + libcgal-dev \ + locales \ + python3 \ + python3-pip \ + python3-pytest \ + python3-tk \ + libfreetype6-dev \ + pkg-config \ + curl + +RUN pip3 install \ + numpy \ + matplotlib \ + scipy \ + Cython + +# apt clean up +RUN apt autoremove && rm -rf /var/lib/apt/lists/* + +RUN curl -LO "https://github.com/GUDHI/gudhi-devel/releases/download/tags%2Fgudhi-release-3.0.0/gudhi.3.0.0.tar.gz" \ +&& tar xf gudhi.3.0.0.tar.gz \ +&& cd gudhi.3.0.0 \ +&& mkdir build && cd build && cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_PYTHON=OFF -DPython_ADDITIONAL_VERSIONS=3 .. \ +&& make all test install \ +&& cmake -DWITH_GUDHI_PYTHON=ON . \ +&& cd python \ +&& python3 setup.py install \ No newline at end of file diff --git a/Dockerfile_ubuntu b/Dockerfile_ubuntu deleted file mode 100644 index e149a33a..00000000 --- a/Dockerfile_ubuntu +++ /dev/null @@ -1,58 +0,0 @@ -FROM ubuntu:19.04 - -# Update and upgrade distribution -RUN apt-get update && \ - apt-get upgrade -y - -# Tools necessary for installing and configuring Ubuntu -RUN apt-get install -y \ - apt-utils \ - locales \ - tzdata - -# Timezone -RUN echo "Europe/Paris" | tee /etc/timezone && \ - ln -fs /usr/share/zoneinfo/Europe/Paris /etc/localtime && \ - dpkg-reconfigure -f noninteractive tzdata - -# Locale with UTF-8 support -RUN echo en_US.UTF-8 UTF-8 >> /etc/locale.gen && \ - locale-gen && \ - update-locale LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 -ENV LANG en_US.UTF-8 -ENV LANGUAGE en_US:en -ENV LC_ALL en_US.UTF-8 - -# Required for Gudhi compilation -RUN apt-get install -y make \ - g++ \ - cmake \ - graphviz \ - perl \ - texlive-bibtex-extra \ - biber \ - doxygen \ - libboost-all-dev \ - libeigen3-dev \ - libgmp3-dev \ - libmpfr-dev \ - libtbb-dev \ - libcgal-dev \ - locales \ - python3 \ - python3-pip \ - python3-pytest \ - python3-tk \ - libfreetype6-dev \ - pkg-config - -RUN pip3 install \ - numpy \ - matplotlib \ - scipy \ - Cython \ - sphinx \ - sphinxcontrib-bibtex - -# apt clean up -RUN apt autoremove && rm -rf /var/lib/apt/lists/* -- cgit v1.2.3 From 8cbeb21d32701e7ae848062116cae91833667900 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 24 Sep 2019 14:07:47 +0200 Subject: Add POT for wasserstein purpose --- Dockerfile_ubuntu | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile_ubuntu b/Dockerfile_ubuntu index e149a33a..12f2dc94 100644 --- a/Dockerfile_ubuntu +++ b/Dockerfile_ubuntu @@ -51,6 +51,7 @@ RUN pip3 install \ matplotlib \ scipy \ Cython \ + POT \ sphinx \ sphinxcontrib-bibtex -- cgit v1.2.3 From 6e4b12c89f2c5b32c19076878d86212ffa27e817 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 2 Oct 2019 11:34:48 +0200 Subject: Fix DOI Fix #99 --- biblio/bibliography.bib | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index d1b2f558..a1b951e0 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -1076,7 +1076,7 @@ language={English} journal = {Computational Geometry: Theory and Applications}, volume = {58}, pages = {70--96}, - doi = "https://doi.org/10.1016/j.comgeo.2016.07.001", + doi = "10.1016/j.comgeo.2016.07.001", year = {2016} } -- cgit v1.2.3 From b1824e4de6fd1d037af3c1341c3065731472ffc8 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Tue, 15 Oct 2019 17:37:01 +0200 Subject: Add conda package badges --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff 
--git a/README.md b/README.md
index 8636ac77..167a38b3 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,8 @@
 [![Build Status](https://travis-ci.org/GUDHI/gudhi-devel.svg?branch=master)](https://travis-ci.org/GUDHI/gudhi-devel)
 [![CircleCI](https://circleci.com/gh/GUDHI/gudhi-devel/tree/master.svg?style=svg)](https://circleci.com/gh/GUDHI/gudhi-devel/tree/master)
 [![Build status](https://ci.appveyor.com/api/projects/status/976j2uut8xgalvx2/branch/master?svg=true)](https://ci.appveyor.com/project/GUDHI/gudhi-devel/branch/master)
+[![Anaconda Cloud](https://anaconda.org/conda-forge/gudhi/badges/version.svg)](https://anaconda.org/conda-forge/gudhi)
+[![Anaconda downloads](https://anaconda.org/conda-forge/gudhi/badges/downloads.svg)](https://anaconda.org/conda-forge/gudhi)
 
 ![GUDHI](src/common/doc/Gudhi_banner.png "Topological Data Analysis (TDA) and Higher Dimensional Geometry Understanding")
-- cgit v1.2.3


From a6c844172072f1798bbbe6d5c9b4f3a548cb4c50 Mon Sep 17 00:00:00 2001
From: Siargey Kachanovich
Date: Thu, 17 Oct 2019 14:21:07 +0200
Subject: Include <cmath> for std::fabs

---
 src/common/include/gudhi/Unitary_tests_utils.h | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/common/include/gudhi/Unitary_tests_utils.h b/src/common/include/gudhi/Unitary_tests_utils.h
index 4ad4dae8..7d039304 100644
--- a/src/common/include/gudhi/Unitary_tests_utils.h
+++ b/src/common/include/gudhi/Unitary_tests_utils.h
@@ -14,6 +14,7 @@
 #include
 #include <limits>  // for std::numeric_limits<>
+#include <cmath>  // for std::fabs
 
 template<typename FloatingType>
 void GUDHI_TEST_FLOAT_EQUALITY_CHECK(FloatingType a, FloatingType b,
-- cgit v1.2.3
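The last patch above adds the <cmath> include that std::fabs needs in GUDHI's unit-test helper header. As a rough standalone sketch only, assuming nothing about the real GUDHI_TEST_FLOAT_EQUALITY_CHECK beyond the signature fragment visible in the diff (the helper name float_equality, the tolerances and the main() driver below are invented for illustration), a fabs-based floating-point equality check can look like this:

// float_equality_sketch.cpp -- illustrative sketch; the actual helper lives in
// src/common/include/gudhi/Unitary_tests_utils.h and may differ in details.
#include <cassert>
#include <cmath>   // for std::fabs
#include <limits>  // for std::numeric_limits<>

template <typename FloatingType>
bool float_equality(FloatingType a, FloatingType b,
                    FloatingType epsilon = std::numeric_limits<FloatingType>::epsilon()) {
  // Two values are considered equal when their absolute difference
  // stays within the given tolerance.
  return std::fabs(a - b) <= epsilon;
}

int main() {
  assert(float_equality(0.1 + 0.2, 0.3, 1e-12));  // passes despite rounding error
  assert(!float_equality(1.0, 1.1));              // clearly different values
  return 0;
}

Comparing against an absolute epsilon is the simplest choice; a relative tolerance scaled by the magnitude of the operands is often preferable when the values being compared can be large.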