From 1ae99c5f04b6d14a730bdc8fea2e77d33665cd26 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 14 Dec 2017 12:31:48 +0000 Subject: Add cmake improvement for Persistence representation module. Was bad tested and too hard to maintain git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/persistence_representation_integration_cmake_improvement@3071 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: fa4684701edbe6d172e75f119afa912eb3b5dad7 --- src/Persistence_representations/example/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/Persistence_representations/example') diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 7788b603..3a6696b7 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required(VERSION 2.6) project(Persistence_representations_example) -file(COPY "${CMAKE_SOURCE_DIR}/data/persistence_diagram/simple_diagram.txt" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") +#file(COPY "${CMAKE_SOURCE_DIR}/data/persistence_diagram/simple_diagram.txt" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp ) target_link_libraries(Persistence_representations_example_landscape_on_grid ${Boost_SYSTEM_LIBRARY}) @@ -17,7 +17,7 @@ add_executable ( Persistence_representations_example_intervals persistence_inter target_link_libraries(Persistence_representations_example_intervals ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_intervals COMMAND $ - "${CMAKE_CURRENT_BINARY_DIR}/simple_diagram.txt") + "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers") add_executable ( Persistence_representations_example_vectors persistence_vectors.cpp ) target_link_libraries(Persistence_representations_example_vectors ${Boost_SYSTEM_LIBRARY}) -- cgit v1.2.3 From 80aa35b3ad53c95607221167b72d9aa00e1f71b5 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 14 Dec 2017 12:45:10 +0000 Subject: Forgotten git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/persistence_representation_integration_cmake_improvement@3072 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 7a6ef2d5fd72f53a396464bba4035f5610552c9b --- src/Persistence_representations/example/CMakeLists.txt | 2 -- 1 file changed, 2 deletions(-) (limited to 'src/Persistence_representations/example') diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index 3a6696b7..b8ce8ea6 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -1,8 +1,6 @@ cmake_minimum_required(VERSION 2.6) project(Persistence_representations_example) -#file(COPY "${CMAKE_SOURCE_DIR}/data/persistence_diagram/simple_diagram.txt" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") - add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp ) target_link_libraries(Persistence_representations_example_landscape_on_grid ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_landscape_on_grid -- cgit v1.2.3 From 97a6bdd67d8cff9bc8c0126df86be044d393583c Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 8 Jan 2018 14:04:26 +0000 Subject: clang-format files git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3118 
636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: c8371fc65da4f9912b602a4f3a6701661e46580e --- .../doc/Persistence_representations_doc.h | 7 +-- .../example/persistence_vectors.cpp | 2 +- .../test/persistence_heat_maps_test.cpp | 14 ++--- .../test/persistence_intervals_test.cpp | 6 +-- .../test/persistence_lanscapes_on_grid_test.cpp | 10 ++-- .../test/persistence_lanscapes_test.cpp | 60 ++++++---------------- .../test/read_persistence_from_file_test.cpp | 10 ++-- .../test/vector_representation_test.cpp | 6 +-- ...h_m_weighted_by_arctan_of_their_persistence.cpp | 2 +- ...te_p_h_m_weighted_by_distance_from_diagonal.cpp | 3 +- ...ate_p_h_m_weighted_by_squared_diag_distance.cpp | 4 +- ...ompute_scalar_product_of_landscapes_on_grid.cpp | 7 +-- .../create_landscapes_on_grid.cpp | 1 - .../average_persistence_vectors.cpp | 3 +- .../compute_distance_of_persistence_vectors.cpp | 3 +- ...mpute_scalar_product_of_persistence_vectors.cpp | 3 +- .../create_persistence_vectors.cpp | 3 +- .../plot_persistence_vectors.cpp | 3 +- 18 files changed, 53 insertions(+), 94 deletions(-) (limited to 'src/Persistence_representations/example') diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index c77e75e2..978fb5bd 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -83,8 +83,8 @@ namespace Persistence_representations { \li Persistence vectors (allow averaging, computation of distances, scalar products, vectorizations and real value characteristics). \li Persistence diagrams / barcodes (allow computation of distances, vectorizations and real value characteristics). - - + + Note that at the while functionalities like averaging, distances and scalar products are fixed, there is no canonical way of vectorizing and computing real valued characteristics of objects. Therefore the vectorizations and computation of real value characteristics procedures are quite likely to evolve in the furthering @@ -139,7 +139,8 @@ namespace Persistence_representations { possible ways to proceed: \li Use non exact representation on a grid described in the Section \ref sec_landscapes_on_grid. - \li Compute just a number of initial nonzero landscapes. This option is available from C++ level as a last parameter of the constructor of persistence landscape (set by default to std::numeric_limits::max()). + \li Compute just a number of initial nonzero landscapes. This option is available from C++ level as a last parameter of + the constructor of persistence landscape (set by default to std::numeric_limits::max()). 
diff --git a/src/Persistence_representations/example/persistence_vectors.cpp b/src/Persistence_representations/example/persistence_vectors.cpp index 59eca152..834ae644 100644 --- a/src/Persistence_representations/example/persistence_vectors.cpp +++ b/src/Persistence_representations/example/persistence_vectors.cpp @@ -30,7 +30,7 @@ #include using Vector_distances_in_diagram = - Gudhi::Persistence_representations::Vector_distances_in_diagram; + Gudhi::Persistence_representations::Vector_distances_in_diagram; int main(int argc, char** argv) { // create two simple vectors with birth--death pairs: diff --git a/src/Persistence_representations/test/persistence_heat_maps_test.cpp b/src/Persistence_representations/test/persistence_heat_maps_test.cpp index 4da13b86..e36108b7 100644 --- a/src/Persistence_representations/test/persistence_heat_maps_test.cpp +++ b/src/Persistence_representations/test/persistence_heat_maps_test.cpp @@ -142,15 +142,15 @@ BOOST_AUTO_TEST_CASE(check_distance_for_heat_maps) { Persistence_heat_maps q("data/file_with_diagram_1", filter, false, 1000, 0, 1); Persistence_heat_maps r("data/file_with_diagram_2", filter, false, 1000, 0, 1); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.distance(p), 0., epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.distance(p), 0., epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.distance(q), 624.183, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.distance(r), 415.815, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.distance(p), 624.183, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.distance(q), 0., epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.distance(q), 0., epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.distance(r), 528.066, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.distance(p), 415.815, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.distance(q), 528.066, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.distance(r), 0., epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.distance(r), 0., epsilon); } BOOST_AUTO_TEST_CASE(check_projections_to_R_for_heat_maps) { @@ -174,11 +174,11 @@ BOOST_AUTO_TEST_CASE(check_scalar_products_for_heat_maps) { GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_scalar_product(q), 0.0509357, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_scalar_product(r), 0.0375608, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.compute_scalar_product(p), 0.0509357, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.compute_scalar_product(q), 1.31293 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.compute_scalar_product(r), 0.536799 , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.compute_scalar_product(q), 1.31293, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(q.compute_scalar_product(r), 0.536799, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.compute_scalar_product(p), 0.0375608, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.compute_scalar_product(q), 0.536799 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.compute_scalar_product(r), 0.672907 , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.compute_scalar_product(q), 0.536799, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(r.compute_scalar_product(r), 0.672907, epsilon); } BOOST_AUTO_TEST_CASE(check_arythmetic_operations_for_heat_maps) { diff --git a/src/Persistence_representations/test/persistence_intervals_test.cpp b/src/Persistence_representations/test/persistence_intervals_test.cpp index c8f67966..f555e243 100644 --- a/src/Persistence_representations/test/persistence_intervals_test.cpp +++ b/src/Persistence_representations/test/persistence_intervals_test.cpp @@ -266,8 +266,7 @@ 
BOOST_AUTO_TEST_CASE(check_compute_persistent_betti_numbers) { std::vector > pbns_new = p.compute_persistent_betti_numbers(); for (size_t i = 0; i != pbns.size(); ++i) { - GUDHI_TEST_FLOAT_EQUALITY_CHECK(pbns[i].first, pbns_new[i].first, - Gudhi::Persistence_representations::epsi); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(pbns[i].first, pbns_new[i].first, Gudhi::Persistence_representations::epsi); BOOST_CHECK(pbns[i].second == pbns_new[i].second); } } @@ -288,7 +287,6 @@ BOOST_AUTO_TEST_CASE(check_k_n_n) { knn_template.push_back(0.786945); for (size_t i = 0; i != knn.size(); ++i) { - GUDHI_TEST_FLOAT_EQUALITY_CHECK(knn[i], knn_template[i], - Gudhi::Persistence_representations::epsi); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(knn[i], knn_template[i], Gudhi::Persistence_representations::epsi); } } diff --git a/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp b/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp index 8e2808d0..130ac8cc 100644 --- a/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp +++ b/src/Persistence_representations/test/persistence_lanscapes_on_grid_test.cpp @@ -179,8 +179,8 @@ BOOST_AUTO_TEST_CASE(check_computations_of_maxima_and_norms) { Persistence_landscape_on_grid sum = p + second; GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_maximum(), 0.46, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(1), 27.3373 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(2), 1.84143 , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(1), 27.3373, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(2), 1.84143, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(3), 0.927067, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(compute_distance_of_landscapes_on_grid(p, sum, 1), 16.8519, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(compute_distance_of_landscapes_on_grid(p, sum, 2), 1.44542, epsilon); @@ -189,12 +189,10 @@ BOOST_AUTO_TEST_CASE(check_computations_of_maxima_and_norms) { } BOOST_AUTO_TEST_CASE(check_default_parameters_of_distances) { - std::vector > diag = - read_persistence_intervals_in_dimension("data/file_with_diagram"); + std::vector > diag = read_persistence_intervals_in_dimension("data/file_with_diagram"); Persistence_landscape_on_grid p(diag, 0., 1., 100); - std::vector > diag1 = - read_persistence_intervals_in_dimension("data/file_with_diagram_1"); + std::vector > diag1 = read_persistence_intervals_in_dimension("data/file_with_diagram_1"); Persistence_landscape_on_grid q(diag1, 0., 1., 100); double dist_numeric_limit_max = p.distance(q, std::numeric_limits::max()); diff --git a/src/Persistence_representations/test/persistence_lanscapes_test.cpp b/src/Persistence_representations/test/persistence_lanscapes_test.cpp index e7267bec..e98ef894 100644 --- a/src/Persistence_representations/test/persistence_lanscapes_test.cpp +++ b/src/Persistence_representations/test/persistence_lanscapes_test.cpp @@ -35,12 +35,12 @@ using namespace Gudhi::Persistence_representations; double epsilon = 0.0005; -BOOST_AUTO_TEST_CASE(check_construction_of_landscape) { +BOOST_AUTO_TEST_CASE(check_construction_of_landscape) { std::vector > diag = - read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram"); + read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram"); Persistence_landscape p(diag); - Persistence_landscape q; - q.load_landscape_from_file("data/file_with_landscape_from_file_with_diagram"); + 
Persistence_landscape q; + q.load_landscape_from_file("data/file_with_landscape_from_file_with_diagram"); BOOST_CHECK(p == q); } @@ -48,12 +48,10 @@ BOOST_AUTO_TEST_CASE(check_construction_of_landscape_form_gudhi_style_file) { Persistence_landscape p("data/persistence_file_with_four_entries_per_line", 1); // p.print_to_file("persistence_file_with_four_entries_per_line_landscape"); Persistence_landscape q; - q.load_landscape_from_file("data/persistence_file_with_four_entries_per_line_landscape"); + q.load_landscape_from_file("data/persistence_file_with_four_entries_per_line_landscape"); BOOST_CHECK(p == q); } - - BOOST_AUTO_TEST_CASE(check_computations_of_integrals) { std::vector > diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram"); @@ -123,18 +121,18 @@ BOOST_AUTO_TEST_CASE(check_computations_of_values_on_different_points) { read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram"); Persistence_landscape p(diag); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.0), 0. , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.0), 0., epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.1), 0.0692324, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.2), 0.163369 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.3), 0.217115 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.0), 0. , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.2), 0.163369, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(1, 0.3), 0.217115, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.0), 0., epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.1), 0.0633688, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.2), 0.122361 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.3), 0.195401 , epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(3, 0.0), 0. 
, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.2), 0.122361, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(2, 0.3), 0.195401, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(3, 0.0), 0., epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(3, 0.1), 0.0455386, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(3, 0.2), 0.0954012, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(3, 0.3), 0.185282 , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_value_at_a_given_point(3, 0.3), 0.185282, epsilon); } BOOST_AUTO_TEST_CASE(check_computations_sum_differences_and_multiplications) { @@ -170,14 +168,14 @@ BOOST_AUTO_TEST_CASE(check_computations_of_maxima_and_norms) { second.load_landscape_from_file("data/file_with_landscape_from_file_with_diagram_1"); Persistence_landscape sum = p + second; - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_maximum() , 0.431313, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(1), 2.34992 , epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_maximum(), 0.431313, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(1), 2.34992, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(2), 0.706095, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_norm_of_landscape(3), 0.501867, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(compute_distance_of_landscapes(p, sum, 1), 27.9323, epsilon); GUDHI_TEST_FLOAT_EQUALITY_CHECK(compute_distance_of_landscapes(p, sum, 2), 2.35199, epsilon); - GUDHI_TEST_FLOAT_EQUALITY_CHECK(compute_distance_of_landscapes(p, sum, std::numeric_limits::max()), - 0.464478, epsilon); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(compute_distance_of_landscapes(p, sum, std::numeric_limits::max()), 0.464478, + epsilon); } BOOST_AUTO_TEST_CASE(check_default_parameters_of_distances) { @@ -232,30 +230,6 @@ BOOST_AUTO_TEST_CASE(check_computations_of_scalar_product) { GUDHI_TEST_FLOAT_EQUALITY_CHECK(p.compute_scalar_product(q), 0.754498, epsilon); } - - - - - - - - - - - - - - - - - - - - - - - - // Below I am storing the code used to generate tests for that functionality. 
/* if ( argc != 2 ) diff --git a/src/Persistence_representations/test/read_persistence_from_file_test.cpp b/src/Persistence_representations/test/read_persistence_from_file_test.cpp index afdc822c..276b92ab 100644 --- a/src/Persistence_representations/test/read_persistence_from_file_test.cpp +++ b/src/Persistence_representations/test/read_persistence_from_file_test.cpp @@ -30,8 +30,6 @@ using namespace Gudhi; using namespace Gudhi::Persistence_representations; - - BOOST_AUTO_TEST_CASE(test_read_file_with_four_elements_per_line) { std::vector > what_we_should_get; what_we_should_get.push_back(std::make_pair(0, 2)); @@ -39,7 +37,7 @@ BOOST_AUTO_TEST_CASE(test_read_file_with_four_elements_per_line) { what_we_should_get.push_back(std::make_pair(10, 90)); what_we_should_get.push_back(std::make_pair(4, 4)); std::vector > what_we_get = read_persistence_intervals_in_one_dimension_from_file( - "data/persistence_file_with_four_entries_per_line", 1, 1000); + "data/persistence_file_with_four_entries_per_line", 1, 1000); // for ( size_t i = 0 ; i != what_we_get.size() ; ++i ) //{ @@ -76,7 +74,6 @@ BOOST_AUTO_TEST_CASE(test_read_file_with_three_elements_per_line) { } } - BOOST_AUTO_TEST_CASE(test_read_file_with_two_elements_per_line) { std::vector > what_we_should_get; what_we_should_get.push_back(std::make_pair(4, 10)); @@ -84,12 +81,11 @@ BOOST_AUTO_TEST_CASE(test_read_file_with_two_elements_per_line) { what_we_should_get.push_back(std::make_pair(0, 1)); what_we_should_get.push_back(std::make_pair(1, 4)); - std::vector > what_we_get = - read_persistence_intervals_in_one_dimension_from_file("data/persistence_file_with_two_entries_per_line", -1, 9999); + std::vector > what_we_get = read_persistence_intervals_in_one_dimension_from_file( + "data/persistence_file_with_two_entries_per_line", -1, 9999); BOOST_CHECK(what_we_should_get.size() == what_we_get.size()); for (size_t i = 0; i != what_we_get.size(); ++i) { BOOST_CHECK(what_we_should_get[i] == what_we_get[i]); } } - diff --git a/src/Persistence_representations/test/vector_representation_test.cpp b/src/Persistence_representations/test/vector_representation_test.cpp index 7e4ce3ee..c545dce7 100644 --- a/src/Persistence_representations/test/vector_representation_test.cpp +++ b/src/Persistence_representations/test/vector_representation_test.cpp @@ -295,12 +295,10 @@ BOOST_AUTO_TEST_CASE(check_distance_computations) { } BOOST_AUTO_TEST_CASE(check_default_parameters_of_distances) { - std::vector > diag = - read_persistence_intervals_in_dimension("data/file_with_diagram"); + std::vector > diag = read_persistence_intervals_in_dimension("data/file_with_diagram"); Vector_distances_in_diagram p(diag, 100); - std::vector > diag1 = - read_persistence_intervals_in_dimension("data/file_with_diagram_1"); + std::vector > diag1 = read_persistence_intervals_in_dimension("data/file_with_diagram_1"); Vector_distances_in_diagram q(diag1, 100); double dist_numeric_limit_max = p.distance(q, std::numeric_limits::max()); diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp index 47c70484..b4a1daa5 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp @@ -29,7 +29,7 @@ using 
arc_tan_of_persistence_of_point = Gudhi::Persistence_representations::arc_tan_of_persistence_of_point; using Persistence_heat_maps = - Gudhi::Persistence_representations::Persistence_heat_maps; + Gudhi::Persistence_representations::Persistence_heat_maps; int main(int argc, char** argv) { std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) " diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp index 659a5105..c50f9ddb 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp @@ -28,8 +28,7 @@ #include using distance_from_diagonal_scaling = Gudhi::Persistence_representations::distance_from_diagonal_scaling; -using Persistence_heat_maps = - Gudhi::Persistence_representations::Persistence_heat_maps; +using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps; int main(int argc, char** argv) { std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) " diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp index 9497b188..59ff3c24 100644 --- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp +++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp @@ -28,9 +28,9 @@ #include using squared_distance_from_diagonal_scaling = - Gudhi::Persistence_representations::squared_distance_from_diagonal_scaling; + Gudhi::Persistence_representations::squared_distance_from_diagonal_scaling; using Persistence_heat_maps = - Gudhi::Persistence_representations::Persistence_heat_maps; + Gudhi::Persistence_representations::Persistence_heat_maps; int main(int argc, char** argv) { std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) " diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp index e2222487..01de3dee 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp @@ -29,9 +29,10 @@ using Persistence_landscape_on_grid = Gudhi::Persistence_representations::Persistence_landscape_on_grid; int main(int argc, char** argv) { - std::cout << "This program computes scalar product of persistence landscapes on grid stored in a file (the file needs to " - << "be created beforehand). \n" - << "The parameters of this programs are names of files with persistence landscapes on grid.\n"; + std::cout + << "This program computes scalar product of persistence landscapes on grid stored in a file (the file needs to " + << "be created beforehand). 
\n" + << "The parameters of this programs are names of files with persistence landscapes on grid.\n"; if (argc < 3) { std::cout << "Wrong number of parameters, the program will now terminate \n"; diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp index 2827f982..78e8ef57 100644 --- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp +++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp @@ -48,7 +48,6 @@ int main(int argc, char** argv) { return 1; } - size_t size_of_grid = (size_t)atoi(argv[1]); double min_ = atof(argv[2]); double max_ = atof(argv[3]); diff --git a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp index 8de7725c..0144e76f 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp @@ -26,8 +26,7 @@ #include using Euclidean_distance = Gudhi::Euclidean_distance; -using Vector_distances_in_diagram = - Gudhi::Persistence_representations::Vector_distances_in_diagram; +using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram; int main(int argc, char** argv) { std::cout << "This program computes average of persistence vectors stored in files (the files needs to " diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp index f8b71e5b..7e66d25e 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp @@ -28,8 +28,7 @@ #include using Euclidean_distance = Gudhi::Euclidean_distance; -using Vector_distances_in_diagram = - Gudhi::Persistence_representations::Vector_distances_in_diagram; +using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram; int main(int argc, char** argv) { std::cout << "This program compute distance of persistence vectors stored in a file (the file needs to be created " diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp index bda0b61b..303c6e3e 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp @@ -28,8 +28,7 @@ #include using Euclidean_distance = Gudhi::Euclidean_distance; -using Vector_distances_in_diagram = - Gudhi::Persistence_representations::Vector_distances_in_diagram; +using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram; int main(int argc, char** argv) { std::cout << "This program computes scalar product of persistence vectors stored in a file (the file needs to 
" diff --git a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp index 753675cb..cc5e5393 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp @@ -28,8 +28,7 @@ #include using Euclidean_distance = Gudhi::Euclidean_distance; -using Vector_distances_in_diagram = - Gudhi::Persistence_representations::Vector_distances_in_diagram; +using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram; int main(int argc, char** argv) { std::cout << "This program creates persistence vectors files (*.vect) of persistence diagrams files (*.pers) " diff --git a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp index e52b8f7d..aa33107d 100644 --- a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp +++ b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp @@ -26,8 +26,7 @@ #include using Euclidean_distance = Gudhi::Euclidean_distance; -using Vector_distances_in_diagram = - Gudhi::Persistence_representations::Vector_distances_in_diagram; +using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram; int main(int argc, char** argv) { std::cout << "This program create a Gnuplot script to plot persistence vector. Please call this program with the " -- cgit v1.2.3 From 06ff6fac211d2823c7d14a6d2f4a4db03f48d2e3 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 26 Jan 2018 14:01:39 +0000 Subject: Seperate installation and examples from main page Move cover complex utilities from examples GIC.cpp example was not compiled, nor tested. It is removed. 
Persistence representation : no need to link with Boost_SYSTEM git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3164 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: cf2bfa6c6de2ed359aaa165b9f80bca7e06defb1 --- src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 29 -- src/Nerve_GIC/example/CMakeLists.txt | 12 - src/Nerve_GIC/example/GIC.cpp | 95 ----- .../example/KeplerMapperVisuFromTxtFile.py | 72 ---- src/Nerve_GIC/example/Nerve.cpp | 96 ----- src/Nerve_GIC/example/Nerve.txt | 63 ---- src/Nerve_GIC/example/VoronoiGIC.cpp | 90 ----- src/Nerve_GIC/example/km.py | 390 --------------------- src/Nerve_GIC/example/km.py.COPYRIGHT | 26 -- src/Nerve_GIC/utilities/CMakeLists.txt | 22 ++ .../utilities/KeplerMapperVisuFromTxtFile.py | 72 ++++ src/Nerve_GIC/utilities/Nerve.cpp | 96 +++++ src/Nerve_GIC/utilities/Nerve.txt | 63 ++++ src/Nerve_GIC/utilities/VoronoiGIC.cpp | 90 +++++ src/Nerve_GIC/utilities/km.py | 390 +++++++++++++++++++++ src/Nerve_GIC/utilities/km.py.COPYRIGHT | 26 ++ .../example/CMakeLists.txt | 5 - .../test/CMakeLists.txt | 14 +- src/common/doc/examples.h | 99 ++++++ src/common/doc/installation.h | 263 ++++++++++++++ src/common/doc/main_page.h | 305 +--------------- 21 files changed, 1130 insertions(+), 1188 deletions(-) delete mode 100644 src/Nerve_GIC/example/GIC.cpp delete mode 100755 src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py delete mode 100644 src/Nerve_GIC/example/Nerve.cpp delete mode 100644 src/Nerve_GIC/example/Nerve.txt delete mode 100644 src/Nerve_GIC/example/VoronoiGIC.cpp delete mode 100755 src/Nerve_GIC/example/km.py delete mode 100644 src/Nerve_GIC/example/km.py.COPYRIGHT create mode 100644 src/Nerve_GIC/utilities/CMakeLists.txt create mode 100755 src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py create mode 100644 src/Nerve_GIC/utilities/Nerve.cpp create mode 100644 src/Nerve_GIC/utilities/Nerve.txt create mode 100644 src/Nerve_GIC/utilities/VoronoiGIC.cpp create mode 100755 src/Nerve_GIC/utilities/km.py create mode 100644 src/Nerve_GIC/utilities/km.py.COPYRIGHT create mode 100644 src/common/doc/examples.h create mode 100644 src/common/doc/installation.h (limited to 'src/Persistence_representations/example') diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h index 7578cc53..344cb031 100644 --- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h +++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h @@ -177,7 +177,6 @@ namespace cover_complex { * \image html "funcGICvisu.jpg" "Visualization with neato" * * \copyright GNU General Public License v3. - * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ /** @} */ // end defgroup cover_complex @@ -186,31 +185,3 @@ namespace cover_complex { } // namespace Gudhi #endif // DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_ - - -/* * \subsection gicexample Example with cover from function - * - * This example builds the GIC of a point cloud sampled on a 3D human shape (human.off). - * The cover C comes from the preimages of intervals (with length 0.075 and gain 0) - * covering the height function (coordinate 2), - * and the graph G comes from a Rips complex built with threshold 0.075. - * Note that if the gain is too big, the number of cliques increases a lot, - * which make the computation time much larger. 
- * - * \include Nerve_GIC/GIC.cpp - * - * When launching: - * - * \code $> ./GIC ../../data/points/human.off 0.075 2 0.075 0 --v - * \endcode - * - * the program outputs SC.txt, which can be visualized with python and firefox as before: - * - * \image html "gicvisu.jpg" "Visualization with KeplerMapper" - * */ - - -/* * Using e.g. - * - * \code $> python KeplerMapperVisuFromTxtFile.py && firefox SC.html - * \endcode */ diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt index 73728dc0..434637fa 100644 --- a/src/Nerve_GIC/example/CMakeLists.txt +++ b/src/Nerve_GIC/example/CMakeLists.txt @@ -3,26 +3,14 @@ project(Nerve_GIC_examples) if (NOT CGAL_VERSION VERSION_LESS 4.8.1) - add_executable ( Nerve Nerve.cpp ) add_executable ( CoordGIC CoordGIC.cpp ) add_executable ( FuncGIC FuncGIC.cpp ) - add_executable ( VoronoiGIC VoronoiGIC.cpp ) if (TBB_FOUND) - target_link_libraries(Nerve ${TBB_LIBRARIES}) target_link_libraries(CoordGIC ${TBB_LIBRARIES}) target_link_libraries(FuncGIC ${TBB_LIBRARIES}) - target_link_libraries(VoronoiGIC ${TBB_LIBRARIES}) endif() - file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - - add_test(NAME Nerve_GIC_example_nerve COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3") - - add_test(NAME Nerve_GIC_example_VoronoiGIC COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/human.off" "100") - add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0") diff --git a/src/Nerve_GIC/example/GIC.cpp b/src/Nerve_GIC/example/GIC.cpp deleted file mode 100644 index 2bc24a4d..00000000 --- a/src/Nerve_GIC/example/GIC.cpp +++ /dev/null @@ -1,95 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#include - -#include -#include - -void usage(int nbArgs, char *const progName) { - std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off threshold coordinate resolution gain [--v] \n"; - std::cerr << " i.e.: " << progName << " ../../data/points/human.off 0.075 2 0.075 0 --v \n"; - exit(-1); // ----- >> -} - -int main(int argc, char **argv) { - if ((argc != 6) && (argc != 7)) usage(argc, argv[0]); - - using Point = std::vector; - - std::string off_file_name(argv[1]); - double threshold = atof(argv[2]); - int coord = atoi(argv[3]); - double resolution = atof(argv[4]); - double gain = atof(argv[5]); - bool verb = 0; - if (argc == 7) verb = 1; - - // ---------------------------------------------------------------------------- - // Init of a graph induced complex from an OFF file - // ---------------------------------------------------------------------------- - - Gudhi::graph_induced_complex::Graph_induced_complex GIC; - GIC.set_verbose(verb); - - bool check = GIC.read_point_cloud(off_file_name); - - if (!check) { - std::cout << "Incorrect OFF file." << std::endl; - } else { - GIC.set_color_from_coordinate(coord); - GIC.set_function_from_coordinate(coord); - - GIC.set_graph_from_rips(threshold, Gudhi::Euclidean_distance()); - - GIC.set_resolution_with_interval_length(resolution); - GIC.set_gain(gain); - GIC.set_cover_from_function(); - - GIC.find_GIC_simplices(); - - GIC.plot_TXT_for_KeplerMapper(); - - Gudhi::Simplex_tree<> stree; - GIC.create_complex(stree); - - // ---------------------------------------------------------------------------- - // Display information about the graph induced complex - // ---------------------------------------------------------------------------- - - if (verb) { - std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() - << " simplices - " << stree.num_vertices() << " vertices." << std::endl; - - std::cout << "Iterator on graph induced complex simplices" << std::endl; - for (auto f_simplex : stree.filtration_simplex_range()) { - for (auto vertex : stree.simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; - } - std::cout << std::endl; - } - } - } - - return 0; -} diff --git a/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py deleted file mode 100755 index d2897774..00000000 --- a/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python - -import km -import numpy as np -from collections import defaultdict - -"""This file is part of the Gudhi Library. The Gudhi library - (Geometric Understanding in Higher Dimensions) is a generic C++ - library for computational topology. - - Author(s): Mathieu Carriere - - Copyright (C) 2017 INRIA - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
-""" - -__author__ = "Mathieu Carriere" -__copyright__ = "Copyright (C) 2017 INRIA" -__license__ = "GPL v3" - -network = {} -mapper = km.KeplerMapper(verbose=0) -data = np.zeros((3,3)) -projected_data = mapper.fit_transform( data, projection="sum", scaler=None ) - -f = open('SC.txt','r') -nodes = defaultdict(list) -links = defaultdict(list) -custom = defaultdict(list) - -dat = f.readline() -lens = f.readline() -color = f.readline(); -param = [float(i) for i in f.readline().split(" ")] - -nums = [int(i) for i in f.readline().split(" ")] -num_nodes = nums[0] -num_edges = nums[1] - -for i in range(0,num_nodes): - point = [float(j) for j in f.readline().split(" ")] - nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ] - links[ str(int(point[0])) ] = [] - custom[ int(point[0]) ] = point[1] - -m = min([custom[i] for i in range(0,num_nodes)]) -M = max([custom[i] for i in range(0,num_nodes)]) - -for i in range(0,num_edges): - edge = [int(j) for j in f.readline().split(" ")] - links[ str(edge[0]) ].append( str(edge[1]) ) - links[ str(edge[1]) ].append( str(edge[0]) ) - -network["nodes"] = nodes -network["links"] = links -network["meta"] = lens - -mapper.visualize(network, color_function = color, path_html="SC.html", title=dat, -graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0, -height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M) diff --git a/src/Nerve_GIC/example/Nerve.cpp b/src/Nerve_GIC/example/Nerve.cpp deleted file mode 100644 index 6abdedc7..00000000 --- a/src/Nerve_GIC/example/Nerve.cpp +++ /dev/null @@ -1,96 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include - -#include -#include - -void usage(int nbArgs, char *const progName) { - std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n"; - std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n"; - exit(-1); // ----- >> -} - -int main(int argc, char **argv) { - if ((argc != 5) && (argc != 6)) usage(argc, argv[0]); - - using Point = std::vector; - - std::string off_file_name(argv[1]); - int coord = atoi(argv[2]); - int resolution = atoi(argv[3]); - double gain = atof(argv[4]); - bool verb = 0; - if (argc == 6) verb = 1; - - // -------------------------------- - // Init of a Nerve from an OFF file - // -------------------------------- - - Gudhi::cover_complex::Cover_complex SC; - SC.set_verbose(verb); - - bool check = SC.read_point_cloud(off_file_name); - - if (!check) { - std::cout << "Incorrect OFF file." 
<< std::endl; - } else { - SC.set_type("Nerve"); - - SC.set_color_from_coordinate(coord); - SC.set_function_from_coordinate(coord); - - SC.set_graph_from_OFF(); - SC.set_resolution_with_interval_number(resolution); - SC.set_gain(gain); - SC.set_cover_from_function(); - - SC.find_simplices(); - - SC.write_info(); - - Gudhi::Simplex_tree<> stree; - SC.create_complex(stree); - SC.compute_PD(); - - // ---------------------------------------------------------------------------- - // Display information about the graph induced complex - // ---------------------------------------------------------------------------- - - if (verb) { - std::cout << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - " - << stree.num_vertices() << " vertices." << std::endl; - - std::cout << "Iterator on Nerve simplices" << std::endl; - for (auto f_simplex : stree.filtration_simplex_range()) { - for (auto vertex : stree.simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; - } - std::cout << std::endl; - } - } - } - - return 0; -} diff --git a/src/Nerve_GIC/example/Nerve.txt b/src/Nerve_GIC/example/Nerve.txt deleted file mode 100644 index 839ff45e..00000000 --- a/src/Nerve_GIC/example/Nerve.txt +++ /dev/null @@ -1,63 +0,0 @@ -Min function value = -0.979672 and Max function value = 0.816414 -Interval 0 = [-0.979672, -0.761576] -Interval 1 = [-0.838551, -0.581967] -Interval 2 = [-0.658942, -0.402359] -Interval 3 = [-0.479334, -0.22275] -Interval 4 = [-0.299725, -0.0431415] -Interval 5 = [-0.120117, 0.136467] -Interval 6 = [0.059492, 0.316076] -Interval 7 = [0.239101, 0.495684] -Interval 8 = [0.418709, 0.675293] -Interval 9 = [0.598318, 0.816414] -Computing preimages... -Computing connected components... -.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox. -5 interval(s) in dimension 0: - [-0.909111, 0.00817529] - [-0.171433, 0.367392] - [-0.171433, 0.367392] - [-0.909111, 0.745853] -0 interval(s) in dimension 1: -Nerve is of dimension 1 - 41 simplices - 21 vertices. -Iterator on Nerve simplices -1 -0 -4 -4 0 -2 -2 1 -8 -8 2 -5 -5 4 -9 -9 8 -13 -13 5 -14 -14 9 -19 -19 13 -25 -32 -20 -32 20 -33 -33 25 -26 -26 14 -26 19 -42 -42 26 -34 -34 33 -27 -27 20 -35 -35 27 -35 34 -42 35 -44 -44 35 -54 -54 44 \ No newline at end of file diff --git a/src/Nerve_GIC/example/VoronoiGIC.cpp b/src/Nerve_GIC/example/VoronoiGIC.cpp deleted file mode 100644 index 32431cc2..00000000 --- a/src/Nerve_GIC/example/VoronoiGIC.cpp +++ /dev/null @@ -1,90 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Mathieu Carrière - * - * Copyright (C) 2017 INRIA - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -#include - -#include -#include - -void usage(int nbArgs, char *const progName) { - std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off N [--v] \n"; - std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n"; - exit(-1); // ----- >> -} - -int main(int argc, char **argv) { - if ((argc != 3) && (argc != 4)) usage(argc, argv[0]); - - using Point = std::vector; - - std::string off_file_name(argv[1]); - int m = atoi(argv[2]); - bool verb = 0; - if (argc == 4) verb = 1; - - // ---------------------------------------------------------------------------- - // Init of a graph induced complex from an OFF file - // ---------------------------------------------------------------------------- - - Gudhi::cover_complex::Cover_complex GIC; - GIC.set_verbose(verb); - - bool check = GIC.read_point_cloud(off_file_name); - - if (!check) { - std::cout << "Incorrect OFF file." << std::endl; - } else { - GIC.set_type("GIC"); - - GIC.set_color_from_coordinate(); - - GIC.set_graph_from_OFF(); - GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m); - - GIC.find_simplices(); - - GIC.plot_OFF(); - - Gudhi::Simplex_tree<> stree; - GIC.create_complex(stree); - - // ---------------------------------------------------------------------------- - // Display information about the graph induced complex - // ---------------------------------------------------------------------------- - - if (verb) { - std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() - << " simplices - " << stree.num_vertices() << " vertices." << std::endl; - - std::cout << "Iterator on graph induced complex simplices" << std::endl; - for (auto f_simplex : stree.filtration_simplex_range()) { - for (auto vertex : stree.simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; - } - std::cout << std::endl; - } - } - } - - return 0; -} diff --git a/src/Nerve_GIC/example/km.py b/src/Nerve_GIC/example/km.py deleted file mode 100755 index 53024aab..00000000 --- a/src/Nerve_GIC/example/km.py +++ /dev/null @@ -1,390 +0,0 @@ -from __future__ import division -import numpy as np -from collections import defaultdict -import json -import itertools -from sklearn import cluster, preprocessing, manifold -from datetime import datetime -import sys - -class KeplerMapper(object): - # With this class you can build topological networks from (high-dimensional) data. - # - # 1) Fit a projection/lens/function to a dataset and transform it. - # For instance "mean_of_row(x) for x in X" - # 2) Map this projection with overlapping intervals/hypercubes. - # Cluster the points inside the interval - # (Note: we cluster on the inverse image/original data to lessen projection loss). - # If two clusters/nodes have the same members (due to the overlap), then: - # connect these with an edge. - # 3) Visualize the network using HTML and D3.js. - # - # functions - # --------- - # fit_transform: Create a projection (lens) from a dataset - # map: Apply Mapper algorithm on this projection and build a simplicial complex - # visualize: Turns the complex dictionary into a HTML/D3.js visualization - - def __init__(self, verbose=2): - self.verbose = verbose - - self.chunk_dist = [] - self.overlap_dist = [] - self.d = [] - self.nr_cubes = 0 - self.overlap_perc = 0 - self.clusterer = False - - def fit_transform(self, X, projection="sum", scaler=preprocessing.MinMaxScaler()): - # Creates the projection/lens from X. 
- # - # Input: X. Input features as a numpy array. - # Output: projected_X. original data transformed to a projection (lens). - # - # parameters - # ---------- - # projection: Projection parameter is either a string, - # a scikit class with fit_transform, like manifold.TSNE(), - # or a list of dimension indices. - # scaler: if None, do no scaling, else apply scaling to the projection - # Default: Min-Max scaling - - self.scaler = scaler - self.projection = str(projection) - - # Detect if projection is a class (for scikit-learn) - #if str(type(projection))[1:6] == "class": #TODO: de-ugly-fy - # reducer = projection - # if self.verbose > 0: - # try: - # projection.set_params(**{"verbose":self.verbose}) - # except: - # pass - # print("\n..Projecting data using: \n\t%s\n"%str(projection)) - # X = reducer.fit_transform(X) - - # Detect if projection is a string (for standard functions) - if isinstance(projection, str): - if self.verbose > 0: - print("\n..Projecting data using: %s"%(projection)) - # Stats lenses - if projection == "sum": # sum of row - X = np.sum(X, axis=1).reshape((X.shape[0],1)) - if projection == "mean": # mean of row - X = np.mean(X, axis=1).reshape((X.shape[0],1)) - if projection == "median": # mean of row - X = np.median(X, axis=1).reshape((X.shape[0],1)) - if projection == "max": # max of row - X = np.max(X, axis=1).reshape((X.shape[0],1)) - if projection == "min": # min of row - X = np.min(X, axis=1).reshape((X.shape[0],1)) - if projection == "std": # std of row - X = np.std(X, axis=1).reshape((X.shape[0],1)) - - if projection == "dist_mean": # Distance of x to mean of X - X_mean = np.mean(X, axis=0) - X = np.sum(np.sqrt((X - X_mean)**2), axis=1).reshape((X.shape[0],1)) - - # Detect if projection is a list (with dimension indices) - if isinstance(projection, list): - if self.verbose > 0: - print("\n..Projecting data using: %s"%(str(projection))) - X = X[:,np.array(projection)] - - # Scaling - if scaler is not None: - if self.verbose > 0: - print("\n..Scaling with: %s\n"%str(scaler)) - X = scaler.fit_transform(X) - - return X - - def map(self, projected_X, inverse_X=None, clusterer=cluster.DBSCAN(eps=0.5,min_samples=3), nr_cubes=10, overlap_perc=0.1): - # This maps the data to a simplicial complex. Returns a dictionary with nodes and links. - # - # Input: projected_X. A Numpy array with the projection/lens. - # Output: complex. A dictionary with "nodes", "links" and "meta information" - # - # parameters - # ---------- - # projected_X projected_X. A Numpy array with the projection/lens. Required. - # inverse_X Numpy array or None. If None then the projection itself is used for clustering. - # clusterer Scikit-learn API compatible clustering algorithm. Default: DBSCAN - # nr_cubes Int. The number of intervals/hypercubes to create. - # overlap_perc Float. The percentage of overlap "between" the intervals/hypercubes. - - start = datetime.now() - - # Helper function - def cube_coordinates_all(nr_cubes, nr_dimensions): - # Helper function to get origin coordinates for our intervals/hypercubes - # Useful for looping no matter the number of cubes or dimensions - # Example: if there are 4 cubes per dimension and 3 dimensions - # return the bottom left (origin) coordinates of 64 hypercubes, - # as a sorted list of Numpy arrays - # TODO: elegance-ify... 
- l = [] - for x in range(nr_cubes): - l += [x] * nr_dimensions - return [np.array(list(f)) for f in sorted(set(itertools.permutations(l,nr_dimensions)))] - - nodes = defaultdict(list) - links = defaultdict(list) - complex = {} - self.nr_cubes = nr_cubes - self.clusterer = clusterer - self.overlap_perc = overlap_perc - - if self.verbose > 0: - print("Mapping on data shaped %s using dimensions\n"%(str(projected_X.shape))) - - # If inverse image is not provided, we use the projection as the inverse image (suffer projection loss) - if inverse_X is None: - inverse_X = projected_X - - # We chop up the min-max column ranges into 'nr_cubes' parts - self.chunk_dist = (np.max(projected_X, axis=0) - np.min(projected_X, axis=0))/nr_cubes - - # We calculate the overlapping windows distance - self.overlap_dist = self.overlap_perc * self.chunk_dist - - # We find our starting point - self.d = np.min(projected_X, axis=0) - - # Use a dimension index array on the projected X - # (For now this uses the entire dimensionality, but we keep for experimentation) - di = np.array([x for x in range(projected_X.shape[1])]) - - # Prefix'ing the data with ID's - ids = np.array([x for x in range(projected_X.shape[0])]) - projected_X = np.c_[ids,projected_X] - inverse_X = np.c_[ids,inverse_X] - - # Subdivide the projected data X in intervals/hypercubes with overlap - if self.verbose > 0: - total_cubes = len(cube_coordinates_all(nr_cubes,projected_X.shape[1])) - print("Creating %s hypercubes."%total_cubes) - - for i, coor in enumerate(cube_coordinates_all(nr_cubes,di.shape[0])): - # Slice the hypercube - hypercube = projected_X[ np.invert(np.any((projected_X[:,di+1] >= self.d[di] + (coor * self.chunk_dist[di])) & - (projected_X[:,di+1] < self.d[di] + (coor * self.chunk_dist[di]) + self.chunk_dist[di] + self.overlap_dist[di]) == False, axis=1 )) ] - - if self.verbose > 1: - print("There are %s points in cube_%s / %s with starting range %s"% - (hypercube.shape[0],i,total_cubes,self.d[di] + (coor * self.chunk_dist[di]))) - - # If at least one sample inside the hypercube - if hypercube.shape[0] > 0: - # Cluster the data point(s) in the cube, skipping the id-column - # Note that we apply clustering on the inverse image (original data samples) that fall inside the cube. - inverse_x = inverse_X[[int(nn) for nn in hypercube[:,0]]] - - clusterer.fit(inverse_x[:,1:]) - - if self.verbose > 1: - print("Found %s clusters in cube_%s\n"%(np.unique(clusterer.labels_[clusterer.labels_ > -1]).shape[0],i)) - - #Now for every (sample id in cube, predicted cluster label) - for a in np.c_[hypercube[:,0],clusterer.labels_]: - if a[1] != -1: #if not predicted as noise - cluster_id = str(coor[0])+"_"+str(i)+"_"+str(a[1])+"_"+str(coor)+"_"+str(self.d[di] + (coor * self.chunk_dist[di])) # TODO: de-rudimentary-ify - nodes[cluster_id].append( int(a[0]) ) # Append the member id's as integers - else: - if self.verbose > 1: - print("Cube_%s is empty.\n"%(i)) - - # Create links when clusters from different hypercubes have members with the same sample id. 
- candidates = itertools.combinations(nodes.keys(),2) - for candidate in candidates: - # if there are non-unique members in the union - if len(nodes[candidate[0]]+nodes[candidate[1]]) != len(set(nodes[candidate[0]]+nodes[candidate[1]])): - links[candidate[0]].append( candidate[1] ) - - # Reporting - if self.verbose > 0: - nr_links = 0 - for k in links: - nr_links += len(links[k]) - print("\ncreated %s edges and %s nodes in %s."%(nr_links,len(nodes),str(datetime.now()-start))) - - complex["nodes"] = nodes - complex["links"] = links - complex["meta"] = self.projection - - return complex - - def visualize(self, complex, color_function="", path_html="mapper_visualization_output.html", title="My Data", - graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=None, width_html=0, - height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=0,gain=0,minimum=0,maximum=0): - # Turns the dictionary 'complex' in a html file with d3.js - # - # Input: complex. Dictionary (output from calling .map()) - # Output: a HTML page saved as a file in 'path_html'. - # - # parameters - # ---------- - # color_function string. Not fully implemented. Default: "" (distance to origin) - # path_html file path as string. Where to save the HTML page. - # title string. HTML page document title and first heading. - # graph_link_distance int. Edge length. - # graph_gravity float. "Gravity" to center of layout. - # graph_charge int. charge between nodes. - # custom_tooltips None or Numpy Array. You could use "y"-label array for this. - # width_html int. Width of canvas. Default: 0 (full width) - # height_html int. Height of canvas. Default: 0 (full height) - # show_tooltips bool. default:True - # show_title bool. default:True - # show_meta bool. default:True - - # Format JSON for D3 graph - json_s = {} - json_s["nodes"] = [] - json_s["links"] = [] - k2e = {} # a key to incremental int dict, used for id's when linking - - for e, k in enumerate(complex["nodes"]): - # Tooltip and node color formatting, TODO: de-mess-ify - if custom_tooltips is not None: - tooltip_s = "

Cluster %s

"%k + " ".join(str(custom_tooltips[complex["nodes"][k][0]]).split(" ")) - if maximum == minimum: - tooltip_i = 0 - else: - tooltip_i = int(30*(custom_tooltips[complex["nodes"][k][0]]-minimum)/(maximum-minimum)) - json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(complex["nodes"][k][2])), "color": tooltip_i}) - else: - tooltip_s = "

Cluster %s

Contains %s members."%(k,len(complex["nodes"][k])) - json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(len(complex["nodes"][k]))), "color": str(k.split("_")[0])}) - k2e[k] = e - for k in complex["links"]: - for link in complex["links"][k]: - json_s["links"].append({"source": k2e[k], "target":k2e[link],"value":1}) - - # Width and height of graph in HTML output - if width_html == 0: - width_css = "100%" - width_js = 'document.getElementById("holder").offsetWidth-20' - else: - width_css = "%spx" % width_html - width_js = "%s" % width_html - if height_html == 0: - height_css = "100%" - height_js = 'document.getElementById("holder").offsetHeight-20' - else: - height_css = "%spx" % height_html - height_js = "%s" % height_html - - # Whether to show certain UI elements or not - if show_tooltips == False: - tooltips_display = "display: none;" - else: - tooltips_display = "" - - if show_meta == False: - meta_display = "display: none;" - else: - meta_display = "" - - if show_title == False: - title_display = "display: none;" - else: - title_display = "" - - with open(path_html,"wb") as outfile: - html = """ - - - %s | KeplerMapper - - - -
-

%s

-

- Lens
%s

- Length of intervals
%s

- Overlap percentage
%s%%

- Color Function
%s -

-
- - """%(title,width_css, height_css, title_display, meta_display, tooltips_display, title,complex["meta"],res,gain*100,color_function,width_js,height_js,graph_charge,graph_link_distance,graph_gravity,json.dumps(json_s)) - outfile.write(html.encode("utf-8")) - if self.verbose > 0: - print("\nWrote d3.js graph to '%s'"%path_html) diff --git a/src/Nerve_GIC/example/km.py.COPYRIGHT b/src/Nerve_GIC/example/km.py.COPYRIGHT deleted file mode 100644 index bef7b121..00000000 --- a/src/Nerve_GIC/example/km.py.COPYRIGHT +++ /dev/null @@ -1,26 +0,0 @@ -km.py is a fork of https://github.com/MLWave/kepler-mapper. -Only the visualization part has been kept (Mapper part has been removed). - -This file has te following Copyright : - -The MIT License (MIT) - -Copyright (c) 2015 Triskelion - HJ van Veen - info@mlwave.com - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt new file mode 100644 index 00000000..a0508dc2 --- /dev/null +++ b/src/Nerve_GIC/utilities/CMakeLists.txt @@ -0,0 +1,22 @@ +cmake_minimum_required(VERSION 2.6) +project(Nerve_GIC_examples) + +if (NOT CGAL_VERSION VERSION_LESS 4.8.1) + + add_executable ( Nerve Nerve.cpp ) + add_executable ( VoronoiGIC VoronoiGIC.cpp ) + + if (TBB_FOUND) + target_link_libraries(Nerve ${TBB_LIBRARIES}) + target_link_libraries(VoronoiGIC ${TBB_LIBRARIES}) + endif() + + file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + + add_test(NAME Nerve_GIC_utilities_nerve COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3") + + add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/human.off" "100") + +endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) diff --git a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py new file mode 100755 index 00000000..d2897774 --- /dev/null +++ b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +import km +import numpy as np +from collections import defaultdict + +"""This file is part of the Gudhi Library. The Gudhi library + (Geometric Understanding in Higher Dimensions) is a generic C++ + library for computational topology. 
+ + Author(s): Mathieu Carriere + + Copyright (C) 2017 INRIA + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" + +__author__ = "Mathieu Carriere" +__copyright__ = "Copyright (C) 2017 INRIA" +__license__ = "GPL v3" + +network = {} +mapper = km.KeplerMapper(verbose=0) +data = np.zeros((3,3)) +projected_data = mapper.fit_transform( data, projection="sum", scaler=None ) + +f = open('SC.txt','r') +nodes = defaultdict(list) +links = defaultdict(list) +custom = defaultdict(list) + +dat = f.readline() +lens = f.readline() +color = f.readline(); +param = [float(i) for i in f.readline().split(" ")] + +nums = [int(i) for i in f.readline().split(" ")] +num_nodes = nums[0] +num_edges = nums[1] + +for i in range(0,num_nodes): + point = [float(j) for j in f.readline().split(" ")] + nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ] + links[ str(int(point[0])) ] = [] + custom[ int(point[0]) ] = point[1] + +m = min([custom[i] for i in range(0,num_nodes)]) +M = max([custom[i] for i in range(0,num_nodes)]) + +for i in range(0,num_edges): + edge = [int(j) for j in f.readline().split(" ")] + links[ str(edge[0]) ].append( str(edge[1]) ) + links[ str(edge[1]) ].append( str(edge[0]) ) + +network["nodes"] = nodes +network["links"] = links +network["meta"] = lens + +mapper.visualize(network, color_function = color, path_html="SC.html", title=dat, +graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0, +height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M) diff --git a/src/Nerve_GIC/utilities/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp new file mode 100644 index 00000000..6abdedc7 --- /dev/null +++ b/src/Nerve_GIC/utilities/Nerve.cpp @@ -0,0 +1,96 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include + +#include +#include + +void usage(int nbArgs, char *const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n"; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if ((argc != 5) && (argc != 6)) usage(argc, argv[0]); + + using Point = std::vector; + + std::string off_file_name(argv[1]); + int coord = atoi(argv[2]); + int resolution = atoi(argv[3]); + double gain = atof(argv[4]); + bool verb = 0; + if (argc == 6) verb = 1; + + // -------------------------------- + // Init of a Nerve from an OFF file + // -------------------------------- + + Gudhi::cover_complex::Cover_complex SC; + SC.set_verbose(verb); + + bool check = SC.read_point_cloud(off_file_name); + + if (!check) { + std::cout << "Incorrect OFF file." << std::endl; + } else { + SC.set_type("Nerve"); + + SC.set_color_from_coordinate(coord); + SC.set_function_from_coordinate(coord); + + SC.set_graph_from_OFF(); + SC.set_resolution_with_interval_number(resolution); + SC.set_gain(gain); + SC.set_cover_from_function(); + + SC.find_simplices(); + + SC.write_info(); + + Gudhi::Simplex_tree<> stree; + SC.create_complex(stree); + SC.compute_PD(); + + // ---------------------------------------------------------------------------- + // Display information about the graph induced complex + // ---------------------------------------------------------------------------- + + if (verb) { + std::cout << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - " + << stree.num_vertices() << " vertices." << std::endl; + + std::cout << "Iterator on Nerve simplices" << std::endl; + for (auto f_simplex : stree.filtration_simplex_range()) { + for (auto vertex : stree.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; + } + } + } + + return 0; +} diff --git a/src/Nerve_GIC/utilities/Nerve.txt b/src/Nerve_GIC/utilities/Nerve.txt new file mode 100644 index 00000000..839ff45e --- /dev/null +++ b/src/Nerve_GIC/utilities/Nerve.txt @@ -0,0 +1,63 @@ +Min function value = -0.979672 and Max function value = 0.816414 +Interval 0 = [-0.979672, -0.761576] +Interval 1 = [-0.838551, -0.581967] +Interval 2 = [-0.658942, -0.402359] +Interval 3 = [-0.479334, -0.22275] +Interval 4 = [-0.299725, -0.0431415] +Interval 5 = [-0.120117, 0.136467] +Interval 6 = [0.059492, 0.316076] +Interval 7 = [0.239101, 0.495684] +Interval 8 = [0.418709, 0.675293] +Interval 9 = [0.598318, 0.816414] +Computing preimages... +Computing connected components... +.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox. +5 interval(s) in dimension 0: + [-0.909111, 0.00817529] + [-0.171433, 0.367392] + [-0.171433, 0.367392] + [-0.909111, 0.745853] +0 interval(s) in dimension 1: +Nerve is of dimension 1 - 41 simplices - 21 vertices. 
+Iterator on Nerve simplices +1 +0 +4 +4 0 +2 +2 1 +8 +8 2 +5 +5 4 +9 +9 8 +13 +13 5 +14 +14 9 +19 +19 13 +25 +32 +20 +32 20 +33 +33 25 +26 +26 14 +26 19 +42 +42 26 +34 +34 33 +27 +27 20 +35 +35 27 +35 34 +42 35 +44 +44 35 +54 +54 44 \ No newline at end of file diff --git a/src/Nerve_GIC/utilities/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp new file mode 100644 index 00000000..32431cc2 --- /dev/null +++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp @@ -0,0 +1,90 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include + +#include +#include + +void usage(int nbArgs, char *const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " filename.off N [--v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n"; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if ((argc != 3) && (argc != 4)) usage(argc, argv[0]); + + using Point = std::vector; + + std::string off_file_name(argv[1]); + int m = atoi(argv[2]); + bool verb = 0; + if (argc == 4) verb = 1; + + // ---------------------------------------------------------------------------- + // Init of a graph induced complex from an OFF file + // ---------------------------------------------------------------------------- + + Gudhi::cover_complex::Cover_complex GIC; + GIC.set_verbose(verb); + + bool check = GIC.read_point_cloud(off_file_name); + + if (!check) { + std::cout << "Incorrect OFF file." << std::endl; + } else { + GIC.set_type("GIC"); + + GIC.set_color_from_coordinate(); + + GIC.set_graph_from_OFF(); + GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m); + + GIC.find_simplices(); + + GIC.plot_OFF(); + + Gudhi::Simplex_tree<> stree; + GIC.create_complex(stree); + + // ---------------------------------------------------------------------------- + // Display information about the graph induced complex + // ---------------------------------------------------------------------------- + + if (verb) { + std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() + << " simplices - " << stree.num_vertices() << " vertices." 
<< std::endl; + + std::cout << "Iterator on graph induced complex simplices" << std::endl; + for (auto f_simplex : stree.filtration_simplex_range()) { + for (auto vertex : stree.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; + } + } + } + + return 0; +} diff --git a/src/Nerve_GIC/utilities/km.py b/src/Nerve_GIC/utilities/km.py new file mode 100755 index 00000000..53024aab --- /dev/null +++ b/src/Nerve_GIC/utilities/km.py @@ -0,0 +1,390 @@ +from __future__ import division +import numpy as np +from collections import defaultdict +import json +import itertools +from sklearn import cluster, preprocessing, manifold +from datetime import datetime +import sys + +class KeplerMapper(object): + # With this class you can build topological networks from (high-dimensional) data. + # + # 1) Fit a projection/lens/function to a dataset and transform it. + # For instance "mean_of_row(x) for x in X" + # 2) Map this projection with overlapping intervals/hypercubes. + # Cluster the points inside the interval + # (Note: we cluster on the inverse image/original data to lessen projection loss). + # If two clusters/nodes have the same members (due to the overlap), then: + # connect these with an edge. + # 3) Visualize the network using HTML and D3.js. + # + # functions + # --------- + # fit_transform: Create a projection (lens) from a dataset + # map: Apply Mapper algorithm on this projection and build a simplicial complex + # visualize: Turns the complex dictionary into a HTML/D3.js visualization + + def __init__(self, verbose=2): + self.verbose = verbose + + self.chunk_dist = [] + self.overlap_dist = [] + self.d = [] + self.nr_cubes = 0 + self.overlap_perc = 0 + self.clusterer = False + + def fit_transform(self, X, projection="sum", scaler=preprocessing.MinMaxScaler()): + # Creates the projection/lens from X. + # + # Input: X. Input features as a numpy array. + # Output: projected_X. original data transformed to a projection (lens). + # + # parameters + # ---------- + # projection: Projection parameter is either a string, + # a scikit class with fit_transform, like manifold.TSNE(), + # or a list of dimension indices. 
+ # scaler: if None, do no scaling, else apply scaling to the projection + # Default: Min-Max scaling + + self.scaler = scaler + self.projection = str(projection) + + # Detect if projection is a class (for scikit-learn) + #if str(type(projection))[1:6] == "class": #TODO: de-ugly-fy + # reducer = projection + # if self.verbose > 0: + # try: + # projection.set_params(**{"verbose":self.verbose}) + # except: + # pass + # print("\n..Projecting data using: \n\t%s\n"%str(projection)) + # X = reducer.fit_transform(X) + + # Detect if projection is a string (for standard functions) + if isinstance(projection, str): + if self.verbose > 0: + print("\n..Projecting data using: %s"%(projection)) + # Stats lenses + if projection == "sum": # sum of row + X = np.sum(X, axis=1).reshape((X.shape[0],1)) + if projection == "mean": # mean of row + X = np.mean(X, axis=1).reshape((X.shape[0],1)) + if projection == "median": # mean of row + X = np.median(X, axis=1).reshape((X.shape[0],1)) + if projection == "max": # max of row + X = np.max(X, axis=1).reshape((X.shape[0],1)) + if projection == "min": # min of row + X = np.min(X, axis=1).reshape((X.shape[0],1)) + if projection == "std": # std of row + X = np.std(X, axis=1).reshape((X.shape[0],1)) + + if projection == "dist_mean": # Distance of x to mean of X + X_mean = np.mean(X, axis=0) + X = np.sum(np.sqrt((X - X_mean)**2), axis=1).reshape((X.shape[0],1)) + + # Detect if projection is a list (with dimension indices) + if isinstance(projection, list): + if self.verbose > 0: + print("\n..Projecting data using: %s"%(str(projection))) + X = X[:,np.array(projection)] + + # Scaling + if scaler is not None: + if self.verbose > 0: + print("\n..Scaling with: %s\n"%str(scaler)) + X = scaler.fit_transform(X) + + return X + + def map(self, projected_X, inverse_X=None, clusterer=cluster.DBSCAN(eps=0.5,min_samples=3), nr_cubes=10, overlap_perc=0.1): + # This maps the data to a simplicial complex. Returns a dictionary with nodes and links. + # + # Input: projected_X. A Numpy array with the projection/lens. + # Output: complex. A dictionary with "nodes", "links" and "meta information" + # + # parameters + # ---------- + # projected_X projected_X. A Numpy array with the projection/lens. Required. + # inverse_X Numpy array or None. If None then the projection itself is used for clustering. + # clusterer Scikit-learn API compatible clustering algorithm. Default: DBSCAN + # nr_cubes Int. The number of intervals/hypercubes to create. + # overlap_perc Float. The percentage of overlap "between" the intervals/hypercubes. + + start = datetime.now() + + # Helper function + def cube_coordinates_all(nr_cubes, nr_dimensions): + # Helper function to get origin coordinates for our intervals/hypercubes + # Useful for looping no matter the number of cubes or dimensions + # Example: if there are 4 cubes per dimension and 3 dimensions + # return the bottom left (origin) coordinates of 64 hypercubes, + # as a sorted list of Numpy arrays + # TODO: elegance-ify... 
+ l = [] + for x in range(nr_cubes): + l += [x] * nr_dimensions + return [np.array(list(f)) for f in sorted(set(itertools.permutations(l,nr_dimensions)))] + + nodes = defaultdict(list) + links = defaultdict(list) + complex = {} + self.nr_cubes = nr_cubes + self.clusterer = clusterer + self.overlap_perc = overlap_perc + + if self.verbose > 0: + print("Mapping on data shaped %s using dimensions\n"%(str(projected_X.shape))) + + # If inverse image is not provided, we use the projection as the inverse image (suffer projection loss) + if inverse_X is None: + inverse_X = projected_X + + # We chop up the min-max column ranges into 'nr_cubes' parts + self.chunk_dist = (np.max(projected_X, axis=0) - np.min(projected_X, axis=0))/nr_cubes + + # We calculate the overlapping windows distance + self.overlap_dist = self.overlap_perc * self.chunk_dist + + # We find our starting point + self.d = np.min(projected_X, axis=0) + + # Use a dimension index array on the projected X + # (For now this uses the entire dimensionality, but we keep for experimentation) + di = np.array([x for x in range(projected_X.shape[1])]) + + # Prefix'ing the data with ID's + ids = np.array([x for x in range(projected_X.shape[0])]) + projected_X = np.c_[ids,projected_X] + inverse_X = np.c_[ids,inverse_X] + + # Subdivide the projected data X in intervals/hypercubes with overlap + if self.verbose > 0: + total_cubes = len(cube_coordinates_all(nr_cubes,projected_X.shape[1])) + print("Creating %s hypercubes."%total_cubes) + + for i, coor in enumerate(cube_coordinates_all(nr_cubes,di.shape[0])): + # Slice the hypercube + hypercube = projected_X[ np.invert(np.any((projected_X[:,di+1] >= self.d[di] + (coor * self.chunk_dist[di])) & + (projected_X[:,di+1] < self.d[di] + (coor * self.chunk_dist[di]) + self.chunk_dist[di] + self.overlap_dist[di]) == False, axis=1 )) ] + + if self.verbose > 1: + print("There are %s points in cube_%s / %s with starting range %s"% + (hypercube.shape[0],i,total_cubes,self.d[di] + (coor * self.chunk_dist[di]))) + + # If at least one sample inside the hypercube + if hypercube.shape[0] > 0: + # Cluster the data point(s) in the cube, skipping the id-column + # Note that we apply clustering on the inverse image (original data samples) that fall inside the cube. + inverse_x = inverse_X[[int(nn) for nn in hypercube[:,0]]] + + clusterer.fit(inverse_x[:,1:]) + + if self.verbose > 1: + print("Found %s clusters in cube_%s\n"%(np.unique(clusterer.labels_[clusterer.labels_ > -1]).shape[0],i)) + + #Now for every (sample id in cube, predicted cluster label) + for a in np.c_[hypercube[:,0],clusterer.labels_]: + if a[1] != -1: #if not predicted as noise + cluster_id = str(coor[0])+"_"+str(i)+"_"+str(a[1])+"_"+str(coor)+"_"+str(self.d[di] + (coor * self.chunk_dist[di])) # TODO: de-rudimentary-ify + nodes[cluster_id].append( int(a[0]) ) # Append the member id's as integers + else: + if self.verbose > 1: + print("Cube_%s is empty.\n"%(i)) + + # Create links when clusters from different hypercubes have members with the same sample id. 
+ candidates = itertools.combinations(nodes.keys(),2) + for candidate in candidates: + # if there are non-unique members in the union + if len(nodes[candidate[0]]+nodes[candidate[1]]) != len(set(nodes[candidate[0]]+nodes[candidate[1]])): + links[candidate[0]].append( candidate[1] ) + + # Reporting + if self.verbose > 0: + nr_links = 0 + for k in links: + nr_links += len(links[k]) + print("\ncreated %s edges and %s nodes in %s."%(nr_links,len(nodes),str(datetime.now()-start))) + + complex["nodes"] = nodes + complex["links"] = links + complex["meta"] = self.projection + + return complex + + def visualize(self, complex, color_function="", path_html="mapper_visualization_output.html", title="My Data", + graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=None, width_html=0, + height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=0,gain=0,minimum=0,maximum=0): + # Turns the dictionary 'complex' in a html file with d3.js + # + # Input: complex. Dictionary (output from calling .map()) + # Output: a HTML page saved as a file in 'path_html'. + # + # parameters + # ---------- + # color_function string. Not fully implemented. Default: "" (distance to origin) + # path_html file path as string. Where to save the HTML page. + # title string. HTML page document title and first heading. + # graph_link_distance int. Edge length. + # graph_gravity float. "Gravity" to center of layout. + # graph_charge int. charge between nodes. + # custom_tooltips None or Numpy Array. You could use "y"-label array for this. + # width_html int. Width of canvas. Default: 0 (full width) + # height_html int. Height of canvas. Default: 0 (full height) + # show_tooltips bool. default:True + # show_title bool. default:True + # show_meta bool. default:True + + # Format JSON for D3 graph + json_s = {} + json_s["nodes"] = [] + json_s["links"] = [] + k2e = {} # a key to incremental int dict, used for id's when linking + + for e, k in enumerate(complex["nodes"]): + # Tooltip and node color formatting, TODO: de-mess-ify + if custom_tooltips is not None: + tooltip_s = "

Cluster %s

"%k + " ".join(str(custom_tooltips[complex["nodes"][k][0]]).split(" ")) + if maximum == minimum: + tooltip_i = 0 + else: + tooltip_i = int(30*(custom_tooltips[complex["nodes"][k][0]]-minimum)/(maximum-minimum)) + json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(complex["nodes"][k][2])), "color": tooltip_i}) + else: + tooltip_s = "

Cluster %s

Contains %s members."%(k,len(complex["nodes"][k])) + json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(len(complex["nodes"][k]))), "color": str(k.split("_")[0])}) + k2e[k] = e + for k in complex["links"]: + for link in complex["links"][k]: + json_s["links"].append({"source": k2e[k], "target":k2e[link],"value":1}) + + # Width and height of graph in HTML output + if width_html == 0: + width_css = "100%" + width_js = 'document.getElementById("holder").offsetWidth-20' + else: + width_css = "%spx" % width_html + width_js = "%s" % width_html + if height_html == 0: + height_css = "100%" + height_js = 'document.getElementById("holder").offsetHeight-20' + else: + height_css = "%spx" % height_html + height_js = "%s" % height_html + + # Whether to show certain UI elements or not + if show_tooltips == False: + tooltips_display = "display: none;" + else: + tooltips_display = "" + + if show_meta == False: + meta_display = "display: none;" + else: + meta_display = "" + + if show_title == False: + title_display = "display: none;" + else: + title_display = "" + + with open(path_html,"wb") as outfile: + html = """ + + + %s | KeplerMapper + + + +
+

%s

+

+ Lens
%s

+ Length of intervals
%s

+ Overlap percentage
%s%%

+ Color Function
%s +

+
+ + """%(title,width_css, height_css, title_display, meta_display, tooltips_display, title,complex["meta"],res,gain*100,color_function,width_js,height_js,graph_charge,graph_link_distance,graph_gravity,json.dumps(json_s)) + outfile.write(html.encode("utf-8")) + if self.verbose > 0: + print("\nWrote d3.js graph to '%s'"%path_html) diff --git a/src/Nerve_GIC/utilities/km.py.COPYRIGHT b/src/Nerve_GIC/utilities/km.py.COPYRIGHT new file mode 100644 index 00000000..bef7b121 --- /dev/null +++ b/src/Nerve_GIC/utilities/km.py.COPYRIGHT @@ -0,0 +1,26 @@ +km.py is a fork of https://github.com/MLWave/kepler-mapper. +Only the visualization part has been kept (Mapper part has been removed). + +This file has te following Copyright : + +The MIT License (MIT) + +Copyright (c) 2015 Triskelion - HJ van Veen - info@mlwave.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
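(Aside, for context on the utilities added above — not part of the patches themselves.) KeplerMapperVisuFromTxtFile.py reads a file named SC.txt whose layout can be inferred from its readline() calls: a title line, a lens description, a color-function description, a "resolution gain" line, a "num_nodes num_edges" line, then one "id value size" line per node and one "id id" line per edge; the .txt written by the Nerve / VoronoiGIC utilities follows this layout. The minimal Python sketch below writes a toy file in that inferred layout; the write_toy_sc helper and all of its values are hypothetical illustrations, not part of GUDHI.

#!/usr/bin/env python
# Hypothetical sketch: emit a minimal SC.txt in the layout inferred from the
# parser in KeplerMapperVisuFromTxtFile.py above. All values are toy data.
def write_toy_sc(path="SC.txt"):
    nodes = [(0, -0.5, 3), (1, 0.1, 5), (2, 0.8, 2)]  # (node id, function value, cluster size)
    edges = [(0, 1), (1, 2)]                          # pairs of node ids
    with open(path, "w") as f:
        f.write("toy point cloud\n")                  # title (read into 'dat')
        f.write("coordinate 2\n")                     # lens description (read into 'lens')
        f.write("coordinate 2\n")                     # color function (read into 'color')
        f.write("10 0.3\n")                           # resolution and gain (read into 'param')
        f.write("%d %d\n" % (len(nodes), len(edges))) # node and edge counts (read into 'nums')
        for node_id, value, size in nodes:
            f.write("%d %g %d\n" % (node_id, value, size))
        for a, b in edges:
            f.write("%d %d\n" % (a, b))

if __name__ == "__main__":
    write_toy_sc()

Running KeplerMapperVisuFromTxtFile.py from the same directory should then turn this toy SC.txt into SC.html, in the same way it does for the SC.txt produced by the Nerve and VoronoiGIC utilities.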
diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index b8ce8ea6..eb3258f8 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -2,28 +2,23 @@ cmake_minimum_required(VERSION 2.6) project(Persistence_representations_example) add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp ) -target_link_libraries(Persistence_representations_example_landscape_on_grid ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_landscape_on_grid COMMAND $) add_executable ( Persistence_representations_example_landscape persistence_landscape.cpp ) -target_link_libraries(Persistence_representations_example_landscape ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_landscape COMMAND $) add_executable ( Persistence_representations_example_intervals persistence_intervals.cpp ) -target_link_libraries(Persistence_representations_example_intervals ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_intervals COMMAND $ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers") add_executable ( Persistence_representations_example_vectors persistence_vectors.cpp ) -target_link_libraries(Persistence_representations_example_vectors ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_vectors COMMAND $) add_executable ( Persistence_representations_example_heat_maps persistence_heat_maps.cpp ) -target_link_libraries(Persistence_representations_example_heat_maps ${Boost_SYSTEM_LIBRARY}) add_test(NAME Persistence_representations_example_heat_maps COMMAND $) diff --git a/src/Persistence_representations/test/CMakeLists.txt b/src/Persistence_representations/test/CMakeLists.txt index 4483de07..335a71ef 100644 --- a/src/Persistence_representations/test/CMakeLists.txt +++ b/src/Persistence_representations/test/CMakeLists.txt @@ -6,38 +6,38 @@ include(GUDHI_test_coverage) # copy data directory for tests purpose. 
file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) add_executable ( Persistence_intervals_test_unit persistence_intervals_test.cpp ) -target_link_libraries(Persistence_intervals_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +target_link_libraries(Persistence_intervals_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(Persistence_intervals_test_unit) add_executable (Vector_representation_test_unit vector_representation_test.cpp ) -target_link_libraries(Vector_representation_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +target_link_libraries(Vector_representation_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(Vector_representation_test_unit) add_executable (Persistence_lanscapes_test_unit persistence_lanscapes_test.cpp ) -target_link_libraries(Persistence_lanscapes_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +target_link_libraries(Persistence_lanscapes_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(Persistence_lanscapes_test_unit) add_executable ( Persistence_lanscapes_on_grid_test_unit persistence_lanscapes_on_grid_test.cpp ) -target_link_libraries(Persistence_lanscapes_on_grid_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +target_link_libraries(Persistence_lanscapes_on_grid_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(Persistence_lanscapes_on_grid_test_unit) add_executable (Persistence_heat_maps_test_unit persistence_heat_maps_test.cpp ) -target_link_libraries(Persistence_heat_maps_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +target_link_libraries(Persistence_heat_maps_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(Persistence_heat_maps_test_unit) add_executable ( Read_persistence_from_file_test_unit read_persistence_from_file_test.cpp ) -target_link_libraries(Read_persistence_from_file_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +target_link_libraries(Read_persistence_from_file_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) gudhi_add_coverage_test(Read_persistence_from_file_test_unit) if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) add_executable (Persistence_intervals_with_distances_test_unit persistence_intervals_with_distances_test.cpp ) - target_link_libraries(Persistence_intervals_with_distances_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + target_link_libraries(Persistence_intervals_with_distances_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) if (TBB_FOUND) target_link_libraries(Persistence_intervals_with_distances_test_unit ${TBB_LIBRARIES}) endif(TBB_FOUND) diff --git a/src/common/doc/examples.h b/src/common/doc/examples.h new file mode 100644 index 00000000..40f202c7 --- /dev/null +++ b/src/common/doc/examples.h @@ -0,0 +1,99 @@ +// List of GUDHI examples - Doxygen needs at least a file tag to analyse comments +// In user_version, `find . -name "*.cpp"` in example and utilities folders +/*! 
@file Examples + * @example Alpha_complex/Alpha_complex_from_off.cpp + * @example Alpha_complex/Alpha_complex_from_points.cpp + * @example Bottleneck_distance/bottleneck_basic_example.cpp + * @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp + * @example Witness_complex/example_nearest_landmark_table.cpp + * @example Witness_complex/example_witness_complex_off.cpp + * @example Witness_complex/example_witness_complex_sphere.cpp + * @example Witness_complex/example_strong_witness_complex_off.cpp + * @example Simplex_tree/mini_simplex_tree.cpp + * @example Simplex_tree/graph_expansion_with_blocker.cpp + * @example Simplex_tree/simple_simplex_tree.cpp + * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp + * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp + * @example Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp + * @example Persistent_cohomology/plain_homology.cpp + * @example Persistent_cohomology/persistence_from_file.cpp + * @example Persistent_cohomology/rips_persistence_step_by_step.cpp + * @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp + * @example Persistent_cohomology/custom_persistence_sort.cpp + * @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp + * @example Persistent_cohomology/rips_multifield_persistence.cpp + * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp + * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp + * @example Skeleton_blocker/Skeleton_blocker_link.cpp + * @example Contraction/Garland_heckbert.cpp + * @example Contraction/Rips_contraction.cpp + * @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp + * @example common/example_CGAL_3D_points_off_reader.cpp + * @example common/example_vector_double_points_off_reader.cpp + * @example common/example_CGAL_points_off_reader.cpp + * @example Rips_complex/example_one_skeleton_rips_from_distance_matrix.cpp + * @example Rips_complex/example_one_skeleton_rips_from_points.cpp + * @example Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp + * @example Rips_complex/example_rips_complex_from_off_file.cpp + * @example Persistence_representations/persistence_intervals.cpp + * @example Persistence_representations/persistence_vectors.cpp + * @example Persistence_representations/persistence_heat_maps.cpp + * @example Persistence_representations/persistence_landscape_on_grid.cpp + * @example Persistence_representations/persistence_landscape.cpp + * @example Tangential_complex/example_basic.cpp + * @example Tangential_complex/example_with_perturb.cpp + * @example Subsampling/example_custom_kernel.cpp + * @example Subsampling/example_choose_n_farthest_points.cpp + * @example Subsampling/example_sparsify_point_set.cpp + * @example Subsampling/example_pick_n_random_points.cpp + * @example Nerve_GIC/CoordGIC.cpp + * @example Nerve_GIC/Nerve.cpp + * @example Nerve_GIC/FuncGIC.cpp + * @example Nerve_GIC/VoronoiGIC.cpp + * @example Spatial_searching/example_spatial_searching.cpp + * @example Alpha_complex/alpha_complex_3d_persistence.cpp + * @example Alpha_complex/alpha_complex_persistence.cpp + * @example Alpha_complex/weighted_periodic_alpha_complex_3d_persistence.cpp + * @example Alpha_complex/weighted_alpha_complex_3d_persistence.cpp + * @example Alpha_complex/periodic_alpha_complex_3d_persistence.cpp + * @example Alpha_complex/exact_alpha_complex_3d_persistence.cpp + * @example Bottleneck_distance/bottleneck_distance.cpp + * @example 
Witness_complex/weak_witness_persistence.cpp + * @example Witness_complex/strong_witness_persistence.cpp + * @example Bitmap_cubical_complex/cubical_complex_persistence.cpp + * @example Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp + * @example common/off_file_from_shape_generator.cpp + * @example Rips_complex/rips_distance_matrix_persistence.cpp + * @example Rips_complex/rips_persistence.cpp + * @example Persistence_representations/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp + * @example Persistence_representations/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp + * @example Persistence_representations/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp + * @example Persistence_representations/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp + * @example Persistence_representations/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp + * @example Persistence_representations/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp + * @example Persistence_representations/persistence_intervals/compute_number_of_dominant_intervals.cpp + * @example Persistence_representations/persistence_intervals/plot_persistence_Betti_numbers.cpp + * @example Persistence_representations/persistence_intervals/plot_persistence_intervals.cpp + * @example Persistence_representations/persistence_intervals/plot_histogram_of_intervals_lengths.cpp + * @example Persistence_representations/persistence_intervals/compute_bottleneck_distance.cpp + * @example Persistence_representations/persistence_heat_maps/create_pssk.cpp + * @example Persistence_representations/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp + * @example Persistence_representations/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp + * @example Persistence_representations/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp + * @example Persistence_representations/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp + * @example Persistence_representations/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp + * @example Persistence_representations/persistence_heat_maps/average_persistence_heat_maps.cpp + * @example Persistence_representations/persistence_heat_maps/plot_persistence_heat_map.cpp + * @example Persistence_representations/persistence_heat_maps/create_persistence_heat_maps.cpp + * @example Persistence_representations/persistence_vectors/plot_persistence_vectors.cpp + * @example Persistence_representations/persistence_vectors/compute_distance_of_persistence_vectors.cpp + * @example Persistence_representations/persistence_vectors/average_persistence_vectors.cpp + * @example Persistence_representations/persistence_vectors/create_persistence_vectors.cpp + * @example Persistence_representations/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp + * @example Persistence_representations/persistence_landscapes/average_landscapes.cpp + * @example Persistence_representations/persistence_landscapes/compute_scalar_product_of_landscapes.cpp + * @example Persistence_representations/persistence_landscapes/create_landscapes.cpp + * @example Persistence_representations/persistence_landscapes/compute_distance_of_landscapes.cpp + * @example Persistence_representations/persistence_landscapes/plot_landscapes.cpp + */ + diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h new file mode 
100644 index 00000000..25675cc5 --- /dev/null +++ b/src/common/doc/installation.h @@ -0,0 +1,263 @@ +/*! \page installation GUDHI installation + * \tableofcontents + * As GUDHI is a header only library, there is no need to install the library. + * + * Examples of GUDHI headers inclusion can be found in \ref demos. + * + * \section compiling Compiling + * The library uses c++11 and requires Boost with version 1.48.0 or + * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015. + * + * \subsection demos Demos and examples + * To build the demos and examples, run the following commands in a terminal: +\verbatim cd /path-to-gudhi/ +mkdir build +cd build/ +cmake .. +make \endverbatim + * A list of examples is available here. + * + * \subsection testsuites Test suites + * To test your build, run the following command in a terminal: + * \verbatim make test \endverbatim + * + * \subsection documentationgeneration Documentation + * To generate the documentation, Doxygen is required. + * Run the following command in a terminal: +\verbatim +make doxygen +# Documentation will be generated in the folder YYYY-MM-DD-hh-mm-ss_GUDHI_X.Y.Z/doc/html/ +# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder` +\endverbatim + * + * \section optionallibrary Optional third-party library + * \subsection gmp GMP + * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision + * arithmetic, operating on signed integers, rational numbers, and floating point numbers. + * + * The following example requires the GNU Multiple Precision Arithmetic + * Library (GMP) and will not be built if GMP is not installed: + * \li + * Persistent_cohomology/rips_multifield_persistence.cpp + * + * Having GMP version 4.2 or higher installed is recommended. + * + * \subsection cgal CGAL + * The \ref alpha_complex data structure, \ref bottleneck_distance, and few examples requires CGAL, which is a C++ + * library which provides easy access to efficient and reliable geometric algorithms. + * + * \note There is no need to install CGAL, you can just cmake . && make CGAL (or even + * cmake -DCGAL_HEADER_ONLY=ON . for CGAL version ≥ 4.8.0), thereafter you will be able to compile + * GUDHI by calling cmake -DCGAL_DIR=/your/path/to/CGAL-X.Y .. && make + * + * Having CGAL version 4.4.0 or higher installed is recommended. 
The procedure to install this library according to + * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html + * + * The following examples/utilities require the Computational Geometry Algorithms + * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: + * \li + * Alpha_complex/alpha_complex_3d_persistence.cpp + * \li + * Alpha_complex/exact_alpha_complex_3d_persistence.cpp + * \li + * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp + * + * The following examples/utilities require CGAL version ≥ 4.6.0: + * \li + * Witness_complex/strong_witness_persistence.cpp + * \li + * Witness_complex/weak_witness_persistence.cpp + * \li + * Witness_complex/example_strong_witness_complex_off.cpp + * \li + * Witness_complex/example_witness_complex_off.cpp + * \li + * Witness_complex/example_witness_complex_sphere.cpp + * + * The following example requires CGAL version ≥ 4.7.0: + * \li + * Alpha_complex/Alpha_complex_from_off.cpp + * \li + * Alpha_complex/Alpha_complex_from_points.cpp + * \li + * Alpha_complex/alpha_complex_persistence.cpp + * \li + * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/custom_persistence_sort.cpp + * + * The following example requires CGAL version ≥ 4.8.1: + * \li + * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp + * \li + * Bottleneck_distance/bottleneck_basic_example.cpp + * \li + * Bottleneck_distance/bottleneck_distance.cpp + * \li + * Nerve_GIC/CoordGIC.cpp + * \li + * Nerve_GIC/FuncGIC.cpp + * \li + * Nerve_GIC/Nerve.cpp + * \li + * Nerve_GIC/VoronoiGIC.cpp + * \li + * Spatial_searching/example_spatial_searching.cpp + * \li + * Subsampling/example_choose_n_farthest_points.cpp + * \li + * Subsampling/example_custom_kernel.cpp + * \li + * Subsampling/example_pick_n_random_points.cpp + * \li + * Subsampling/example_sparsify_point_set.cpp + * \li + * Tangential_complex/example_basic.cpp + * \li + * Tangential_complex/example_with_perturb.cpp + * + * \subsection eigen3 Eigen3 + * The \ref alpha_complex data structure and few examples requires + * Eigen3 is a C++ template library for linear algebra: + * matrices, vectors, numerical solvers, and related algorithms. 
+ * + * The following examples/utilities require the Eigen3 and will not be + * built if Eigen3 is not installed: + * \li + * Alpha_complex/Alpha_complex_from_off.cpp + * \li + * Alpha_complex/Alpha_complex_from_points.cpp + * \li + * Alpha_complex/alpha_complex_persistence.cpp + * \li + * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp + * \li + * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp + * \li + * Persistent_cohomology/custom_persistence_sort.cpp + * \li + * Spatial_searching/example_spatial_searching.cpp + * \li + * Subsampling/example_choose_n_farthest_points.cpp + * \li + * Subsampling/example_custom_kernel.cpp + * \li + * Subsampling/example_pick_n_random_points.cpp + * \li + * Subsampling/example_sparsify_point_set.cpp + * \li + * Tangential_complex/example_basic.cpp + * \li + * Tangential_complex/example_with_perturb.cpp + * \li + * Witness_complex/strong_witness_persistence.cpp + * \li + * Witness_complex/weak_witness_persistence.cpp + * \li + * Witness_complex/example_strong_witness_complex_off.cpp + * \li + * Witness_complex/example_witness_complex_off.cpp + * \li + * Witness_complex/example_witness_complex_sphere.cpp + * + * \subsection tbb Threading Building Blocks + * Intel® TBB lets you easily write parallel + * C++ programs that take full advantage of multicore performance, that are portable and composable, and that have + * future-proof scalability. + * + * Having Intel® TBB installed is recommended to parallelize and accelerate some GUDHI computations. + * + * The following examples/utilities are using Intel® TBB if installed: + * \li + * Alpha_complex/Alpha_complex_from_off.cpp + * \li + * Alpha_complex/Alpha_complex_from_points.cpp + * \li + * Alpha_complex/alpha_complex_3d_persistence.cpp + * \li + * Alpha_complex/alpha_complex_persistence.cpp + * \li + * Alpha_complex/exact_alpha_complex_3d_persistence.cpp + * \li + * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp + * \li + * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp + * \li + * Bitmap_cubical_complex/cubical_complex_persistence.cpp + * \li + * Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp + * \li + * Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp + * \li + * Nerve_GIC/CoordGIC.cpp + * \li + * Nerve_GIC/FuncGIC.cpp + * \li + * Nerve_GIC/Nerve.cpp + * \li + * Nerve_GIC/VoronoiGIC.cpp + * \li + * Simplex_tree/simple_simplex_tree.cpp + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp + * \li + * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp + * \li + * Simplex_tree/graph_expansion_with_blocker.cpp + * \li + * Persistent_cohomology/alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/alpha_complex_persistence.cpp + * \li + * Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp + * \li + * Persistent_cohomology/persistence_from_file.cpp + * \li + * Persistent_cohomology/persistence_from_simple_simplex_tree.cpp + * \li + * Persistent_cohomology/plain_homology.cpp + * \li + * Persistent_cohomology/rips_multifield_persistence.cpp + * \li + * Persistent_cohomology/rips_persistence_step_by_step.cpp + * \li + * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/custom_persistence_sort.cpp + * \li + * Rips_complex/example_one_skeleton_rips_from_points.cpp + * \li + * Rips_complex/example_rips_complex_from_off_file.cpp + * \li + * 
Rips_complex/rips_distance_matrix_persistence.cpp + * \li + * Rips_complex/rips_persistence.cpp + * \li + * Witness_complex/strong_witness_persistence.cpp + * \li + * Witness_complex/weak_witness_persistence.cpp + * \li + * Witness_complex/example_nearest_landmark_table.cpp + * + * \section Contributions Bug reports and contributions + * Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to: + * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim + * + * GUDHI is open to external contributions. If you want to join our development team, please contact us. + * +*/ + +/*! \page Citation Acknowledging the GUDHI library + * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use + * of the GUDHI library on the web pages of their projects using GUDHI and provide us with links to these web pages. + * Feel free to contact us in case you have any question or remark on this topic. + * + * We provide \ref GudhiBibtex entries for the modules of the User and Reference Manual, as well as for publications + * directly related to the GUDHI library. + * \section GudhiBibtex GUDHI bibtex + * \verbinclude biblio/how_to_cite_gudhi.bib +*/ diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 148ee670..b3e9ea03 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -93,7 +93,7 @@ - \subsection CoverComplexDataStructure Cover Complexes: Nerves and Graph Induced Complexes + \subsection CoverComplexDataStructure Cover Complexes \image html "gicvisu.jpg" "Graph Induced Complex of a point cloud." @@ -101,6 +101,7 @@ Author: Mathieu Carrière
Introduced in: GUDHI 2.1.0
Copyright: GPL v3
+ Requires: \ref cgal ≥ 4.8.1
Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain @@ -250,305 +251,3 @@
*/ - -/*! \page installation GUDHI installation - * \tableofcontents - * As GUDHI is a header only library, there is no need to install the library. - * - * Examples of GUDHI headers inclusion can be found in \ref demos. - * - * \section compiling Compiling - * The library uses c++11 and requires Boost with version 1.48.0 or - * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015. - * - * \subsection demos Demos and examples - * To build the demos and examples, run the following commands in a terminal: -\verbatim cd /path-to-gudhi/ -mkdir build -cd build/ -cmake .. -make \endverbatim - * A list of examples is available here. - * - * \subsection testsuites Test suites - * To test your build, run the following command in a terminal: - * \verbatim make test \endverbatim - * - * \subsection documentationgeneration Documentation - * To generate the documentation, Doxygen is required. - * Run the following command in a terminal: -\verbatim -make doxygen -# Documentation will be generated in the folder YYYY-MM-DD-hh-mm-ss_GUDHI_X.Y.Z/doc/html/ -# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder` -\endverbatim - * - * \section optionallibrary Optional third-party library - * \subsection gmp GMP - * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision - * arithmetic, operating on signed integers, rational numbers, and floating point numbers. - * - * The following example requires the GNU Multiple Precision Arithmetic - * Library (GMP) and will not be built if GMP is not installed: - * \li - * Persistent_cohomology/rips_multifield_persistence.cpp - * - * Having GMP version 4.2 or higher installed is recommended. - * - * \subsection cgal CGAL - * The \ref alpha_complex data structure, \ref bottleneck_distance, and few examples requires CGAL, which is a C++ - * library which provides easy access to efficient and reliable geometric algorithms. - * - * \note There is no need to install CGAL, you can just cmake . && make CGAL (or even - * cmake -DCGAL_HEADER_ONLY=ON . for CGAL version ≥ 4.8.0), thereafter you will be able to compile - * GUDHI by calling cmake -DCGAL_DIR=/your/path/to/CGAL-X.Y .. && make - * - * Having CGAL version 4.4.0 or higher installed is recommended. 
The procedure to install this library according to - * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html - * - * The following examples/utilities require the Computational Geometry Algorithms - * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: - * \li - * Alpha_complex/alpha_complex_3d_persistence.cpp - * \li - * Alpha_complex/exact_alpha_complex_3d_persistence.cpp - * \li - * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp - * \li - * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp - * - * The following examples/utilities require CGAL version ≥ 4.6.0: - * \li - * Witness_complex/strong_witness_persistence.cpp - * \li - * Witness_complex/weak_witness_persistence.cpp - * \li - * Witness_complex/example_strong_witness_complex_off.cpp - * \li - * Witness_complex/example_witness_complex_off.cpp - * \li - * Witness_complex/example_witness_complex_sphere.cpp - * - * The following example requires CGAL version ≥ 4.7.0: - * \li - * Alpha_complex/Alpha_complex_from_off.cpp - * \li - * Alpha_complex/Alpha_complex_from_points.cpp - * \li - * Alpha_complex/alpha_complex_persistence.cpp - * \li - * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp - * \li - * Persistent_cohomology/custom_persistence_sort.cpp - * - * The following example requires CGAL version ≥ 4.8.1: - * \li - * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp - * \li - * Bottleneck_distance/bottleneck_basic_example.cpp - * \li - * Bottleneck_distance/bottleneck_distance.cpp - * \li - * Spatial_searching/example_spatial_searching.cpp - * \li - * Subsampling/example_choose_n_farthest_points.cpp - * \li - * Subsampling/example_custom_kernel.cpp - * \li - * Subsampling/example_pick_n_random_points.cpp - * \li - * Subsampling/example_sparsify_point_set.cpp - * \li - * Tangential_complex/example_basic.cpp - * \li - * Tangential_complex/example_with_perturb.cpp - * - * \subsection eigen3 Eigen3 - * The \ref alpha_complex data structure and few examples requires - * Eigen3 is a C++ template library for linear algebra: - * matrices, vectors, numerical solvers, and related algorithms. 
- *
- * The following examples/utilities require the Eigen3 and will not be
- * built if Eigen3 is not installed:
- * \li
- * Alpha_complex/Alpha_complex_from_off.cpp
- * \li
- * Alpha_complex/Alpha_complex_from_points.cpp
- * \li
- * Alpha_complex/alpha_complex_persistence.cpp
- * \li
- * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
- * \li
- * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp
- * \li
- * Persistent_cohomology/custom_persistence_sort.cpp
- * \li
- * Spatial_searching/example_spatial_searching.cpp
- * \li
- * Subsampling/example_choose_n_farthest_points.cpp
- * \li
- * Subsampling/example_custom_kernel.cpp
- * \li
- * Subsampling/example_pick_n_random_points.cpp
- * \li
- * Subsampling/example_sparsify_point_set.cpp
- * \li
- * Tangential_complex/example_basic.cpp
- * \li
- * Tangential_complex/example_with_perturb.cpp
- * \li
- * Witness_complex/strong_witness_persistence.cpp
- * \li
- * Witness_complex/weak_witness_persistence.cpp
- * \li
- * Witness_complex/example_strong_witness_complex_off.cpp
- * \li
- * Witness_complex/example_witness_complex_off.cpp
- * \li
- * Witness_complex/example_witness_complex_sphere.cpp
- *
- * \subsection tbb Threading Building Blocks
- * Intel® TBB lets you easily write parallel
- * C++ programs that take full advantage of multicore performance, that are portable and composable, and that have
- * future-proof scalability.
- *
- * Having Intel® TBB installed is recommended to parallelize and accelerate some GUDHI computations.
- *
- * The following examples/utilities are using Intel® TBB if installed:
- * \li
- * Alpha_complex/Alpha_complex_from_off.cpp
- * \li
- * Alpha_complex/Alpha_complex_from_points.cpp
- * \li
- * Alpha_complex/alpha_complex_3d_persistence.cpp
- * \li
- * Alpha_complex/alpha_complex_persistence.cpp
- * \li
- * Alpha_complex/exact_alpha_complex_3d_persistence.cpp
- * \li
- * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
- * \li
- * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
- * \li
- * Bitmap_cubical_complex/cubical_complex_persistence.cpp
- * \li
- * Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp
- * \li
- * Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
- * \li
- * Simplex_tree/simple_simplex_tree.cpp
- * \li
- * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
- * \li
- * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp
- * \li
- * Simplex_tree/graph_expansion_with_blocker.cpp
- * \li
- * Persistent_cohomology/alpha_complex_3d_persistence.cpp
- * \li
- * Persistent_cohomology/alpha_complex_persistence.cpp
- * \li
- * Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
- * \li
- * Persistent_cohomology/persistence_from_file.cpp
- * \li
- * Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
- * \li
- * Persistent_cohomology/plain_homology.cpp
- * \li
- * Persistent_cohomology/rips_multifield_persistence.cpp
- * \li
- * Persistent_cohomology/rips_persistence_step_by_step.cpp
- * \li
- * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
- * \li
- * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
- * \li
- * Persistent_cohomology/custom_persistence_sort.cpp
- * \li
- * Rips_complex/example_one_skeleton_rips_from_points.cpp
- * \li
- * Rips_complex/example_rips_complex_from_off_file.cpp
- * \li
- * Rips_complex/rips_distance_matrix_persistence.cpp
- * \li
- * Rips_complex/rips_persistence.cpp
- * \li
- * Witness_complex/strong_witness_persistence.cpp
- * \li
- * Witness_complex/weak_witness_persistence.cpp
- * \li
- * Witness_complex/example_nearest_landmark_table.cpp
- *
- * \section Contributions Bug reports and contributions
- * Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to:
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
- *
- * GUDHI is open to external contributions. If you want to join our development team, please contact us.
- *
-*/
-
-/*! \page Citation Acknowledging the GUDHI library
- * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use
- * of the GUDHI library on the web pages of their projects using GUDHI and provide us with links to these web pages.
- * Feel free to contact us in case you have any question or remark on this topic.
- *
- * We provide \ref GudhiBibtex entries for the modules of the User and Reference Manual, as well as for publications
- * directly related to the GUDHI library.
- * \section GudhiBibtex GUDHI bibtex
- * \verbinclude biblio/how_to_cite_gudhi.bib
-*/
-
-// List of GUDHI examples - Doxygen needs at least a file tag to analyse comments
-/*! @file Examples
- * @example Alpha_complex/Alpha_complex_from_off.cpp
- * @example Alpha_complex/Alpha_complex_from_points.cpp
- * @example Alpha_complex/alpha_complex_3d_persistence.cpp
- * @example Alpha_complex/alpha_complex_persistence.cpp
- * @example Alpha_complex/exact_alpha_complex_3d_persistence.cpp
- * @example Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
- * @example Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
- * @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
- * @example Bottleneck_distance/bottleneck_basic_example.cpp
- * @example Bottleneck_distance/bottleneck_distance.cpp
- * @example Bitmap_cubical_complex/cubical_complex_persistence.cpp
- * @example Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp
- * @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
- * @example common/example_CGAL_3D_points_off_reader.cpp
- * @example common/example_CGAL_points_off_reader.cpp
- * @example Contraction/Garland_heckbert.cpp
- * @example Contraction/Rips_contraction.cpp
- * @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
- * @example Persistent_cohomology/persistence_from_file.cpp
- * @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
- * @example Persistent_cohomology/plain_homology.cpp
- * @example Persistent_cohomology/rips_multifield_persistence.cpp
- * @example Persistent_cohomology/custom_persistence_sort.cpp
- * @example Persistent_cohomology/rips_persistence_step_by_step.cpp
- * @example Rips_complex/example_one_skeleton_rips_from_points.cpp
- * @example Rips_complex/example_rips_complex_from_off_file.cpp
- * @example Rips_complex/rips_persistence.cpp
- * @example Rips_complex/rips_distance_matrix_persistence.cpp
- * @example Simplex_tree/mini_simplex_tree.cpp
- * @example Simplex_tree/simple_simplex_tree.cpp
- * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
- * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp
- * @example Simplex_tree/graph_expansion_with_blocker.cpp
- * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp
- * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp
- * @example Skeleton_blocker/Skeleton_blocker_link.cpp
- * @example Spatial_searching/example_spatial_searching.cpp
- * @example Subsampling/example_choose_n_farthest_points.cpp
- * @example Subsampling/example_custom_kernel.cpp
- * @example Subsampling/example_pick_n_random_points.cpp
- * @example Subsampling/example_sparsify_point_set.cpp
- * @example Tangential_complex/example_basic.cpp
- * @example Tangential_complex/example_with_perturb.cpp
- * @example Witness_complex/example_nearest_landmark_table.cpp
- * @example Witness_complex/example_strong_witness_complex_off.cpp
- * @example Witness_complex/example_witness_complex_off.cpp
- * @example Witness_complex/example_witness_complex_sphere.cpp
- * @example Witness_complex/weak_witness_persistence.cpp
- * @example Witness_complex/strong_witness_persistence.cpp
- */
-
-- cgit v1.2.3

From 265484997185f3bf900744406206a2d64ca0a20d Mon Sep 17 00:00:00 2001
From: vrouvrea
Date: Fri, 2 Feb 2018 14:15:49 +0000
Subject: Fix after version release of Cover complex and Persistence representation installations

Fix utils download link

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3211 636b058d-ea47-450e-bf9e-a15bfbe3eedb


Former-commit-id: 9596df8b0d0cdc3d8df79aa367a26b34a96d7f5e
---
 src/Nerve_GIC/example/CMakeLists.txt | 3 +++
 src/Nerve_GIC/utilities/CMakeLists.txt | 4 ++++
 src/Persistence_representations/example/CMakeLists.txt | 8 +++++---
 src/Persistence_representations/utilities/CMakeLists.txt | 6 ++++++
 .../utilities/persistence_intervals/CMakeLists.txt | 6 ++++++
 src/common/doc/header.html | 4 ++--
 src/cython/doc/_templates/layout.html | 4 ++--
 7 files changed, 28 insertions(+), 7 deletions(-)

(limited to 'src/Persistence_representations/example')

diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt
index f2626927..542c6af4 100644
--- a/src/Nerve_GIC/example/CMakeLists.txt
+++ b/src/Nerve_GIC/example/CMakeLists.txt
@@ -23,4 +23,7 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
   "lucky_cat.off" "lucky_cat_PCA1")
+  install(TARGETS CoordGIC DESTINATION bin)
+  install(TARGETS FuncGIC DESTINATION bin)
+
 endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt
index 7762c8a0..7a838a8c 100644
--- a/src/Nerve_GIC/utilities/CMakeLists.txt
+++ b/src/Nerve_GIC/utilities/CMakeLists.txt
@@ -21,4 +21,8 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
   add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $
   "human.off" "100")
+  install(TARGETS Nerve DESTINATION bin)
+  install(TARGETS VoronoiGIC DESTINATION bin)
+  install(FILES KeplerMapperVisuFromTxtFile.py km.py km.py.COPYRIGHT DESTINATION bin)
+
 endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt
index eb3258f8..54d719ac 100644
--- a/src/Persistence_representations/example/CMakeLists.txt
+++ b/src/Persistence_representations/example/CMakeLists.txt
@@ -4,24 +4,26 @@ project(Persistence_representations_example)
 add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp )
 add_test(NAME Persistence_representations_example_landscape_on_grid COMMAND $)
+install(TARGETS Persistence_representations_example_landscape_on_grid DESTINATION bin)
 add_executable ( Persistence_representations_example_landscape persistence_landscape.cpp )
 add_test(NAME Persistence_representations_example_landscape COMMAND $)
+install(TARGETS Persistence_representations_example_landscape DESTINATION bin)
 add_executable ( Persistence_representations_example_intervals persistence_intervals.cpp )
 add_test(NAME Persistence_representations_example_intervals COMMAND $
   "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers")
+install(TARGETS Persistence_representations_example_intervals DESTINATION bin)
 add_executable ( Persistence_representations_example_vectors persistence_vectors.cpp )
 add_test(NAME Persistence_representations_example_vectors COMMAND $)
+install(TARGETS Persistence_representations_example_vectors DESTINATION bin)
 add_executable ( Persistence_representations_example_heat_maps persistence_heat_maps.cpp )
 add_test(NAME Persistence_representations_example_heat_maps COMMAND $)
-
-
-
+install(TARGETS Persistence_representations_example_heat_maps DESTINATION bin)
diff --git a/src/Persistence_representations/utilities/CMakeLists.txt b/src/Persistence_representations/utilities/CMakeLists.txt
index 137eb0c1..fc51b1d6 100644
--- a/src/Persistence_representations/utilities/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/CMakeLists.txt
@@ -10,6 +10,8 @@ function(add_persistence_representation_creation_utility creation_utility)
   add_test(NAME Persistence_representation_utilities_${creation_utility} COMMAND $
   ${ARGN} "${CMAKE_CURRENT_BINARY_DIR}/../first.pers"
   "${CMAKE_CURRENT_BINARY_DIR}/../second.pers")
+
+  install(TARGETS ${creation_utility} DESTINATION bin)
 endfunction(add_persistence_representation_creation_utility)
 function(add_persistence_representation_plot_utility plot_utility tool_extension)
@@ -26,6 +28,8 @@
   #add_test(NAME Persistence_representation_utilities_${plot_utility}_second_gnuplot COMMAND ${GNUPLOT_PATH}
   # "-e" "load '${CMAKE_CURRENT_BINARY_DIR}/../second.pers${tool_extension}_GnuplotScript'")
   endif()
+
+  install(TARGETS ${plot_utility} DESTINATION bin)
 endfunction(add_persistence_representation_plot_utility)
 function(add_persistence_representation_function_utility function_utility tool_extension)
@@ -44,6 +48,8 @@ function(add_persistence_representation_function_utility function_utility tool_e
   "${CMAKE_CURRENT_BINARY_DIR}/../first.pers${tool_extension}"
   "${CMAKE_CURRENT_BINARY_DIR}/../second.pers${tool_extension}")
   endif()
+
+  install(TARGETS ${function_utility} DESTINATION bin)
 endfunction(add_persistence_representation_function_utility)
 add_subdirectory(persistence_heat_maps)
diff --git a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt
index 897e12a3..875ff45e 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt
@@ -7,6 +7,8 @@ add_executable ( plot_histogram_of_intervals_lengths plot_histogram_of_intervals
 add_test(NAME plot_histogram_of_intervals_lengths COMMAND $
   "${CMAKE_CURRENT_BINARY_DIR}/../first.pers" "-1")
+install(TARGETS plot_histogram_of_intervals_lengths DESTINATION bin)
+
 add_persistence_representation_plot_utility(plot_persistence_intervals "")
 add_persistence_representation_plot_utility(plot_persistence_Betti_numbers "")
@@ -18,6 +20,8 @@ add_test(NAME Persistence_representation_utilities_compute_number_of_dominant_in
   COMMAND $
   "${CMAKE_CURRENT_BINARY_DIR}/../first.pers" "-1" "2")
+install(TARGETS compute_number_of_dominant_intervals DESTINATION bin)
+
 if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
   add_executable ( compute_bottleneck_distance compute_bottleneck_distance.cpp )
@@ -29,4 +33,6 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
   "-1" "${CMAKE_CURRENT_BINARY_DIR}/../first.pers"
   "${CMAKE_CURRENT_BINARY_DIR}/../second.pers")
+
+  install(TARGETS compute_bottleneck_distance DESTINATION bin)
 endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/common/doc/header.html b/src/common/doc/header.html
index 9c514381..d69b28fa 100644
--- a/src/common/doc/header.html
+++ b/src/common/doc/header.html
@@ -56,8 +56,8 @@ $extrastylesheet
  •
diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html
index 8e4eba40..c9356116 100644
--- a/src/cython/doc/_templates/layout.html
+++ b/src/cython/doc/_templates/layout.html
@@ -198,8 +198,8 @@
  •
-- cgit v1.2.3
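For context, every install rule added in the commit above follows the same basic CMake pattern: build an executable, register a test for it, and install the resulting binary under the chosen prefix. The stanza below is only an illustrative sketch of that pattern, not part of the patch series: the target name, source file, test name and input path are hypothetical, and ${Boost_SYSTEM_LIBRARY} is assumed to have been set by an earlier find_package(Boost COMPONENTS system).

    # Illustrative sketch only -- hypothetical names, not taken from the GUDHI tree.
    add_executable(my_example my_example.cpp)
    target_link_libraries(my_example ${Boost_SYSTEM_LIBRARY})   # assumes find_package(Boost COMPONENTS system) ran earlier
    add_test(NAME my_example_test
             COMMAND $<TARGET_FILE:my_example>                  # generator expression resolves to the built binary
                     "${CMAKE_SOURCE_DIR}/data/some_input.txt") # hypothetical input file
    install(TARGETS my_example DESTINATION bin)                 # copied to <prefix>/bin by 'make install'

With such a stanza in place, 'make install' puts the example binary alongside the utilities in the installation's bin directory, which is what these commits add for the Nerve_GIC and Persistence_representations targets.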