author    MathieuCarriere <mathieu.carriere3@gmail.com>  2020-04-28 10:31:12 -0400
committer MathieuCarriere <mathieu.carriere3@gmail.com>  2020-04-28 10:31:12 -0400
commit    51ca2370ae27bec052bb4fffafc8a718ac306264 (patch)
tree      67e1ea14d4c290295b0298dba0d701303a53f3b6
parent    910f29401e053f668e4d277af098295ab05e6022 (diff)
parent    0fb22e4c499b665ad505e5d9d2c325f7561f69c4 (diff)
fix conflict
-rw-r--r--  .circleci/config.yml  | 5
-rw-r--r--  .github/next_release.md  | 1
-rw-r--r--  .github/test-requirements.txt  | 6
-rw-r--r--  Dockerfile_for_circleci_image  | 3
-rw-r--r--  biblio/bibliography.bib  | 75
-rw-r--r--  src/Alpha_complex/benchmark/Alpha_complex_3d_benchmark.cpp  | 50
-rw-r--r--  src/Alpha_complex/concept/SimplicialComplexForAlpha.h  | 18
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h  | 15
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_3d_from_points.cpp  | 12
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_off.cpp  | 2
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_points.cpp  | 12
-rw-r--r--  src/Alpha_complex/example/Fast_alpha_complex_from_off.cpp  | 2
-rw-r--r--  src/Alpha_complex/example/Weighted_alpha_complex_3d_from_points.cpp  | 12
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h  | 208
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex_3d.h  | 29
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp  | 34
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp  | 77
-rw-r--r--  src/Alpha_complex/test/CMakeLists.txt  | 4
-rw-r--r--  src/Alpha_complex/test/Delaunay_complex_unit_test.cpp  | 68
-rw-r--r--  src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp  | 16
-rw-r--r--  src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp  | 20
-rw-r--r--  src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp  | 20
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp  | 31
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_persistence.cpp  | 33
-rw-r--r--  src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp  | 2
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h  | 56
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h  | 38
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h  | 15
-rw-r--r--  src/Bitmap_cubical_complex/test/Bitmap_test.cpp  | 14
-rw-r--r--  src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp  | 4
-rw-r--r--  src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp  | 4
-rw-r--r--  src/Bottleneck_distance/doc/Intro_bottleneck_distance.h  | 2
-rw-r--r--  src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp  | 38
-rw-r--r--  src/Bottleneck_distance/example/bottleneck_basic_example.cpp  | 4
-rw-r--r--  src/Bottleneck_distance/utilities/bottleneck_distance.cpp  | 4
-rw-r--r--  src/Cech_complex/benchmark/cech_complex_benchmark.cpp  | 24
-rw-r--r--  src/Cech_complex/example/cech_complex_example_from_points.cpp  | 12
-rw-r--r--  src/Cech_complex/example/cech_complex_step_by_step.cpp  | 28
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex_blocker.h  | 4
-rw-r--r--  src/Cech_complex/test/test_cech_complex.cpp  | 50
-rw-r--r--  src/Cech_complex/utilities/cech_persistence.cpp  | 28
-rw-r--r--  src/Contraction/example/Garland_heckbert.cpp  | 6
-rw-r--r--  src/Contraction/example/Rips_contraction.cpp  | 10
-rw-r--r--  src/Contraction/include/gudhi/Edge_contraction.h  | 10
-rw-r--r--  src/GudhUI/model/Model.h  | 64
-rw-r--r--  src/GudhUI/utils/Bar_code_persistence.h  | 4
-rw-r--r--  src/GudhUI/utils/Critical_points.h  | 2
-rw-r--r--  src/GudhUI/utils/Rips_builder.h  | 6
-rw-r--r--  src/GudhUI/view/View_parameter.h  | 6
-rw-r--r--  src/Hasse_complex/include/gudhi/Hasse_complex.h  | 6
-rw-r--r--  src/Nerve_GIC/example/CoordGIC.cpp  | 10
-rw-r--r--  src/Nerve_GIC/example/FuncGIC.cpp  | 10
-rw-r--r--  src/Nerve_GIC/include/gudhi/GIC.h  | 90
-rw-r--r--  src/Nerve_GIC/utilities/Nerve.cpp  | 10
-rw-r--r--  src/Nerve_GIC/utilities/VoronoiGIC.cpp  | 10
-rw-r--r--  src/Persistence_representations/example/persistence_heat_maps.cpp  | 8
-rw-r--r--  src/Persistence_representations/example/persistence_intervals.cpp  | 40
-rw-r--r--  src/Persistence_representations/example/persistence_landscape.cpp  | 24
-rw-r--r--  src/Persistence_representations/example/persistence_landscape_on_grid.cpp  | 22
-rw-r--r--  src/Persistence_representations/example/persistence_vectors.cpp  | 10
-rw-r--r--  src/Persistence_representations/example/sliced_wasserstein.cpp  | 8
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_heat_maps.h  | 66
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_intervals.h  | 56
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_landscape.h  | 222
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h  | 122
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_vectors.h  | 30
-rw-r--r--  src/Persistence_representations/include/gudhi/read_persistence_from_file.h  | 16
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp  | 4
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp  | 8
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp  | 8
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp  | 4
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp  | 2
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp  | 4
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp  | 4
-rw-r--r--  src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp  | 14
-rw-r--r--  src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp  | 10
-rw-r--r--  src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp  | 6
-rw-r--r--  src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp  | 4
-rw-r--r--  src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp  | 36
-rw-r--r--  src/Persistent_cohomology/example/custom_persistence_sort.cpp  | 29
-rw-r--r--  src/Persistent_cohomology/example/persistence_from_file.cpp  | 43
-rw-r--r--  src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp  | 46
-rw-r--r--  src/Persistent_cohomology/example/plain_homology.cpp  | 11
-rw-r--r--  src/Persistent_cohomology/example/rips_multifield_persistence.cpp  | 37
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp  | 31
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp  | 28
-rw-r--r--  src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h  | 32
-rw-r--r--  src/Persistent_cohomology/test/betti_numbers_unit_test.cpp  | 30
-rw-r--r--  src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp  | 28
-rw-r--r--  src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp  | 8
-rw-r--r--  src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp  | 12
-rw-r--r--  src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp  | 12
-rw-r--r--  src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp  | 12
-rw-r--r--  src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp  | 2
-rw-r--r--  src/Rips_complex/example/example_rips_complex_from_off_file.cpp  | 2
-rw-r--r--  src/Rips_complex/example/example_sparse_rips.cpp  | 2
-rw-r--r--  src/Rips_complex/test/test_rips_complex.cpp  | 108
-rw-r--r--  src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp  | 31
-rw-r--r--  src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp  | 31
-rw-r--r--  src/Rips_complex/utilities/rips_persistence.cpp  | 31
-rw-r--r--  src/Rips_complex/utilities/sparse_rips_persistence.cpp  | 31
-rw-r--r--  src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp  | 30
-rw-r--r--  src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp  | 68
-rw-r--r--  src/Simplex_tree/example/graph_expansion_with_blocker.cpp  | 22
-rw-r--r--  src/Simplex_tree/example/mini_simplex_tree.cpp  | 4
-rw-r--r--  src/Simplex_tree/example/simple_simplex_tree.cpp  | 148
-rw-r--r--  src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp  | 56
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h  | 210
-rw-r--r--  src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp  | 32
-rw-r--r--  src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp  | 88
-rw-r--r--  src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp  | 46
-rw-r--r--  src/Simplex_tree/test/simplex_tree_make_filtration_non_decreasing_unit_test.cpp  | 22
-rw-r--r--  src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp  | 154
-rw-r--r--  src/Simplex_tree/test/simplex_tree_unit_test.cpp  | 234
-rw-r--r--  src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp  | 16
-rw-r--r--  src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp  | 8
-rw-r--r--  src/Skeleton_blocker/example/Skeleton_blocker_link.cpp  | 10
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h  | 20
-rw-r--r--  src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp  | 88
-rw-r--r--  src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp  | 16
-rw-r--r--  src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp  | 92
-rw-r--r--  src/Spatial_searching/example/example_spatial_searching.cpp  | 20
-rw-r--r--  src/Subsampling/example/example_choose_n_farthest_points.cpp  | 4
-rw-r--r--  src/Subsampling/example/example_custom_kernel.cpp  | 6
-rw-r--r--  src/Subsampling/example/example_pick_n_random_points.cpp  | 4
-rw-r--r--  src/Subsampling/example/example_sparsify_point_set.cpp  | 4
-rw-r--r--  src/Subsampling/test/test_pick_n_random_points.cpp  | 4
-rw-r--r--  src/Subsampling/test/test_sparsify_point_set.cpp  | 6
-rw-r--r--  src/Tangential_complex/test/test_tangential_complex.cpp  | 10
-rw-r--r--  src/Toplex_map/benchmark/benchmark_tm.cpp  | 20
-rw-r--r--  src/Toplex_map/example/simple_toplex_map.cpp  | 54
-rw-r--r--  src/Toplex_map/test/lazy_toplex_map_unit_test.cpp  | 76
-rw-r--r--  src/Toplex_map/test/toplex_map_unit_test.cpp  | 50
-rw-r--r--  src/Witness_complex/example/example_nearest_landmark_table.cpp  | 2
-rw-r--r--  src/Witness_complex/example/example_strong_witness_complex_off.cpp  | 8
-rw-r--r--  src/Witness_complex/example/example_witness_complex_off.cpp  | 8
-rw-r--r--  src/Witness_complex/example/example_witness_complex_sphere.cpp  | 8
-rw-r--r--  src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp  | 16
-rw-r--r--  src/Witness_complex/test/test_simple_witness_complex.cpp  | 4
-rw-r--r--  src/Witness_complex/utilities/strong_witness_persistence.cpp  | 32
-rw-r--r--  src/Witness_complex/utilities/weak_witness_persistence.cpp  | 32
-rw-r--r--  src/Witness_complex/utilities/witnesscomplex.md  | 4
-rw-r--r--  src/cmake/modules/GUDHI_compilation_flags.cmake  | 37
-rw-r--r--  src/cmake/modules/GUDHI_third_party_libraries.cmake  | 23
-rw-r--r--  src/cmake/modules/GUDHI_user_version_target.cmake  | 6
-rw-r--r--  src/common/benchmark/Graph_simplicial_complex_benchmark.cpp  | 8
-rw-r--r--  src/common/example/example_CGAL_3D_points_off_reader.cpp  | 2
-rw-r--r--  src/common/example/example_CGAL_points_off_reader.cpp  | 6
-rw-r--r--  src/common/include/gudhi/Clock.h  | 4
-rw-r--r--  src/common/include/gudhi/Debug_utils.h  | 10
-rw-r--r--  src/common/include/gudhi/Points_3D_off_io.h  | 8
-rw-r--r--  src/common/include/gudhi/Points_off_io.h  | 8
-rw-r--r--  src/common/include/gudhi/Unitary_tests_utils.h  | 4
-rw-r--r--  src/common/include/gudhi/distance_functions.h  | 2
-rw-r--r--  src/common/include/gudhi/reader_utils.h  | 16
-rw-r--r--  src/common/test/test_distance_matrix_reader.cpp  | 16
-rw-r--r--  src/common/test/test_persistence_intervals_reader.cpp  | 124
-rw-r--r--  src/python/CMakeLists.txt  | 159
-rw-r--r--  src/python/doc/alpha_complex_sum.inc  | 6
-rw-r--r--  src/python/doc/alpha_complex_user.rst  | 8
-rw-r--r--  src/python/doc/bottleneck_distance_sum.inc  | 6
-rw-r--r--  src/python/doc/bottleneck_distance_user.rst  | 1
-rw-r--r--  src/python/doc/cubical_complex_sum.inc  | 6
-rw-r--r--  src/python/doc/cubical_complex_user.rst  | 9
-rw-r--r--  src/python/doc/img/barycenter.png  | bin 0 -> 12433 bytes
-rw-r--r--  src/python/doc/index.rst  | 8
-rw-r--r--  src/python/doc/installation.rst  | 36
-rw-r--r--  src/python/doc/nerve_gic_complex_sum.inc  | 6
-rw-r--r--  src/python/doc/persistence_graphical_tools_sum.inc  | 6
-rw-r--r--  src/python/doc/persistent_cohomology_sum.inc  | 6
-rw-r--r--  src/python/doc/persistent_cohomology_user.rst  | 9
-rw-r--r--  src/python/doc/point_cloud.rst  | 19
-rw-r--r--  src/python/doc/point_cloud_sum.inc  | 10
-rw-r--r--  src/python/doc/representations_sum.inc  | 6
-rw-r--r--  src/python/doc/rips_complex_sum.inc  | 6
-rw-r--r--  src/python/doc/rips_complex_user.rst  | 2
-rw-r--r--  src/python/doc/simplex_tree_ref.rst  | 1
-rw-r--r--  src/python/doc/simplex_tree_sum.inc  | 6
-rw-r--r--  src/python/doc/tangential_complex_sum.inc  | 6
-rw-r--r--  src/python/doc/tangential_complex_user.rst  | 8
-rw-r--r--  src/python/doc/wasserstein_distance_sum.inc  | 12
-rw-r--r--  src/python/doc/wasserstein_distance_user.rst  | 96
-rw-r--r--  src/python/doc/witness_complex_sum.inc  | 6
-rw-r--r--  src/python/doc/witness_complex_user.rst  | 7
-rw-r--r--  src/python/doc/zbibliography.rst  | 10
-rwxr-xr-x  src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py  | 13
-rwxr-xr-x  src/python/example/alpha_complex_from_points_example.py  | 3
-rwxr-xr-x  src/python/example/alpha_rips_persistence_bottleneck_distance.py  | 37
-rwxr-xr-x  src/python/example/diagram_vectorizations_distances_kernels.py  | 2
-rwxr-xr-x  src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py  | 19
-rwxr-xr-x  src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py  | 11
-rwxr-xr-x  src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py  | 16
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_off_file_example.py  | 16
-rwxr-xr-x  src/python/example/simplex_tree_example.py  | 1
-rwxr-xr-x  src/python/example/tangential_complex_plain_homology_from_off_file_example.py  | 18
-rw-r--r--  src/python/gudhi/alpha_complex.pyx  | 10
-rw-r--r--  src/python/gudhi/cubical_complex.pyx  | 80
-rw-r--r--  src/python/gudhi/nerve_gic.pyx  | 37
-rw-r--r--  src/python/gudhi/off_reader.pyx  | 12
-rw-r--r--  src/python/gudhi/periodic_cubical_complex.pyx  | 73
-rw-r--r--  src/python/gudhi/point_cloud/dtm.py  | 70
-rw-r--r--  src/python/gudhi/point_cloud/knn.py  | 324
-rw-r--r--  src/python/gudhi/representations/metrics.py  | 60
-rw-r--r--  src/python/gudhi/simplex_tree.pxd  | 10
-rw-r--r--  src/python/gudhi/simplex_tree.pyx  | 203
-rw-r--r--  src/python/gudhi/wasserstein/__init__.py  | 1
-rw-r--r--  src/python/gudhi/wasserstein/barycenter.py  | 159
-rw-r--r--  src/python/gudhi/wasserstein/wasserstein.py (renamed from src/python/gudhi/wasserstein.py)  | 42
-rw-r--r--  src/python/include/Alpha_complex_interface.h  | 1
-rw-r--r--  src/python/include/Euclidean_strong_witness_complex_interface.h  | 2
-rw-r--r--  src/python/include/Euclidean_witness_complex_interface.h  | 2
-rw-r--r--  src/python/include/Nerve_gic_interface.h  | 1
-rw-r--r--  src/python/include/Persistent_cohomology_interface.h  | 28
-rw-r--r--  src/python/include/Rips_complex_interface.h  | 1
-rw-r--r--  src/python/include/Simplex_tree_interface.h  | 55
-rw-r--r--  src/python/include/Strong_witness_complex_interface.h  | 2
-rw-r--r--  src/python/include/Tangential_complex_interface.h  | 1
-rw-r--r--  src/python/include/Witness_complex_interface.h  | 2
-rwxr-xr-x  src/python/test/test_cover_complex.py  | 4
-rwxr-xr-x  src/python/test/test_cubical_complex.py  | 6
-rwxr-xr-x  src/python/test/test_dtm.py  | 68
-rwxr-xr-x  src/python/test/test_knn.py  | 130
-rwxr-xr-x  src/python/test/test_simplex_tree.py  | 85
-rwxr-xr-x  src/python/test/test_subsampling.py  | 1
-rwxr-xr-x  src/python/test/test_wasserstein_barycenter.py  | 46
-rwxr-xr-x  src/python/test/test_wasserstein_distance.py  | 9
245 files changed, 4111 insertions, 2812 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4f86cb12..40ddc08e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -45,7 +45,6 @@ jobs:
python:
docker:
- image: gudhi/ci_for_gudhi:latest
- parallelism: 4
steps:
- checkout
- run:
@@ -62,12 +61,12 @@ jobs:
cd build;
cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 ..;
cd python;
- python3 setup.py build_ext -j 4 --inplace;
+ python3 setup.py build_ext -j 2 --inplace;
make sphinx;
cp -R sphinx /tmp/sphinx;
python3 setup.py install;
python3 setup.py clean --all;
- ctest -j 4 --output-on-failure;
+ ctest -j 2 --output-on-failure;
- store_artifacts:
path: /tmp/sphinx
diff --git a/.github/next_release.md b/.github/next_release.md
index 3166b0a8..83b98a1c 100644
--- a/.github/next_release.md
+++ b/.github/next_release.md
@@ -9,6 +9,7 @@ Below is a list of changes made since GUDHI 3.1.1:
- [Wassertein distance](https://gudhi.inria.fr/python/latest/wasserstein_distance_user.html)
- An another implementation comes from Hera (BSD-3-Clause) which is based on [Geometry Helps to Compare Persistence Diagrams](http://doi.acm.org/10.1145/3064175) by Michael Kerber, Dmitriy Morozov, and Arnur Nigmetov.
+ - `gudhi.wasserstein.wasserstein_distance` has now an option to return the optimal matching that achieves the distance between the two diagrams.
- [Module](link)
- ...
diff --git a/.github/test-requirements.txt b/.github/test-requirements.txt
index 18882792..fb1df134 100644
--- a/.github/test-requirements.txt
+++ b/.github/test-requirements.txt
@@ -6,4 +6,8 @@ matplotlib
scipy
scikit-learn
POT
-tensorflow \ No newline at end of file
+tensorflow
+torch
+pykeops
+hnswlib
+eagerpy
diff --git a/Dockerfile_for_circleci_image b/Dockerfile_for_circleci_image
index 1eededb5..c2e8a8f5 100644
--- a/Dockerfile_for_circleci_image
+++ b/Dockerfile_for_circleci_image
@@ -43,6 +43,7 @@ RUN apt-get install -y make \
python3 \
python3-pip \
python3-tk \
+ python3-grpcio \
libfreetype6-dev \
pkg-config \
curl
@@ -51,7 +52,7 @@ ADD .github/build-requirements.txt /
ADD .github/test-requirements.txt /
RUN pip3 install -r build-requirements.txt
-RUN pip3 install -r test-requirements.txt
+RUN pip3 --no-cache-dir install -r test-requirements.txt
# apt clean up
RUN apt autoremove && rm -rf /var/lib/apt/lists/*
diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib
index 3bbe7960..99a15c5e 100644
--- a/biblio/bibliography.bib
+++ b/biblio/bibliography.bib
@@ -7,11 +7,13 @@
}
@article{Carriere17c,
- author = {Carri\`ere, Mathieu and Michel, Bertrand and Oudot, Steve},
- title = {{Statistical Analysis and Parameter Selection for Mapper}},
- journal = {CoRR},
- volume = {abs/1706.00204},
- year = {2017}
+author = {Carri{\`{e}}re, Mathieu and Michel, Bertrand and Oudot, Steve},
+journal = {Journal of Machine Learning Research},
+pages = {1--39},
+publisher = {JMLR.org},
+title = {{Statistical analysis and parameter selection for Mapper}},
+volume = {19},
+year = {2018}
}
@inproceedings{Dey13,
@@ -23,11 +25,15 @@
}
@article{Carriere16,
- title={{Structure and Stability of the 1-Dimensional Mapper}},
- author={Carri\`ere, Mathieu and Oudot, Steve},
- journal={CoRR},
- volume= {abs/1511.05823},
- year={2015}
+author = {Carri{\`{e}}re, Mathieu and Oudot, Steve},
+journal = {Foundations of Computational Mathematics},
+number = {6},
+pages = {1333--1396},
+publisher = {Springer-Verlag},
+doi = {10.1007/s10208-017-9370-z},
+title = {{Structure and stability of the one-dimensional Mapper}},
+volume = {18},
+year = {2017}
}
@inproceedings{zigzag_reflection,
@@ -36,6 +42,18 @@
year = {2014 $\ \ \ \ \ \ \ \ \ \ \ $ \emph{In Preparation}},
}
+@article{Cohen-Steiner2009,
+author = {Cohen-Steiner, David and Edelsbrunner, Herbert and Harer, John},
+journal = {Foundations of Computational Mathematics},
+number = {1},
+pages = {79--103},
+publisher = {Springer-Verlag},
+doi = {10.1007/s10208-008-9027-z},
+title = {{Extending persistence using Poincar{\'{e}} and Lefschetz duality}},
+volume = {9},
+year = {2009}
+}
+
@misc{gudhi_stpcoh,
author = {Cl\'ement Maria},
title = "\textsc{Gudhi}, Simplex Tree and Persistent Cohomology Packages",
@@ -1192,3 +1210,40 @@ numpages = {11},
location = {Montr\'{e}al, Canada},
series = {NIPS’18}
}
+@Article{dtm,
+author={Chazal, Fr{\'e}d{\'e}ric
+and Cohen-Steiner, David
+and M{\'e}rigot, Quentin},
+title={Geometric Inference for Probability Measures},
+journal={Foundations of Computational Mathematics},
+year={2011},
+volume={11},
+number={6},
+pages={733-751},
+abstract={Data often comes in the form of a point cloud sampled from an unknown compact subset of Euclidean space. The general goal of geometric inference is then to recover geometric and topological features (e.g., Betti numbers, normals) of this subset from the approximating point cloud data. It appears that the study of distance functions allows one to address many of these questions successfully. However, one of the main limitations of this framework is that it does not cope well with outliers or with background noise. In this paper, we show how to extend the framework of distance functions to overcome this problem. Replacing compact subsets by measures, we introduce a notion of distance function to a probability distribution in Rd. These functions share many properties with classical distance functions, which make them suitable for inference purposes. In particular, by considering appropriate level sets of these distance functions, we show that it is possible to reconstruct offsets of sampled shapes with topological guarantees even in the presence of outliers. Moreover, in settings where empirical measures are considered, these functions can be easily evaluated, making them of particular practical interest.},
+issn={1615-3383},
+doi={10.1007/s10208-011-9098-0},
+url={https://doi.org/10.1007/s10208-011-9098-0}
+}
+@article{dtmdensity,
+author = "Biau, Gérard and Chazal, Frédéric and Cohen-Steiner, David and Devroye, Luc and Rodríguez, Carlos",
+doi = "10.1214/11-EJS606",
+fjournal = "Electronic Journal of Statistics",
+journal = "Electron. J. Statist.",
+pages = "204--237",
+publisher = "The Institute of Mathematical Statistics and the Bernoulli Society",
+title = "A weighted k-nearest neighbor density estimate for geometric inference",
+url = "https://doi.org/10.1214/11-EJS606",
+volume = "5",
+year = "2011"
+}
+@article{turner2014frechet,
+ title={Fr{\'e}chet means for distributions of persistence diagrams},
+ author={Turner, Katharine and Mileyko, Yuriy and Mukherjee, Sayan and Harer, John},
+ journal={Discrete \& Computational Geometry},
+ volume={52},
+ number={1},
+ pages={44--70},
+ year={2014},
+ publisher={Springer}
+}
diff --git a/src/Alpha_complex/benchmark/Alpha_complex_3d_benchmark.cpp b/src/Alpha_complex/benchmark/Alpha_complex_3d_benchmark.cpp
index 99ad94b9..e7d85686 100644
--- a/src/Alpha_complex/benchmark/Alpha_complex_3d_benchmark.cpp
+++ b/src/Alpha_complex/benchmark/Alpha_complex_3d_benchmark.cpp
@@ -19,7 +19,7 @@ std::ofstream results_csv("results.csv");
template <typename Kernel>
void benchmark_points_on_torus_dD(const std::string& msg) {
- std::cout << "+ " << msg << std::endl;
+ std::clog << "+ " << msg << std::endl;
results_csv << "\"" << msg << "\";" << std::endl;
results_csv << "\"nb_points\";"
@@ -29,7 +29,7 @@ void benchmark_points_on_torus_dD(const std::string& msg) {
using K = CGAL::Epick_d<CGAL::Dimension_tag<3>>;
for (int nb_points = 1000; nb_points <= 125000; nb_points *= 5) {
- std::cout << " Alpha complex dD on torus with " << nb_points << " points." << std::endl;
+ std::clog << " Alpha complex dD on torus with " << nb_points << " points." << std::endl;
std::vector<K::Point_d> points_on_torus = Gudhi::generate_points_on_torus_3D<K>(nb_points, 1.0, 0.5);
std::vector<typename Kernel::Point_d> points;
@@ -41,26 +41,26 @@ void benchmark_points_on_torus_dD(const std::string& msg) {
ac_create_clock.begin();
Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points);
ac_create_clock.end();
- std::cout << ac_create_clock;
+ std::clog << ac_create_clock;
Gudhi::Simplex_tree<> complex;
Gudhi::Clock st_create_clock(" benchmark_points_on_torus_dD - complex creation");
st_create_clock.begin();
alpha_complex_from_points.create_complex(complex);
st_create_clock.end();
- std::cout << st_create_clock;
+ std::clog << st_create_clock;
results_csv << nb_points << ";" << complex.num_simplices() << ";" << ac_create_clock.num_seconds() << ";"
<< st_create_clock.num_seconds() << ";" << std::endl;
- std::cout << " benchmark_points_on_torus_dD - nb simplices = " << complex.num_simplices() << std::endl;
+ std::clog << " benchmark_points_on_torus_dD - nb simplices = " << complex.num_simplices() << std::endl;
}
}
template <typename Alpha_complex_3d>
void benchmark_points_on_torus_3D(const std::string& msg) {
using K = CGAL::Epick_d<CGAL::Dimension_tag<3>>;
- std::cout << "+ " << msg << std::endl;
+ std::clog << "+ " << msg << std::endl;
results_csv << "\"" << msg << "\";" << std::endl;
results_csv << "\"nb_points\";"
@@ -69,7 +69,7 @@ void benchmark_points_on_torus_3D(const std::string& msg) {
<< "\"complex_creation_time(sec.)\";" << std::endl;
for (int nb_points = 1000; nb_points <= 125000; nb_points *= 5) {
- std::cout << " Alpha complex 3d on torus with " << nb_points << " points." << std::endl;
+ std::clog << " Alpha complex 3d on torus with " << nb_points << " points." << std::endl;
std::vector<K::Point_d> points_on_torus = Gudhi::generate_points_on_torus_3D<K>(nb_points, 1.0, 0.5);
std::vector<typename Alpha_complex_3d::Point_3> points;
@@ -81,19 +81,19 @@ void benchmark_points_on_torus_3D(const std::string& msg) {
ac_create_clock.begin();
Alpha_complex_3d alpha_complex_from_points(points);
ac_create_clock.end();
- std::cout << ac_create_clock;
+ std::clog << ac_create_clock;
Gudhi::Simplex_tree<> complex;
Gudhi::Clock st_create_clock(" benchmark_points_on_torus_3D - complex creation");
st_create_clock.begin();
alpha_complex_from_points.create_complex(complex);
st_create_clock.end();
- std::cout << st_create_clock;
+ std::clog << st_create_clock;
results_csv << nb_points << ";" << complex.num_simplices() << ";" << ac_create_clock.num_seconds() << ";"
<< st_create_clock.num_seconds() << ";" << std::endl;
- std::cout << " benchmark_points_on_torus_3D - nb simplices = " << complex.num_simplices() << std::endl;
+ std::clog << " benchmark_points_on_torus_3D - nb simplices = " << complex.num_simplices() << std::endl;
}
}
@@ -101,7 +101,7 @@ template <typename Weighted_alpha_complex_3d>
void benchmark_weighted_points_on_torus_3D(const std::string& msg) {
using K = CGAL::Epick_d<CGAL::Dimension_tag<3>>;
- std::cout << "+ " << msg << std::endl;
+ std::clog << "+ " << msg << std::endl;
results_csv << "\"" << msg << "\";" << std::endl;
results_csv << "\"nb_points\";"
@@ -112,7 +112,7 @@ void benchmark_weighted_points_on_torus_3D(const std::string& msg) {
CGAL::Random random(8);
for (int nb_points = 1000; nb_points <= 125000; nb_points *= 5) {
- std::cout << " Alpha complex 3d on torus with " << nb_points << " points." << std::endl;
+ std::clog << " Alpha complex 3d on torus with " << nb_points << " points." << std::endl;
std::vector<K::Point_d> points_on_torus = Gudhi::generate_points_on_torus_3D<K>(nb_points, 1.0, 0.5);
using Point = typename Weighted_alpha_complex_3d::Bare_point_3;
@@ -128,25 +128,25 @@ void benchmark_weighted_points_on_torus_3D(const std::string& msg) {
ac_create_clock.begin();
Weighted_alpha_complex_3d alpha_complex_from_points(points);
ac_create_clock.end();
- std::cout << ac_create_clock;
+ std::clog << ac_create_clock;
Gudhi::Simplex_tree<> complex;
Gudhi::Clock st_create_clock(" benchmark_weighted_points_on_torus_3D - complex creation");
st_create_clock.begin();
alpha_complex_from_points.create_complex(complex);
st_create_clock.end();
- std::cout << st_create_clock;
+ std::clog << st_create_clock;
results_csv << nb_points << ";" << complex.num_simplices() << ";" << ac_create_clock.num_seconds() << ";"
<< st_create_clock.num_seconds() << ";" << std::endl;
- std::cout << " benchmark_weighted_points_on_torus_3D - nb simplices = " << complex.num_simplices() << std::endl;
+ std::clog << " benchmark_weighted_points_on_torus_3D - nb simplices = " << complex.num_simplices() << std::endl;
}
}
template <typename Periodic_alpha_complex_3d>
void benchmark_periodic_points(const std::string& msg) {
- std::cout << "+ " << msg << std::endl;
+ std::clog << "+ " << msg << std::endl;
results_csv << "\"" << msg << "\";" << std::endl;
results_csv << "\"nb_points\";"
@@ -157,7 +157,7 @@ void benchmark_periodic_points(const std::string& msg) {
CGAL::Random random(8);
for (double nb_points = 10.; nb_points <= 40.; nb_points += 10.) {
- std::cout << " Periodic alpha complex 3d with " << nb_points * nb_points * nb_points << " points." << std::endl;
+ std::clog << " Periodic alpha complex 3d with " << nb_points * nb_points * nb_points << " points." << std::endl;
using Point = typename Periodic_alpha_complex_3d::Point_3;
std::vector<Point> points;
@@ -174,25 +174,25 @@ void benchmark_periodic_points(const std::string& msg) {
ac_create_clock.begin();
Periodic_alpha_complex_3d alpha_complex_from_points(points, 0., 0., 0., nb_points, nb_points, nb_points);
ac_create_clock.end();
- std::cout << ac_create_clock;
+ std::clog << ac_create_clock;
Gudhi::Simplex_tree<> complex;
Gudhi::Clock st_create_clock(" benchmark_periodic_points - complex creation");
st_create_clock.begin();
alpha_complex_from_points.create_complex(complex);
st_create_clock.end();
- std::cout << st_create_clock;
+ std::clog << st_create_clock;
results_csv << nb_points * nb_points * nb_points << ";" << complex.num_simplices() << ";"
<< ac_create_clock.num_seconds() << ";" << st_create_clock.num_seconds() << ";" << std::endl;
- std::cout << " benchmark_periodic_points - nb simplices = " << complex.num_simplices() << std::endl;
+ std::clog << " benchmark_periodic_points - nb simplices = " << complex.num_simplices() << std::endl;
}
}
template <typename Weighted_periodic_alpha_complex_3d>
void benchmark_weighted_periodic_points(const std::string& msg) {
- std::cout << "+ " << msg << std::endl;
+ std::clog << "+ " << msg << std::endl;
results_csv << "\"" << msg << "\";" << std::endl;
results_csv << "\"nb_points\";"
@@ -203,7 +203,7 @@ void benchmark_weighted_periodic_points(const std::string& msg) {
CGAL::Random random(8);
for (double nb_points = 10.; nb_points <= 40.; nb_points += 10.) {
- std::cout << " Weighted periodic alpha complex 3d with " << nb_points * nb_points * nb_points << " points."
+ std::clog << " Weighted periodic alpha complex 3d with " << nb_points * nb_points * nb_points << " points."
<< std::endl;
using Point = typename Weighted_periodic_alpha_complex_3d::Bare_point_3;
@@ -224,19 +224,19 @@ void benchmark_weighted_periodic_points(const std::string& msg) {
ac_create_clock.begin();
Weighted_periodic_alpha_complex_3d alpha_complex_from_points(points, 0., 0., 0., nb_points, nb_points, nb_points);
ac_create_clock.end();
- std::cout << ac_create_clock;
+ std::clog << ac_create_clock;
Gudhi::Simplex_tree<> complex;
Gudhi::Clock st_create_clock(" benchmark_weighted_periodic_points - complex creation");
st_create_clock.begin();
alpha_complex_from_points.create_complex(complex);
st_create_clock.end();
- std::cout << st_create_clock;
+ std::clog << st_create_clock;
results_csv << nb_points * nb_points * nb_points << ";" << complex.num_simplices() << ";"
<< ac_create_clock.num_seconds() << ";" << st_create_clock.num_seconds() << ";" << std::endl;
- std::cout << " benchmark_weighted_periodic_points - nb simplices = " << complex.num_simplices() << std::endl;
+ std::clog << " benchmark_weighted_periodic_points - nb simplices = " << complex.num_simplices() << std::endl;
}
}
diff --git a/src/Alpha_complex/concept/SimplicialComplexForAlpha.h b/src/Alpha_complex/concept/SimplicialComplexForAlpha.h
index 1c6c3b0c..c20c3201 100644
--- a/src/Alpha_complex/concept/SimplicialComplexForAlpha.h
+++ b/src/Alpha_complex/concept/SimplicialComplexForAlpha.h
@@ -72,6 +72,24 @@ struct SimplicialComplexForAlpha {
/** \brief Return type of an insertion of a simplex
*/
typedef unspecified Insertion_result_type;
+
+ /** \name Map interface
+ * Conceptually a `std::unordered_map<Simplex_handle,std::size_t>`.
+ * @{ */
+ /** \brief Data stored for each simplex.
+ *
+ * Must be an integer type. */
+ typedef unspecified Simplex_key;
+ /** \brief Returns a constant dummy number that is either negative,
+ * or at least as large as the number of simplices. Suggested value: -1. */
+ Simplex_key null_key ();
+ /** \brief Returns the number stored for a simplex by `assign_key()`.
+ *
+ * If `assign_key()` has not been called, it must return `null_key()`. */
+ Simplex_key key ( Simplex_handle sh );
+ /** \brief Store a number for a simplex, which can later be retrieved with `key()`. */
+ void assign_key(Simplex_handle sh, Simplex_key n);
+ /** @} */
};
} // namespace alpha_complex
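
(Editorial aside, not part of the patch.) The map interface added to the concept above is what lets `Alpha_complex` attach an index into its circumcenter cache to each simplex. A minimal toy model of a complex providing `Simplex_key`, `null_key()`, `key()` and `assign_key()` might look like the following sketch; the class name and the vector storage are illustrative assumptions only.

    #include <cstddef>
    #include <vector>

    // Toy model of the map interface: a simplex handle is just an index, and each
    // simplex stores one Simplex_key, equal to null_key() until assign_key() is called.
    struct Toy_complex_with_keys {
      typedef std::size_t Simplex_handle;
      typedef std::size_t Simplex_key;   // must be an integer type
      explicit Toy_complex_with_keys(std::size_t num_simplices)
          : keys_(num_simplices, null_key()) {}
      // Constant dummy value at least as large as the number of simplices (here: SIZE_MAX).
      static Simplex_key null_key() { return Simplex_key(-1); }
      Simplex_key key(Simplex_handle sh) const { return keys_[sh]; }
      void assign_key(Simplex_handle sh, Simplex_key n) { keys_[sh] = n; }
     private:
      std::vector<Simplex_key> keys_;
    };
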
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
index a8b1a106..60da7169 100644
--- a/src/Alpha_complex/doc/Intro_alpha_complex.h
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -46,16 +46,17 @@ namespace alpha_complex {
* \cite cgal:s-gkd-19b from CGAL as template parameter.
*
* \remark
- * - When an \f$\alpha\f$-complex is constructed with an infinite value of \f$ \alpha^2 \f$, the complex is a Delaunay
- * complex (with special filtration values).
+ * - When the simplicial complex is constructed with an infinite value of \f$ \alpha^2 \f$, the complex is a Delaunay
+ * complex with special filtration values. The Delaunay complex without filtration values is also available by passing
+ * `default_filtration_value=true` to `Alpha_complex::create_complex`.
* - For people only interested in the topology of the \ref alpha_complex (for instance persistence),
* \ref alpha_complex is equivalent to the \ref cech_complex and much smaller if you do not bound the radii.
* \ref cech_complex can still make sense in higher dimension precisely because you can bound the radii.
- * - Using the default `CGAL::Epeck_d` makes the construction safe. If you pass exact=true to create_complex, the
- * filtration values are the exact ones converted to the filtration value type of the simplicial complex. This can be
- * very slow. If you pass exact=false (the default), the filtration values are only guaranteed to have a small
- * multiplicative error compared to the exact value, see <code><a class="el" target="_blank"
- * href="https://doc.cgal.org/latest/Number_types/classCGAL_1_1Lazy__exact__nt.html">
+ * - Using the default `CGAL::Epeck_d` makes the construction safe. If you pass `exact=true` to
+ * `Alpha_complex::create_complex`, the filtration values are the exact ones converted to the filtration value type of
+ * the simplicial complex. This can be very slow. If you pass `exact=false` (the default), the filtration values are
+ * only guaranteed to have a small multiplicative error compared to the exact value, see <code>
+ * <a class="el" target="_blank" href="https://doc.cgal.org/latest/Number_types/classCGAL_1_1Lazy__exact__nt.html">
* CGAL::Lazy_exact_nt<NT>::set_relative_precision_of_to_double</a></code> for details. A drawback, when computing
* persistence, is that an empty exact interval [10^12,10^12] may become a non-empty approximate interval
* [10^12,10^12+10^6]. Using `CGAL::Epick_d` makes the computations slightly faster, and the combinatorics are still
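
(Editorial aside, not part of the patch.) A short usage sketch of the two options discussed above, following the `create_complex(complex, max_alpha_square, exact, default_filtration_value)` signature introduced in this changeset; the point coordinates are made-up illustration data.

    #include <gudhi/Alpha_complex.h>
    #include <gudhi/Simplex_tree.h>
    #include <CGAL/Epeck_d.h>
    #include <limits>
    #include <vector>

    int main() {
      using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
      using Point = Kernel::Point_d;
      using Filtration_value = Gudhi::Simplex_tree<>::Filtration_value;
      std::vector<Point> points = {Point(1., 1.), Point(7., 0.), Point(4., 6.), Point(9., 6.)};
      Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex(points);

      // Alpha complex with exact filtration values (safe, but can be very slow).
      Gudhi::Simplex_tree<> exact_stree;
      alpha_complex.create_complex(exact_stree, std::numeric_limits<Filtration_value>::infinity(),
                                   /*exact=*/true);

      // Plain Delaunay complex: filtration values are not computed and stay NaN.
      Gudhi::Simplex_tree<> delaunay_stree;
      alpha_complex.create_complex(delaunay_stree, std::numeric_limits<Filtration_value>::infinity(),
                                   /*exact=*/false, /*default_filtration_value=*/true);
      return 0;
    }
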
diff --git a/src/Alpha_complex/example/Alpha_complex_3d_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_3d_from_points.cpp
index 0e359a27..a2c85138 100644
--- a/src/Alpha_complex/example/Alpha_complex_3d_from_points.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_3d_from_points.cpp
@@ -38,18 +38,18 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Alpha complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
+ std::clog << "Alpha complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
<< " simplices - " << simplex.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
for (auto f_simplex : simplex.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> "
+ std::clog << ") -> "
<< "[" << simplex.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
return 0;
diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
index 220a66de..dba1710e 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
@@ -30,7 +30,7 @@ int main(int argc, char **argv) {
ouput_file_stream.open(std::string(argv[3]));
streambuffer = ouput_file_stream.rdbuf();
} else {
- streambuffer = std::cout.rdbuf();
+ streambuffer = std::clog.rdbuf();
}
Gudhi::Simplex_tree<> simplex;
diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
index 6526ca3a..c79535bf 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
@@ -35,18 +35,18 @@ int main() {
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Alpha complex is of dimension " << simplex.dimension() <<
+ std::clog << "Alpha complex is of dimension " << simplex.dimension() <<
" - " << simplex.num_simplices() << " simplices - " <<
simplex.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
for (auto f_simplex : simplex.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << simplex.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << simplex.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
}
return 0;
diff --git a/src/Alpha_complex/example/Fast_alpha_complex_from_off.cpp b/src/Alpha_complex/example/Fast_alpha_complex_from_off.cpp
index f181005a..64728470 100644
--- a/src/Alpha_complex/example/Fast_alpha_complex_from_off.cpp
+++ b/src/Alpha_complex/example/Fast_alpha_complex_from_off.cpp
@@ -35,7 +35,7 @@ int main(int argc, char **argv) {
ouput_file_stream.open(std::string(argv[3]));
streambuffer = ouput_file_stream.rdbuf();
} else {
- streambuffer = std::cout.rdbuf();
+ streambuffer = std::clog.rdbuf();
}
Gudhi::Simplex_tree<> simplex;
diff --git a/src/Alpha_complex/example/Weighted_alpha_complex_3d_from_points.cpp b/src/Alpha_complex/example/Weighted_alpha_complex_3d_from_points.cpp
index fcf80802..c044194e 100644
--- a/src/Alpha_complex/example/Weighted_alpha_complex_3d_from_points.cpp
+++ b/src/Alpha_complex/example/Weighted_alpha_complex_3d_from_points.cpp
@@ -34,18 +34,18 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Alpha complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
+ std::clog << "Alpha complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
<< " simplices - " << simplex.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
for (auto f_simplex : simplex.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> "
+ std::clog << ") -> "
<< "[" << simplex.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
return 0;
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index f2a05e95..ba91998d 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -112,9 +112,6 @@ class Alpha_complex {
typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel;
typedef typename Kernel::Point_dimension_d Point_Dimension;
- // Type required to compute squared radius, or side of bounded sphere on a vector of points.
- typedef typename std::vector<Point_d> Vector_of_CGAL_points;
-
// Vertex_iterator type from CGAL.
typedef typename Delaunay_triangulation::Vertex_iterator CGAL_vertex_iterator;
@@ -124,6 +121,9 @@ class Alpha_complex {
// Structure to switch from simplex tree vertex handle to CGAL vertex iterator.
typedef typename std::vector< CGAL_vertex_iterator > Vector_vertex_iterator;
+ // Numeric type of coordinates in the kernel
+ typedef typename Kernel::FT FT;
+
private:
/** \brief Vertex iterator vector to switch from simplex tree vertex handle to CGAL vertex iterator.
* Vertex handles are inserted sequentially, starting at 0.*/
@@ -132,6 +132,8 @@ class Alpha_complex {
Delaunay_triangulation* triangulation_;
/** \brief Kernel for triangulation_ functions access.*/
Kernel kernel_;
+ /** \brief Cache for geometric constructions: circumcenter and squared radius of a simplex.*/
+ std::vector<std::pair<Point_d, FT>> cache_, old_cache_;
public:
/** \brief Alpha_complex constructor from an OFF file name.
@@ -237,7 +239,7 @@ class Alpha_complex {
for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
if (!triangulation_->is_infinite(*vit)) {
#ifdef DEBUG_TRACES
- std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
+ std::clog << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
#endif // DEBUG_TRACES
vertex_handle_to_iterator_[vit->data()] = vit;
}
@@ -246,18 +248,66 @@ class Alpha_complex {
}
}
+ /** \brief get_point_ returns the point corresponding to the vertex given as parameter.
+ * Only for internal use for faster access.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The point found.
+ */
+ const Point_d& get_point_(std::size_t vertex) const {
+ return vertex_handle_to_iterator_[vertex]->point();
+ }
+
+ /// Return a reference to the circumcenter and circumradius, writing them in the cache if necessary.
+ template<class SimplicialComplexForAlpha>
+ auto& get_cache(SimplicialComplexForAlpha& cplx, typename SimplicialComplexForAlpha::Simplex_handle s) {
+ auto k = cplx.key(s);
+ if(k==cplx.null_key()){
+ k = cache_.size();
+ cplx.assign_key(s, k);
+ // Using a transform_range is slower, currently.
+ thread_local std::vector<Point_d> v;
+ v.clear();
+ for (auto vertex : cplx.simplex_vertex_range(s))
+ v.push_back(get_point_(vertex));
+ Point_d c = kernel_.construct_circumcenter_d_object()(v.cbegin(), v.cend());
+ FT r = kernel_.squared_distance_d_object()(c, v[0]);
+ cache_.emplace_back(std::move(c), std::move(r));
+ }
+ return cache_[k];
+ }
+
+ /// Return the circumradius, either from the old cache or computed, without writing to the cache.
+ template<class SimplicialComplexForAlpha>
+ auto radius(SimplicialComplexForAlpha& cplx, typename SimplicialComplexForAlpha::Simplex_handle s) {
+ auto k = cplx.key(s);
+ if(k!=cplx.null_key())
+ return old_cache_[k].second;
+ // Using a transform_range is slower, currently.
+ thread_local std::vector<Point_d> v;
+ v.clear();
+ for (auto vertex : cplx.simplex_vertex_range(s))
+ v.push_back(get_point_(vertex));
+ return kernel_.compute_squared_radius_d_object()(v.cbegin(), v.cend());
+ }
+
public:
/** \brief Inserts all Delaunay triangulation into the simplicial complex.
- * It also computes the filtration values accordingly to the \ref createcomplexalgorithm
+ * It also computes the filtration values accordingly to the \ref createcomplexalgorithm if default_filtration_value
+ * is not set.
*
* \tparam SimplicialComplexForAlpha must meet `SimplicialComplexForAlpha` concept.
*
* @param[in] complex SimplicialComplexForAlpha to be created.
* @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$, and there is very
- * little point using anything else since it does not save time.
+ * little point using anything else since it does not save time. Useless if `default_filtration_value` is set to
+ * `true`.
* @param[in] exact Exact filtration values computation. Not exact if `Kernel` is not <a target="_blank"
* href="https://doc.cgal.org/latest/Kernel_d/structCGAL_1_1Epeck__d.html">CGAL::Epeck_d</a>.
- *
+ * @param[in] default_filtration_value Set this value to `true` if filtration values are not needed to be computed
+ * (will be set to `NaN`).
+ * Default value is `false` (which means compute the filtration values).
+ *
* @return true if creation succeeds, false otherwise.
*
* @pre Delaunay triangulation must be already constructed with dimension strictly greater than 0.
@@ -269,7 +319,8 @@ class Alpha_complex {
typename Filtration_value = typename SimplicialComplexForAlpha::Filtration_value>
bool create_complex(SimplicialComplexForAlpha& complex,
Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity(),
- bool exact = false) {
+ bool exact = false,
+ bool default_filtration_value = false) {
// From SimplicialComplexForAlpha type required to insert into a simplicial complex (with or without subfaces).
typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
typedef typename SimplicialComplexForAlpha::Simplex_handle Simplex_handle;
@@ -296,19 +347,19 @@ class Alpha_complex {
++cit) {
Vector_vertex vertexVector;
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree insertion ";
+ std::clog << "Simplex_tree insertion ";
#endif // DEBUG_TRACES
for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
if (*vit != nullptr) {
#ifdef DEBUG_TRACES
- std::cout << " " << (*vit)->data();
+ std::clog << " " << (*vit)->data();
#endif // DEBUG_TRACES
// Vector of vertex construction for simplex_tree structure
vertexVector.push_back((*vit)->data());
}
}
#ifdef DEBUG_TRACES
- std::cout << std::endl;
+ std::clog << std::endl;
#endif // DEBUG_TRACES
// Insert each simplex and its subfaces in the simplex tree - filtration is NaN
complex.insert_simplex_and_subfaces(vertexVector, std::numeric_limits<double>::quiet_NaN());
@@ -316,62 +367,48 @@ class Alpha_complex {
}
// --------------------------------------------------------------------------------------------
- // --------------------------------------------------------------------------------------------
- // Will be re-used many times
- Vector_of_CGAL_points pointVector;
- // ### For i : d -> 0
- for (int decr_dim = triangulation_->maximal_dimension(); decr_dim >= 0; decr_dim--) {
- // ### Foreach Sigma of dim i
- for (Simplex_handle f_simplex : complex.skeleton_simplex_range(decr_dim)) {
- int f_simplex_dim = complex.dimension(f_simplex);
- if (decr_dim == f_simplex_dim) {
- pointVector.clear();
-#ifdef DEBUG_TRACES
- std::cout << "Sigma of dim " << decr_dim << " is";
-#endif // DEBUG_TRACES
- for (auto vertex : complex.simplex_vertex_range(f_simplex)) {
- pointVector.push_back(get_point(vertex));
-#ifdef DEBUG_TRACES
- std::cout << " " << vertex;
-#endif // DEBUG_TRACES
- }
-#ifdef DEBUG_TRACES
- std::cout << std::endl;
-#endif // DEBUG_TRACES
- // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma)
- if (std::isnan(complex.filtration(f_simplex))) {
- Filtration_value alpha_complex_filtration = 0.0;
- // No need to compute squared_radius on a single point - alpha is 0.0
- if (f_simplex_dim > 0) {
- // squared_radius function initialization
- Squared_Radius squared_radius = kernel_.compute_squared_radius_d_object();
-
- CGAL::NT_converter<typename Geom_traits::FT, Filtration_value> cv;
- auto sqrad = squared_radius(pointVector.begin(), pointVector.end());
+ if (!default_filtration_value) {
+ // --------------------------------------------------------------------------------------------
+ // ### For i : d -> 0
+ for (int decr_dim = triangulation_->maximal_dimension(); decr_dim >= 0; decr_dim--) {
+ // ### Foreach Sigma of dim i
+ for (Simplex_handle f_simplex : complex.skeleton_simplex_range(decr_dim)) {
+ int f_simplex_dim = complex.dimension(f_simplex);
+ if (decr_dim == f_simplex_dim) {
+ // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma)
+ if (std::isnan(complex.filtration(f_simplex))) {
+ Filtration_value alpha_complex_filtration = 0.0;
+ // No need to compute squared_radius on a single point - alpha is 0.0
+ if (f_simplex_dim > 0) {
+ auto const& sqrad = radius(complex, f_simplex);
#if CGAL_VERSION_NR >= 1050000000
- if(exact) CGAL::exact(sqrad);
+ if(exact) CGAL::exact(sqrad);
#endif
- alpha_complex_filtration = cv(sqrad);
- }
- complex.assign_filtration(f_simplex, alpha_complex_filtration);
+ CGAL::NT_converter<FT, Filtration_value> cv;
+ alpha_complex_filtration = cv(sqrad);
+ }
+ complex.assign_filtration(f_simplex, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << complex.filtration(f_simplex) << std::endl;
+ std::clog << "filt(Sigma) is NaN : filt(Sigma) =" << complex.filtration(f_simplex) << std::endl;
#endif // DEBUG_TRACES
+ }
+ // No need to propagate further, unweighted points all have value 0
+ if (decr_dim > 1)
+ propagate_alpha_filtration(complex, f_simplex);
}
- // No need to propagate further, unweighted points all have value 0
- if (decr_dim > 1)
- propagate_alpha_filtration(complex, f_simplex);
}
+ old_cache_ = std::move(cache_);
+ cache_.clear();
}
+ // --------------------------------------------------------------------------------------------
+
+ // --------------------------------------------------------------------------------------------
+ // As Alpha value is an approximation, we have to make filtration non decreasing while increasing the dimension
+ complex.make_filtration_non_decreasing();
+ // Remove all simplices that have a filtration value greater than max_alpha_square
+ complex.prune_above_filtration(max_alpha_square);
+ // --------------------------------------------------------------------------------------------
}
- // --------------------------------------------------------------------------------------------
-
- // --------------------------------------------------------------------------------------------
- // As Alpha value is an approximation, we have to make filtration non decreasing while increasing the dimension
- complex.make_filtration_non_decreasing();
- // Remove all simplices that have a filtration value greater than max_alpha_square
- complex.prune_above_filtration(max_alpha_square);
- // --------------------------------------------------------------------------------------------
return true;
}
@@ -380,20 +417,18 @@ class Alpha_complex {
void propagate_alpha_filtration(SimplicialComplexForAlpha& complex, Simplex_handle f_simplex) {
// From SimplicialComplexForAlpha type required to assign filtration values.
typedef typename SimplicialComplexForAlpha::Filtration_value Filtration_value;
-#ifdef DEBUG_TRACES
typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
-#endif // DEBUG_TRACES
// ### Foreach Tau face of Sigma
for (auto f_boundary : complex.boundary_simplex_range(f_simplex)) {
#ifdef DEBUG_TRACES
- std::cout << " | --------------------------------------------------\n";
- std::cout << " | Tau ";
+ std::clog << " | --------------------------------------------------\n";
+ std::clog << " | Tau ";
for (auto vertex : complex.simplex_vertex_range(f_boundary)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << "is a face of Sigma\n";
- std::cout << " | isnan(complex.filtration(Tau)=" << std::isnan(complex.filtration(f_boundary)) << std::endl;
+ std::clog << "is a face of Sigma\n";
+ std::clog << " | isnan(complex.filtration(Tau)=" << std::isnan(complex.filtration(f_boundary)) << std::endl;
#endif // DEBUG_TRACES
// ### If filt(Tau) is not NaN
if (!std::isnan(complex.filtration(f_boundary))) {
@@ -402,37 +437,22 @@ class Alpha_complex {
complex.filtration(f_simplex));
complex.assign_filtration(f_boundary, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << complex.filtration(f_boundary) << std::endl;
+ std::clog << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << complex.filtration(f_boundary) << std::endl;
#endif // DEBUG_TRACES
// ### Else
} else {
- // insert the Tau points in a vector for is_gabriel function
- Vector_of_CGAL_points pointVector;
-#ifdef DEBUG_TRACES
- Vertex_handle vertexForGabriel = Vertex_handle();
-#endif // DEBUG_TRACES
- for (auto vertex : complex.simplex_vertex_range(f_boundary)) {
- pointVector.push_back(get_point(vertex));
- }
- // Retrieve the Sigma point that is not part of Tau - parameter for is_gabriel function
- Point_d point_for_gabriel;
- for (auto vertex : complex.simplex_vertex_range(f_simplex)) {
- point_for_gabriel = get_point(vertex);
- if (std::find(pointVector.begin(), pointVector.end(), point_for_gabriel) == pointVector.end()) {
-#ifdef DEBUG_TRACES
- // vertex is not found in Tau
- vertexForGabriel = vertex;
-#endif // DEBUG_TRACES
- // No need to continue loop
- break;
- }
- }
- // is_gabriel function initialization
- Is_Gabriel is_gabriel = kernel_.side_of_bounded_sphere_d_object();
- bool is_gab = is_gabriel(pointVector.begin(), pointVector.end(), point_for_gabriel)
- != CGAL::ON_BOUNDED_SIDE;
+ // Find which vertex of f_simplex is missing in f_boundary. We could actually write a variant of boundary_simplex_range that gives pairs (f_boundary, vertex). We rely on the fact that simplex_vertex_range is sorted.
+ auto longlist = complex.simplex_vertex_range(f_simplex);
+ auto shortlist = complex.simplex_vertex_range(f_boundary);
+ auto longiter = std::begin(longlist);
+ auto shortiter = std::begin(shortlist);
+ auto enditer = std::end(shortlist);
+ while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; }
+ Vertex_handle extra = *longiter;
+ auto const& cache=get_cache(complex, f_boundary);
+ bool is_gab = kernel_.squared_distance_d_object()(cache.first, get_point_(extra)) >= cache.second;
#ifdef DEBUG_TRACES
- std::cout << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << vertexForGabriel << std::endl;
+ std::clog << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << extra << std::endl;
#endif // DEBUG_TRACES
// ### If Tau is not Gabriel of Sigma
if (false == is_gab) {
@@ -440,7 +460,7 @@ class Alpha_complex {
Filtration_value alpha_complex_filtration = complex.filtration(f_simplex);
complex.assign_filtration(f_boundary, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << " | filt(Tau) = filt(Sigma) = " << complex.filtration(f_boundary) << std::endl;
+ std::clog << " | filt(Tau) = filt(Sigma) = " << complex.filtration(f_boundary) << std::endl;
#endif // DEBUG_TRACES
}
}
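
The replacement above locates the vertex of Sigma that is missing from Tau by walking both sorted vertex ranges in parallel, then tests the Gabriel property as "squared distance from the cached circumcentre of Tau to that extra point is at least the cached squared radius" (get_cache is assumed, from the comparison against cache.second, to return that centre/radius pair). A minimal sketch of the same two-pointer scan on plain sorted vectors:

#include <cassert>
#include <vector>

// Returns the single element of `longer` (sorted) that is absent from `shorter` (sorted),
// relying on the same invariant as the patch: both ranges are sorted and differ by one element.
int find_extra_vertex(const std::vector<int>& longer, const std::vector<int>& shorter) {
  auto longiter = longer.begin();
  auto shortiter = shorter.begin();
  while (shortiter != shorter.end() && *longiter == *shortiter) { ++longiter; ++shortiter; }
  return *longiter;  // either the first mismatch or the trailing element of `longer`
}

int main() {
  assert(find_extra_vertex({1, 3, 5, 7}, {1, 3, 7}) == 5);  // mismatch in the middle
  assert(find_extra_vertex({1, 3, 7, 9}, {1, 3, 7}) == 9);  // extra element at the end
  return 0;
}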
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h
index 7f96c94c..c19ebb79 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex_3d.h
@@ -61,10 +61,7 @@ namespace Gudhi {
namespace alpha_complex {
-#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
-thread_local
-#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- double RELATIVE_PRECISION_OF_TO_DOUBLE = 0.00001;
+thread_local double RELATIVE_PRECISION_OF_TO_DOUBLE = 0.00001;
// Value_from_iterator returns the filtration value from an iterator on alpha shapes values
//
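
The removed #ifdef guarded the thread_local keyword for pre-C++11 toolchains; keeping the qualifier unconditionally assumes every supported compiler provides it. A minimal sketch of what thread_local storage gives each thread, independent of this header:

#include <iostream>
#include <thread>

thread_local int per_thread_value = 0;  // every thread gets its own zero-initialized copy

int main() {
  std::thread worker([] { per_thread_value = 42; });  // writes only the worker thread's copy
  worker.join();
  std::cout << per_thread_value << std::endl;  // prints 0: the main thread's copy is untouched
  return 0;
}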
@@ -472,7 +469,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
alpha_shape_3_ptr_->filtration_with_alpha_values(dispatcher);
#ifdef DEBUG_TRACES
- std::cout << "filtration_with_alpha_values returns : " << objects.size() << " objects" << std::endl;
+ std::clog << "filtration_with_alpha_values returns : " << objects.size() << " objects" << std::endl;
#endif // DEBUG_TRACES
using Alpha_value_iterator = typename std::vector<FT>::const_iterator;
@@ -484,7 +481,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
for (auto i = 0; i < 4; i++) {
#ifdef DEBUG_TRACES
- std::cout << "from cell[" << i << "] - Point coordinates (" << (*cell)->vertex(i)->point() << ")"
+ std::clog << "from cell[" << i << "] - Point coordinates (" << (*cell)->vertex(i)->point() << ")"
<< std::endl;
#endif // DEBUG_TRACES
vertex_list.push_back((*cell)->vertex(i));
@@ -496,7 +493,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
for (auto i = 0; i < 4; i++) {
if ((*facet).second != i) {
#ifdef DEBUG_TRACES
- std::cout << "from facet=[" << i << "] - Point coordinates (" << (*facet).first->vertex(i)->point() << ")"
+ std::clog << "from facet=[" << i << "] - Point coordinates (" << (*facet).first->vertex(i)->point() << ")"
<< std::endl;
#endif // DEBUG_TRACES
vertex_list.push_back((*facet).first->vertex(i));
@@ -508,7 +505,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
} else if (const Edge* edge = CGAL::object_cast<Edge>(&object_iterator)) {
for (auto i : {(*edge).second, (*edge).third}) {
#ifdef DEBUG_TRACES
- std::cout << "from edge[" << i << "] - Point coordinates (" << (*edge).first->vertex(i)->point() << ")"
+ std::clog << "from edge[" << i << "] - Point coordinates (" << (*edge).first->vertex(i)->point() << ")"
<< std::endl;
#endif // DEBUG_TRACES
vertex_list.push_back((*edge).first->vertex(i));
@@ -519,7 +516,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
} else if (const Alpha_vertex_handle* vertex = CGAL::object_cast<Alpha_vertex_handle>(&object_iterator)) {
#ifdef DEBUG_TRACES
count_vertices++;
- std::cout << "from vertex - Point coordinates (" << (*vertex)->point() << ")" << std::endl;
+ std::clog << "from vertex - Point coordinates (" << (*vertex)->point() << ")" << std::endl;
#endif // DEBUG_TRACES
vertex_list.push_back((*vertex));
}
@@ -531,7 +528,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
// alpha shape not found
Complex_vertex_handle vertex = map_cgal_simplex_tree.size();
#ifdef DEBUG_TRACES
- std::cout << "Point (" << the_alpha_shape_vertex->point() << ") not found - insert new vertex id " << vertex
+ std::clog << "Point (" << the_alpha_shape_vertex->point() << ") not found - insert new vertex id " << vertex
<< std::endl;
#endif // DEBUG_TRACES
the_simplex.push_back(vertex);
@@ -540,7 +537,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
// alpha shape found
Complex_vertex_handle vertex = the_map_iterator->second;
#ifdef DEBUG_TRACES
- std::cout << "Point (" << the_alpha_shape_vertex->point() << ") found as vertex id " << vertex << std::endl;
+ std::clog << "Point (" << the_alpha_shape_vertex->point() << ") found as vertex id " << vertex << std::endl;
#endif // DEBUG_TRACES
the_simplex.push_back(vertex);
}
@@ -549,7 +546,7 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
Filtration_value filtr = Value_from_iterator<Complexity>::perform(alpha_value_iterator);
#ifdef DEBUG_TRACES
- std::cout << "filtration = " << filtr << std::endl;
+ std::clog << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
complex.insert_simplex(the_simplex, static_cast<Filtration_value>(filtr));
GUDHI_CHECK(alpha_value_iterator != alpha_values.end(), "CGAL provided more simplices than values");
@@ -557,10 +554,10 @@ Weighted_alpha_complex_3d::Weighted_point_3 wp0(Weighted_alpha_complex_3d::Bare_
}
#ifdef DEBUG_TRACES
- std::cout << "vertices \t" << count_vertices << std::endl;
- std::cout << "edges \t\t" << count_edges << std::endl;
- std::cout << "facets \t\t" << count_facets << std::endl;
- std::cout << "cells \t\t" << count_cells << std::endl;
+ std::clog << "vertices \t" << count_vertices << std::endl;
+ std::clog << "edges \t\t" << count_edges << std::endl;
+ std::clog << "facets \t\t" << count_facets << std::endl;
+ std::clog << "cells \t\t" << count_cells << std::endl;
#endif // DEBUG_TRACES
// --------------------------------------------------------------------------------------------
// As Alpha value is an approximation, we have to make filtration non decreasing while increasing the dimension
diff --git a/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp
index cd698a27..a4ecb6ad 100644
--- a/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_3d_unit_test.cpp
@@ -54,7 +54,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
// -----------------
// Fast version
// -----------------
- std::cout << "Fast alpha complex 3d" << std::endl;
+ std::clog << "Fast alpha complex 3d" << std::endl;
std::vector<Fast_alpha_complex_3d::Bare_point_3> points = get_points<Fast_alpha_complex_3d::Bare_point_3>();
Fast_alpha_complex_3d alpha_complex(points);
@@ -79,7 +79,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
// -----------------
// Exact version
// -----------------
- std::cout << "Exact alpha complex 3d" << std::endl;
+ std::clog << "Exact alpha complex 3d" << std::endl;
std::vector<Exact_alpha_complex_3d::Bare_point_3> exact_points = get_points<Exact_alpha_complex_3d::Bare_point_3>();
Exact_alpha_complex_3d exact_alpha_complex(exact_points);
@@ -105,13 +105,13 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
// ---------------------
// Compare both versions
// ---------------------
- std::cout << "Exact Alpha complex 3d is of dimension " << exact_stree.dimension() << " - Fast is "
+ std::clog << "Exact Alpha complex 3d is of dimension " << exact_stree.dimension() << " - Fast is "
<< stree.dimension() << std::endl;
BOOST_CHECK(exact_stree.dimension() == stree.dimension());
- std::cout << "Exact Alpha complex 3d num_simplices " << exact_stree.num_simplices() << " - Fast is "
+ std::clog << "Exact Alpha complex 3d num_simplices " << exact_stree.num_simplices() << " - Fast is "
<< stree.num_simplices() << std::endl;
BOOST_CHECK(exact_stree.num_simplices() == stree.num_simplices());
- std::cout << "Exact Alpha complex 3d num_vertices " << exact_stree.num_vertices() << " - Fast is "
+ std::clog << "Exact Alpha complex 3d num_vertices " << exact_stree.num_vertices() << " - Fast is "
<< stree.num_vertices() << std::endl;
BOOST_CHECK(exact_stree.num_vertices() == stree.num_vertices());
@@ -119,18 +119,18 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
while (sh != stree.filtration_simplex_range().end()) {
std::vector<int> simplex;
std::vector<int> exact_simplex;
- std::cout << "Fast ( ";
+ std::clog << "Fast ( ";
for (auto vertex : stree.simplex_vertex_range(*sh)) {
simplex.push_back(vertex);
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> [" << stree.filtration(*sh) << "] ";
+ std::clog << ") -> [" << stree.filtration(*sh) << "] ";
// Find it in the exact structure
auto sh_exact = exact_stree.find(simplex);
BOOST_CHECK(sh_exact != exact_stree.null_simplex());
- std::cout << " versus [" << exact_stree.filtration(sh_exact) << "] " << std::endl;
+ std::clog << " versus [" << exact_stree.filtration(sh_exact) << "] " << std::endl;
// Exact and non-exact versions are not exactly the same due to float comparison
GUDHI_TEST_FLOAT_EQUALITY_CHECK(exact_stree.filtration(sh_exact), stree.filtration(*sh));
@@ -139,7 +139,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
// -----------------
// Safe version
// -----------------
- std::cout << "Safe alpha complex 3d" << std::endl;
+ std::clog << "Safe alpha complex 3d" << std::endl;
std::vector<Safe_alpha_complex_3d::Bare_point_3> safe_points = get_points<Safe_alpha_complex_3d::Bare_point_3>();
Safe_alpha_complex_3d safe_alpha_complex(safe_points);
@@ -165,13 +165,13 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
// ---------------------
// Compare both versions
// ---------------------
- std::cout << "Safe Alpha complex 3d is of dimension " << safe_stree.dimension() << " - Fast is "
+ std::clog << "Safe Alpha complex 3d is of dimension " << safe_stree.dimension() << " - Fast is "
<< stree.dimension() << std::endl;
BOOST_CHECK(safe_stree.dimension() == stree.dimension());
- std::cout << "Safe Alpha complex 3d num_simplices " << safe_stree.num_simplices() << " - Fast is "
+ std::clog << "Safe Alpha complex 3d num_simplices " << safe_stree.num_simplices() << " - Fast is "
<< stree.num_simplices() << std::endl;
BOOST_CHECK(safe_stree.num_simplices() == stree.num_simplices());
- std::cout << "Safe Alpha complex 3d num_vertices " << safe_stree.num_vertices() << " - Fast is "
+ std::clog << "Safe Alpha complex 3d num_vertices " << safe_stree.num_vertices() << " - Fast is "
<< stree.num_vertices() << std::endl;
BOOST_CHECK(safe_stree.num_vertices() == stree.num_vertices());
@@ -179,18 +179,18 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_3d_from_points) {
while (safe_sh != stree.filtration_simplex_range().end()) {
std::vector<int> simplex;
std::vector<int> exact_simplex;
- std::cout << "Fast ( ";
+ std::clog << "Fast ( ";
for (auto vertex : stree.simplex_vertex_range(*safe_sh)) {
simplex.push_back(vertex);
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> [" << stree.filtration(*safe_sh) << "] ";
+ std::clog << ") -> [" << stree.filtration(*safe_sh) << "] ";
// Find it in the exact structure
auto sh_exact = safe_stree.find(simplex);
BOOST_CHECK(sh_exact != safe_stree.null_simplex());
- std::cout << " versus [" << safe_stree.filtration(sh_exact) << "] " << std::endl;
+ std::clog << " versus [" << safe_stree.filtration(sh_exact) << "] " << std::endl;
// Exact and non-exact versions are not exactly the same due to float comparison
GUDHI_TEST_FLOAT_EQUALITY_CHECK(safe_stree.filtration(sh_exact), stree.filtration(*safe_sh), 1e-15);
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 27b671dd..4b37e4bd 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -48,7 +48,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of
// ----------------------------------------------------------------------------
std::string off_file_name("alphacomplexdoc.off");
double max_alpha_square_value = 60.0;
- std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_file(off_file_name);
@@ -56,29 +56,29 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of
Gudhi::Simplex_tree<> simplex_tree_60;
BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
- std::cout << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
+ std::clog << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
BOOST_CHECK(simplex_tree_60.dimension() == 2);
- std::cout << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
+ std::clog << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
- std::cout << "simplex_tree_60.num_simplices()=" << simplex_tree_60.num_simplices() << std::endl;
+ std::clog << "simplex_tree_60.num_simplices()=" << simplex_tree_60.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree_60.num_simplices() == 25);
max_alpha_square_value = 59.0;
- std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
Gudhi::Simplex_tree<> simplex_tree_59;
BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_59, max_alpha_square_value));
- std::cout << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
+ std::clog << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
BOOST_CHECK(simplex_tree_59.dimension() == 2);
- std::cout << "simplex_tree_59.num_vertices()=" << simplex_tree_59.num_vertices() << std::endl;
+ std::clog << "simplex_tree_59.num_vertices()=" << simplex_tree_59.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree_59.num_vertices() == 7);
- std::cout << "simplex_tree_59.num_simplices()=" << simplex_tree_59.num_simplices() << std::endl;
+ std::clog << "simplex_tree_59.num_simplices()=" << simplex_tree_59.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree_59.num_simplices() == 23);
}
@@ -115,30 +115,30 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
// ----------------------------------------------------------------------------
Gudhi::alpha_complex::Alpha_complex<Kernel_4> alpha_complex_from_points(points);
- std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
+ std::clog << "========== Alpha_complex_from_points ==========" << std::endl;
Gudhi::Simplex_tree<> simplex_tree;
BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
// Another way to check num_simplices
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
num_simplices++;
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
BOOST_CHECK(num_simplices == 15);
- std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ std::clog << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree.num_simplices() == 15);
- std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ std::clog << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
BOOST_CHECK(simplex_tree.dimension() == 3);
- std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree.num_vertices() == points.size());
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
@@ -162,22 +162,22 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
Point_4 p0 = alpha_complex_from_points.get_point(0);
- std::cout << "alpha_complex_from_points.get_point(0)=" << p0 << std::endl;
+ std::clog << "alpha_complex_from_points.get_point(0)=" << p0 << std::endl;
BOOST_CHECK(4 == p0.dimension());
BOOST_CHECK(is_point_in_list(points, p0));
Point_4 p1 = alpha_complex_from_points.get_point(1);
- std::cout << "alpha_complex_from_points.get_point(1)=" << p1 << std::endl;
+ std::clog << "alpha_complex_from_points.get_point(1)=" << p1 << std::endl;
BOOST_CHECK(4 == p1.dimension());
BOOST_CHECK(is_point_in_list(points, p1));
Point_4 p2 = alpha_complex_from_points.get_point(2);
- std::cout << "alpha_complex_from_points.get_point(2)=" << p2 << std::endl;
+ std::clog << "alpha_complex_from_points.get_point(2)=" << p2 << std::endl;
BOOST_CHECK(4 == p2.dimension());
BOOST_CHECK(is_point_in_list(points, p2));
Point_4 p3 = alpha_complex_from_points.get_point(3);
- std::cout << "alpha_complex_from_points.get_point(3)=" << p3 << std::endl;
+ std::clog << "alpha_complex_from_points.get_point(3)=" << p3 << std::endl;
BOOST_CHECK(4 == p3.dimension());
BOOST_CHECK(is_point_in_list(points, p3));
@@ -188,30 +188,27 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
// Test after prune_above_filtration
bool modified = simplex_tree.prune_above_filtration(0.6);
- if (modified) {
- simplex_tree.initialize_filtration();
- }
BOOST_CHECK(modified);
// Another way to check num_simplices
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
num_simplices = 0;
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
num_simplices++;
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
BOOST_CHECK(num_simplices == 10);
- std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ std::clog << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree.num_simplices() == 10);
- std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ std::clog << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
BOOST_CHECK(simplex_tree.dimension() == 1);
- std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree.num_vertices() == 4);
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
@@ -231,7 +228,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, list_of_kernel_variants) {
- std::cout << "========== Alpha_complex_from_empty_points ==========" << std::endl;
+ std::clog << "========== Alpha_complex_from_empty_points ==========" << std::endl;
// ----------------------------------------------------------------------------
// Init of an empty list of points
@@ -249,13 +246,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, lis
Gudhi::Simplex_tree<> simplex_tree;
BOOST_CHECK(!alpha_complex_from_points.create_complex(simplex_tree));
- std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ std::clog << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree.num_simplices() == 0);
- std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ std::clog << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
BOOST_CHECK(simplex_tree.dimension() == -1);
- std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree.num_vertices() == points.size());
}
@@ -264,7 +261,7 @@ using Exact_kernel_2 = CGAL::Epeck_d< CGAL::Dimension_tag<2> >;
using list_of_kernel_2_variants = boost::mpl::list<Inexact_kernel_2, Exact_kernel_2>;
BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_with_duplicated_points, TestedKernel, list_of_kernel_2_variants) {
- std::cout << "========== Alpha_complex_with_duplicated_points ==========" << std::endl;
+ std::clog << "========== Alpha_complex_with_duplicated_points ==========" << std::endl;
using Point = typename TestedKernel::Point_d;
using Vector_of_points = std::vector<Point>;
@@ -287,14 +284,14 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_with_duplicated_points, TestedKernel
// ----------------------------------------------------------------------------
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
- std::cout << "Init" << std::endl;
+ std::clog << "Init" << std::endl;
Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
Gudhi::Simplex_tree<> simplex_tree;
- std::cout << "create_complex" << std::endl;
+ std::clog << "create_complex" << std::endl;
BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
- std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices()
+ std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices()
<< std::endl;
BOOST_CHECK(simplex_tree.num_vertices() < points.size());
}
diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt
index 0476c6d4..fe4b23e4 100644
--- a/src/Alpha_complex/test/CMakeLists.txt
+++ b/src/Alpha_complex/test/CMakeLists.txt
@@ -8,11 +8,15 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
add_executable ( Alpha_complex_test_unit Alpha_complex_unit_test.cpp )
target_link_libraries(Alpha_complex_test_unit ${CGAL_LIBRARY})
+ add_executable ( Delaunay_complex_test_unit Delaunay_complex_unit_test.cpp )
+ target_link_libraries(Delaunay_complex_test_unit ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Alpha_complex_test_unit ${TBB_LIBRARIES})
+ target_link_libraries(Delaunay_complex_test_unit ${TBB_LIBRARIES})
endif()
gudhi_add_boost_test(Alpha_complex_test_unit)
+ gudhi_add_boost_test(Delaunay_complex_test_unit)
add_executable ( Alpha_complex_3d_test_unit Alpha_complex_3d_unit_test.cpp )
target_link_libraries(Alpha_complex_3d_test_unit ${CGAL_LIBRARY})
diff --git a/src/Alpha_complex/test/Delaunay_complex_unit_test.cpp b/src/Alpha_complex/test/Delaunay_complex_unit_test.cpp
new file mode 100644
index 00000000..c1cc1fab
--- /dev/null
+++ b/src/Alpha_complex/test/Delaunay_complex_unit_test.cpp
@@ -0,0 +1,68 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2020 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "delaunay_complex"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Epeck_d.h>
+
+#include <vector>
+#include <limits> // NaN
+#include <cmath>
+
+#include <gudhi/Alpha_complex.h>
+// to construct a simplex_tree from Delaunay_triangulation
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Unitary_tests_utils.h>
+#include <gudhi/random_point_generators.h>
+
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epeck_d< CGAL::Dynamic_dimension_tag > Exact_kernel_d;
+// Use static dimension_tag for the user not to be able to set dimension
+typedef CGAL::Epeck_d< CGAL::Dimension_tag<5> > Exact_kernel_s;
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Inexact_kernel_d;
+// Use static dimension_tag for the user not to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dimension_tag<5> > Inexact_kernel_s;
+// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
+
+typedef boost::mpl::list<Exact_kernel_d, Exact_kernel_s, Inexact_kernel_d, Inexact_kernel_s> list_of_kernel_variants;
+
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Simplex_handle = Simplex_tree::Simplex_handle;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Delaunay_complex_from_points, TestedKernel, list_of_kernel_variants) {
+ std::cout << "*****************************************************************************************************";
+ using Point = typename TestedKernel::Point_d;
+ std::vector<Point> points;
+ // 10 points on a unit 4-sphere in R^5
+ points = Gudhi::generate_points_on_sphere_d<TestedKernel>(10, 5, 1.);
+
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex(points);
+
+ // Alpha complex
+ Simplex_tree stree_from_alpha_complex;
+ BOOST_CHECK(alpha_complex.create_complex(stree_from_alpha_complex));
+
+ // Delaunay complex
+ Simplex_tree stree_from_delaunay_complex;
+ BOOST_CHECK(alpha_complex.create_complex(stree_from_delaunay_complex, 0., false, true));
+
+ // Check all the simplices from alpha complex are in the Delaunay complex
+ for (auto f_simplex : stree_from_alpha_complex.complex_simplex_range()) {
+ Simplex_handle sh = stree_from_delaunay_complex.find(stree_from_alpha_complex.simplex_vertex_range(f_simplex));
+ BOOST_CHECK(sh != stree_from_delaunay_complex.null_simplex());
+ BOOST_CHECK(std::isnan(stree_from_delaunay_complex.filtration(sh)));
+ }
+}
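
From the call above, create_complex with the fourth argument set to true appears to build the full Delaunay complex and leave every simplex at the default (NaN) filtration value, which the loop then verifies. A usage sketch under that assumption (the parameter meanings are inferred, not documented here):

#include <gudhi/Alpha_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epeck_d.h>
#include <vector>

int main() {
  using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
  using Point = Kernel::Point_d;
  std::vector<Point> points = {Point(1., 1.), Point(7., 0.), Point(4., 6.), Point(9., 6.)};

  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex(points);
  Gudhi::Simplex_tree<> delaunay;
  // Assumed meaning of the arguments, mirroring the test above:
  // max alpha square = 0. (ignored), exact filtration values = false, default (NaN) filtration values = true.
  alpha_complex.create_complex(delaunay, 0., false, true);
  return 0;
}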
diff --git a/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp
index 731763fa..9eef920b 100644
--- a/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp
+++ b/src/Alpha_complex/test/Periodic_alpha_complex_3d_unit_test.cpp
@@ -43,14 +43,14 @@ typedef boost::mpl::list<Fast_periodic_alpha_complex_3d, Safe_periodic_alpha_com
periodic_variants_type_list;
BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_periodic_throw, Periodic_alpha_complex_3d, periodic_variants_type_list) {
- std::cout << "Periodic alpha complex 3d exception throw" << std::endl;
+ std::clog << "Periodic alpha complex 3d exception throw" << std::endl;
using Bare_point_3 = typename Periodic_alpha_complex_3d::Bare_point_3;
std::vector<Bare_point_3> p_points;
// Not important, this is not what we want to check
p_points.push_back(Bare_point_3(0.0, 0.0, 0.0));
- std::cout << "Check exception throw in debug mode" << std::endl;
+ std::clog << "Check exception throw in debug mode" << std::endl;
// Check it throws an exception when the cuboid is not iso
BOOST_CHECK_THROW(Periodic_alpha_complex_3d periodic_alpha_complex(p_points, 0., 0., 0., 0.9, 1., 1.),
std::invalid_argument);
@@ -71,7 +71,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_periodic) {
// ---------------------
// Fast periodic version
// ---------------------
- std::cout << "Fast periodic alpha complex 3d" << std::endl;
+ std::clog << "Fast periodic alpha complex 3d" << std::endl;
using Creator = CGAL::Creator_uniform_3<double, Fast_periodic_alpha_complex_3d::Bare_point_3>;
CGAL::Random random(7);
@@ -106,7 +106,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_periodic) {
// ----------------------
// Exact periodic version
// ----------------------
- std::cout << "Exact periodic alpha complex 3d" << std::endl;
+ std::clog << "Exact periodic alpha complex 3d" << std::endl;
std::vector<Exact_periodic_alpha_complex_3d::Bare_point_3> e_p_points;
@@ -122,13 +122,13 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_periodic) {
// ---------------------
// Compare both versions
// ---------------------
- std::cout << "Exact periodic alpha complex 3d is of dimension " << exact_stree.dimension() << " - Non exact is "
+ std::clog << "Exact periodic alpha complex 3d is of dimension " << exact_stree.dimension() << " - Non exact is "
<< stree.dimension() << std::endl;
BOOST_CHECK(exact_stree.dimension() == stree.dimension());
- std::cout << "Exact periodic alpha complex 3d num_simplices " << exact_stree.num_simplices() << " - Non exact is "
+ std::clog << "Exact periodic alpha complex 3d num_simplices " << exact_stree.num_simplices() << " - Non exact is "
<< stree.num_simplices() << std::endl;
BOOST_CHECK(exact_stree.num_simplices() == stree.num_simplices());
- std::cout << "Exact periodic alpha complex 3d num_vertices " << exact_stree.num_vertices() << " - Non exact is "
+ std::clog << "Exact periodic alpha complex 3d num_vertices " << exact_stree.num_vertices() << " - Non exact is "
<< stree.num_vertices() << std::endl;
BOOST_CHECK(exact_stree.num_vertices() == stree.num_vertices());
@@ -155,7 +155,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_periodic) {
// ----------------------
// Safe periodic version
// ----------------------
- std::cout << "Safe periodic alpha complex 3d" << std::endl;
+ std::clog << "Safe periodic alpha complex 3d" << std::endl;
std::vector<Safe_periodic_alpha_complex_3d::Bare_point_3> s_p_points;
diff --git a/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp
index 8035f6e8..6b31bea6 100644
--- a/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp
+++ b/src/Alpha_complex/test/Weighted_alpha_complex_3d_unit_test.cpp
@@ -55,13 +55,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted_throw, Weighted_alpha_compl
// weights size is different from w_points size to make weighted Alpha_complex_3d throw in debug mode
std::vector<double> weights = {0.01, 0.005, 0.006, 0.01, 0.009, 0.001};
- std::cout << "Check exception throw in debug mode" << std::endl;
+ std::clog << "Check exception throw in debug mode" << std::endl;
BOOST_CHECK_THROW(Weighted_alpha_complex_3d wac(w_points, weights), std::invalid_argument);
}
#endif
BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted, Weighted_alpha_complex_3d, weighted_variants_type_list) {
- std::cout << "Weighted alpha complex 3d from points and weights" << std::endl;
+ std::clog << "Weighted alpha complex 3d from points and weights" << std::endl;
using Bare_point_3 = typename Weighted_alpha_complex_3d::Bare_point_3;
std::vector<Bare_point_3> w_points;
w_points.push_back(Bare_point_3(0.0, 0.0, 0.0));
@@ -78,7 +78,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted, Weighted_alpha_complex_3d,
Gudhi::Simplex_tree<> stree;
alpha_complex_p_a_w.create_complex(stree);
- std::cout << "Weighted alpha complex 3d from weighted points" << std::endl;
+ std::clog << "Weighted alpha complex 3d from weighted points" << std::endl;
using Weighted_point_3 = typename Weighted_alpha_complex_3d::Weighted_point_3;
std::vector<Weighted_point_3> weighted_points;
@@ -112,13 +112,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted, Weighted_alpha_complex_3d,
// ---------------------
// Compare both versions
// ---------------------
- std::cout << "Weighted alpha complex 3d is of dimension " << stree_bis.dimension() << " - versus "
+ std::clog << "Weighted alpha complex 3d is of dimension " << stree_bis.dimension() << " - versus "
<< stree.dimension() << std::endl;
BOOST_CHECK(stree_bis.dimension() == stree.dimension());
- std::cout << "Weighted alpha complex 3d num_simplices " << stree_bis.num_simplices() << " - versus "
+ std::clog << "Weighted alpha complex 3d num_simplices " << stree_bis.num_simplices() << " - versus "
<< stree.num_simplices() << std::endl;
BOOST_CHECK(stree_bis.num_simplices() == stree.num_simplices());
- std::cout << "Weighted alpha complex 3d num_vertices " << stree_bis.num_vertices() << " - versus "
+ std::clog << "Weighted alpha complex 3d num_vertices " << stree_bis.num_vertices() << " - versus "
<< stree.num_vertices() << std::endl;
BOOST_CHECK(stree_bis.num_vertices() == stree.num_vertices());
@@ -127,18 +127,18 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted, Weighted_alpha_complex_3d,
std::vector<int> simplex;
std::vector<int> exact_simplex;
#ifdef DEBUG_TRACES
- std::cout << " ( ";
+ std::clog << " ( ";
#endif
for (auto vertex : stree.simplex_vertex_range(*sh)) {
simplex.push_back(vertex);
#ifdef DEBUG_TRACES
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
#endif
}
#ifdef DEBUG_TRACES
- std::cout << ") -> "
+ std::clog << ") -> "
<< "[" << stree.filtration(*sh) << "] ";
- std::cout << std::endl;
+ std::clog << std::endl;
#endif
// Find it in the exact structure
diff --git a/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp b/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp
index b09e92d5..610b9f3d 100644
--- a/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp
+++ b/src/Alpha_complex/test/Weighted_periodic_alpha_complex_3d_unit_test.cpp
@@ -45,7 +45,7 @@ typedef boost::mpl::list<Fast_weighted_periodic_alpha_complex_3d, Exact_weighted
#ifdef GUDHI_DEBUG
BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted_periodic_throw, Weighted_periodic_alpha_complex_3d,
wp_variants_type_list) {
- std::cout << "Weighted periodic alpha complex 3d exception throw" << std::endl;
+ std::clog << "Weighted periodic alpha complex 3d exception throw" << std::endl;
using Creator = CGAL::Creator_uniform_3<double, typename Weighted_periodic_alpha_complex_3d::Bare_point_3>;
CGAL::Random random(7);
@@ -62,7 +62,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted_periodic_throw, Weighted_pe
p_weights.push_back(random.get_double(0., 0.01));
}
- std::cout << "Cuboid is not iso exception" << std::endl;
+ std::clog << "Cuboid is not iso exception" << std::endl;
// Check it throws an exception when the cuboid is not iso
BOOST_CHECK_THROW(
Weighted_periodic_alpha_complex_3d wp_alpha_complex(wp_points, p_weights, -1., -1., -1., 0.9, 1., 1.),
@@ -83,7 +83,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted_periodic_throw, Weighted_pe
Weighted_periodic_alpha_complex_3d wp_alpha_complex(wp_points, p_weights, -1., -1., -1., 1., 1., 1.1),
std::invalid_argument);
- std::cout << "0 <= point.weight() < 1/64 * domain_size * domain_size exception" << std::endl;
+ std::clog << "0 <= point.weight() < 1/64 * domain_size * domain_size exception" << std::endl;
// Weights must be in range ]0, 1/64 = 0.015625[
double temp = p_weights[25];
p_weights[25] = 1.0;
@@ -97,7 +97,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_weighted_periodic_throw, Weighted_pe
std::invalid_argument);
p_weights[14] = temp;
- std::cout << "wp_points and p_weights size exception" << std::endl;
+ std::clog << "wp_points and p_weights size exception" << std::endl;
// Weights and points must have the same size
// + 1
p_weights.push_back(1e-10);
@@ -115,7 +115,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_weighted_periodic) {
// ---------------------
// Fast weighted periodic version
// ---------------------
- std::cout << "Fast weighted periodic alpha complex 3d" << std::endl;
+ std::clog << "Fast weighted periodic alpha complex 3d" << std::endl;
using Creator = CGAL::Creator_uniform_3<double, Fast_weighted_periodic_alpha_complex_3d::Bare_point_3>;
CGAL::Random random(7);
@@ -140,7 +140,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_weighted_periodic) {
// ----------------------
// Exact weighted periodic version
// ----------------------
- std::cout << "Exact weighted periodic alpha complex 3d" << std::endl;
+ std::clog << "Exact weighted periodic alpha complex 3d" << std::endl;
std::vector<Exact_weighted_periodic_alpha_complex_3d::Bare_point_3> e_p_points;
@@ -156,13 +156,13 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_weighted_periodic) {
// ---------------------
// Compare both versions
// ---------------------
- std::cout << "Exact weighted periodic alpha complex 3d is of dimension " << exact_stree.dimension()
+ std::clog << "Exact weighted periodic alpha complex 3d is of dimension " << exact_stree.dimension()
<< " - Non exact is " << stree.dimension() << std::endl;
BOOST_CHECK(exact_stree.dimension() == stree.dimension());
- std::cout << "Exact weighted periodic alpha complex 3d num_simplices " << exact_stree.num_simplices()
+ std::clog << "Exact weighted periodic alpha complex 3d num_simplices " << exact_stree.num_simplices()
<< " - Non exact is " << stree.num_simplices() << std::endl;
BOOST_CHECK(exact_stree.num_simplices() == stree.num_simplices());
- std::cout << "Exact weighted periodic alpha complex 3d num_vertices " << exact_stree.num_vertices()
+ std::clog << "Exact weighted periodic alpha complex 3d num_vertices " << exact_stree.num_vertices()
<< " - Non exact is " << stree.num_vertices() << std::endl;
BOOST_CHECK(exact_stree.num_vertices() == stree.num_vertices());
@@ -189,7 +189,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_weighted_periodic) {
// ----------------------
// Safe weighted periodic version
// ----------------------
- std::cout << "Safe weighted periodic alpha complex 3d" << std::endl;
+ std::clog << "Safe weighted periodic alpha complex 3d" << std::endl;
std::vector<Safe_weighted_periodic_alpha_complex_3d::Bare_point_3> s_p_points;
diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
index 929fc2e8..91899040 100644
--- a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
@@ -222,10 +222,7 @@ int main(int argc, char **argv) {
break;
}
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
-
- std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
+ std::clog << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(simplex_tree, true);
// initializes the coefficient field for homology
@@ -237,7 +234,7 @@ int main(int argc, char **argv) {
if (output_file_diag.empty()) {
pcoh.output_diagram();
} else {
- std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::clog << "Result in file: " << output_file_diag << std::endl;
std::ofstream out(output_file_diag);
pcoh.output_diagram(out);
out.close();
@@ -266,7 +263,7 @@ void program_options(int argc, char *argv[], std::string &off_file_points, bool
"cuboid-file,c", po::value<std::string>(&cuboid_file),
"Name of file describing the periodic domain. Format is:\n min_hx min_hy min_hz\n max_hx max_hy max_hz")(
"output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-alpha-square-value,r",
po::value<Filtration_value>(&alpha_square_max_value)
->default_value(std::numeric_limits<Filtration_value>::infinity()),
@@ -288,18 +285,18 @@ void program_options(int argc, char *argv[], std::string &off_file_points, bool
po::notify(vm);
if (vm.count("help") || !vm.count("input-file") || !vm.count("weight-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a 3D Alpha complex defined on a set of input points.\n";
- std::cout << "3D Alpha complex can be safe (by default) exact or fast, weighted and/or periodic\n\n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients.\n\n";
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a 3D Alpha complex defined on a set of input points.\n";
+ std::clog << "3D Alpha complex can be safe (by default) exact or fast, weighted and/or periodic\n\n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients.\n\n";
- std::cout << "Usage: " << argv[0] << " [options] input-file weight-file\n\n";
- std::cout << visible << std::endl;
+ std::clog << "Usage: " << argv[0] << " [options] input-file weight-file\n\n";
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
index 486347cc..7c898dfd 100644
--- a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
@@ -72,13 +72,10 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Simplicial complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
+ std::clog << "Simplicial complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
<< " simplices - " << simplex.num_vertices() << " vertices." << std::endl;
- // Sort the simplices in the order of the filtration
- simplex.initialize_filtration();
-
- std::cout << "Simplex_tree dim: " << simplex.dimension() << std::endl;
+ std::clog << "Simplex_tree dim: " << simplex.dimension() << std::endl;
// Compute the persistence diagram of the complex
Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Gudhi::persistent_cohomology::Field_Zp> pcoh(
simplex);
@@ -91,7 +88,7 @@ int main(int argc, char **argv) {
if (output_file_diag.empty()) {
pcoh.output_diagram();
} else {
- std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::clog << "Result in file: " << output_file_diag << std::endl;
std::ofstream out(output_file_diag);
pcoh.output_diagram(out);
out.close();
@@ -114,7 +111,7 @@ void program_options(int argc, char *argv[], std::string &off_file_points, bool
"fast,f", po::bool_switch(&fast),
"To activate fast version of Alpha complex (default is false, not available if exact is set)")(
"output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-alpha-square-value,r", po::value<Filtration_value>(&alpha_square_max_value)
->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal alpha square value for the Alpha complex construction.")(
@@ -135,17 +132,17 @@ void program_options(int argc, char *argv[], std::string &off_file_points, bool
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of an Alpha complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of an Alpha complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
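
Both utilities now go straight from complex construction to persistence, without the explicit initialize_filtration() call. A minimal sketch of that pipeline on a small Simplex_tree (the coefficient field and minimal persistence values are illustrative):

#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <vector>

int main() {
  Gudhi::Simplex_tree<> stree;
  std::vector<int> triangle = {0, 1, 2};
  stree.insert_simplex_and_subfaces(triangle, 1.0);

  // No explicit initialize_filtration() before computing persistence.
  Gudhi::persistent_cohomology::Persistent_cohomology<Gudhi::Simplex_tree<>,
                                                      Gudhi::persistent_cohomology::Field_Zp> pcoh(stree);
  pcoh.init_coefficients(2);               // homology coefficients in Z/2Z
  pcoh.compute_persistent_cohomology(0.);  // keep all intervals
  pcoh.output_diagram();                   // one bar per line: p dim birth death
  return 0;
}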
diff --git a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp
index 46ea8f2e..e5512418 100644
--- a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp
+++ b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp
@@ -21,7 +21,7 @@
int main(int argc, char** argv) {
srand(time(0));
- std::cout
+ std::clog
<< "This program computes persistent homology, by using bitmap_cubical_complex class, of cubical "
<< "complexes. The first parameter of the program is the dimension D of the bitmap. The next D parameters are "
<< "number of top dimensional cubes in each dimension of the bitmap. The program will create random cubical "
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
index 37514dee..aa255ec2 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
@@ -69,7 +69,7 @@ class Bitmap_cubical_complex : public T {
Bitmap_cubical_complex(const char* perseus_style_file)
: T(perseus_style_file), key_associated_to_simplex(this->total_number_of_cells + 1) {
if (globalDbg) {
- std::cerr << "Bitmap_cubical_complex( const char* perseus_style_file )\n";
+ std::clog << "Bitmap_cubical_complex( const char* perseus_style_file )\n";
}
for (std::size_t i = 0; i != this->total_number_of_cells; ++i) {
this->key_associated_to_simplex[i] = i;
@@ -137,7 +137,7 @@ class Bitmap_cubical_complex : public T {
**/
static Simplex_handle null_simplex() {
if (globalDbg) {
- std::cerr << "Simplex_handle null_simplex()\n";
+ std::clog << "Simplex_handle null_simplex()\n";
}
return std::numeric_limits<Simplex_handle>::max();
}
@@ -152,7 +152,7 @@ class Bitmap_cubical_complex : public T {
**/
inline unsigned dimension(Simplex_handle sh) const {
if (globalDbg) {
- std::cerr << "unsigned dimension(const Simplex_handle& sh)\n";
+ std::clog << "unsigned dimension(const Simplex_handle& sh)\n";
}
if (sh != null_simplex()) return this->get_dimension_of_a_cell(sh);
return -1;
@@ -163,7 +163,7 @@ class Bitmap_cubical_complex : public T {
**/
Filtration_value filtration(Simplex_handle sh) {
if (globalDbg) {
- std::cerr << "Filtration_value filtration(const Simplex_handle& sh)\n";
+ std::clog << "Filtration_value filtration(const Simplex_handle& sh)\n";
}
// Returns the filtration value of a simplex.
if (sh != null_simplex()) return this->data[sh];
@@ -175,7 +175,7 @@ class Bitmap_cubical_complex : public T {
**/
static Simplex_key null_key() {
if (globalDbg) {
- std::cerr << "Simplex_key null_key()\n";
+ std::clog << "Simplex_key null_key()\n";
}
return std::numeric_limits<Simplex_handle>::max();
}
@@ -185,7 +185,7 @@ class Bitmap_cubical_complex : public T {
**/
Simplex_key key(Simplex_handle sh) const {
if (globalDbg) {
- std::cerr << "Simplex_key key(const Simplex_handle& sh)\n";
+ std::clog << "Simplex_key key(const Simplex_handle& sh)\n";
}
if (sh != null_simplex()) {
return this->key_associated_to_simplex[sh];
@@ -198,7 +198,7 @@ class Bitmap_cubical_complex : public T {
**/
Simplex_handle simplex(Simplex_key key) {
if (globalDbg) {
- std::cerr << "Simplex_handle simplex(Simplex_key key)\n";
+ std::clog << "Simplex_handle simplex(Simplex_key key)\n";
}
if (key != null_key()) {
return this->simplex_associated_to_key[key];
@@ -211,7 +211,7 @@ class Bitmap_cubical_complex : public T {
**/
void assign_key(Simplex_handle sh, Simplex_key key) {
if (globalDbg) {
- std::cerr << "void assign_key(Simplex_handle& sh, Simplex_key key)\n";
+ std::clog << "void assign_key(Simplex_handle& sh, Simplex_key key)\n";
}
if (key == null_key()) return;
this->key_associated_to_simplex[sh] = key;
@@ -251,7 +251,7 @@ class Bitmap_cubical_complex : public T {
Filtration_simplex_iterator operator++() {
if (globalDbg) {
- std::cerr << "Filtration_simplex_iterator operator++\n";
+ std::clog << "Filtration_simplex_iterator operator++\n";
}
++this->position;
return (*this);
@@ -265,7 +265,7 @@ class Bitmap_cubical_complex : public T {
Filtration_simplex_iterator& operator=(const Filtration_simplex_iterator& rhs) {
if (globalDbg) {
- std::cerr << "Filtration_simplex_iterator operator =\n";
+ std::clog << "Filtration_simplex_iterator operator =\n";
}
this->b = rhs.b;
this->position = rhs.position;
@@ -274,21 +274,21 @@ class Bitmap_cubical_complex : public T {
bool operator==(const Filtration_simplex_iterator& rhs) const {
if (globalDbg) {
- std::cerr << "bool operator == ( const Filtration_simplex_iterator& rhs )\n";
+ std::clog << "bool operator == ( const Filtration_simplex_iterator& rhs )\n";
}
return (this->position == rhs.position);
}
bool operator!=(const Filtration_simplex_iterator& rhs) const {
if (globalDbg) {
- std::cerr << "bool operator != ( const Filtration_simplex_iterator& rhs )\n";
+ std::clog << "bool operator != ( const Filtration_simplex_iterator& rhs )\n";
}
return !(*this == rhs);
}
Simplex_handle operator*() {
if (globalDbg) {
- std::cerr << "Simplex_handle operator*()\n";
+ std::clog << "Simplex_handle operator*()\n";
}
return this->b->simplex_associated_to_key[this->position];
}
@@ -314,14 +314,14 @@ class Bitmap_cubical_complex : public T {
Filtration_simplex_iterator begin() {
if (globalDbg) {
- std::cerr << "Filtration_simplex_iterator begin() \n";
+ std::clog << "Filtration_simplex_iterator begin() \n";
}
return Filtration_simplex_iterator(this->b);
}
Filtration_simplex_iterator end() {
if (globalDbg) {
- std::cerr << "Filtration_simplex_iterator end()\n";
+ std::clog << "Filtration_simplex_iterator end()\n";
}
Filtration_simplex_iterator it(this->b);
it.position = this->b->simplex_associated_to_key.size();
@@ -347,7 +347,7 @@ class Bitmap_cubical_complex : public T {
**/
Filtration_simplex_range filtration_simplex_range() {
if (globalDbg) {
- std::cerr << "Filtration_simplex_range filtration_simplex_range()\n";
+ std::clog << "Filtration_simplex_range filtration_simplex_range()\n";
}
// Returns a range over the simplices of the complex in the order of the filtration
return Filtration_simplex_range(this);
@@ -370,8 +370,8 @@ class Bitmap_cubical_complex : public T {
std::pair<Simplex_handle, Simplex_handle> endpoints(Simplex_handle sh) {
std::vector<std::size_t> bdry = this->get_boundary_of_a_cell(sh);
if (globalDbg) {
- std::cerr << "std::pair<Simplex_handle, Simplex_handle> endpoints( Simplex_handle sh )\n";
- std::cerr << "bdry.size() : " << bdry.size() << "\n";
+ std::clog << "std::pair<Simplex_handle, Simplex_handle> endpoints( Simplex_handle sh )\n";
+ std::clog << "bdry.size() : " << bdry.size() << "\n";
}
// this method returns two first elements from the boundary of sh.
if (bdry.size() < 2)
@@ -392,7 +392,7 @@ class Bitmap_cubical_complex : public T {
public:
Skeleton_simplex_iterator(Bitmap_cubical_complex* b, std::size_t d) : b(b), dimension(d) {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_iterator ( Bitmap_cubical_complex* b , std::size_t d )\n";
+ std::clog << "Skeleton_simplex_iterator ( Bitmap_cubical_complex* b , std::size_t d )\n";
}
// find the position of the first simplex of a dimension d
this->position = 0;
@@ -406,7 +406,7 @@ class Bitmap_cubical_complex : public T {
Skeleton_simplex_iterator operator++() {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_iterator operator++()\n";
+ std::clog << "Skeleton_simplex_iterator operator++()\n";
}
// increment the position as long as you did not get to the next element of the dimension dimension.
++this->position;
@@ -425,7 +425,7 @@ class Bitmap_cubical_complex : public T {
Skeleton_simplex_iterator& operator=(const Skeleton_simplex_iterator& rhs) {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_iterator operator =\n";
+ std::clog << "Skeleton_simplex_iterator operator =\n";
}
this->b = rhs.b;
this->position = rhs.position;
@@ -435,21 +435,21 @@ class Bitmap_cubical_complex : public T {
bool operator==(const Skeleton_simplex_iterator& rhs) const {
if (globalDbg) {
- std::cerr << "bool operator ==\n";
+ std::clog << "bool operator ==\n";
}
return (this->position == rhs.position);
}
bool operator!=(const Skeleton_simplex_iterator& rhs) const {
if (globalDbg) {
- std::cerr << "bool operator != ( const Skeleton_simplex_iterator& rhs )\n";
+ std::clog << "bool operator != ( const Skeleton_simplex_iterator& rhs )\n";
}
return !(*this == rhs);
}
Simplex_handle operator*() {
if (globalDbg) {
- std::cerr << "Simplex_handle operator*() \n";
+ std::clog << "Simplex_handle operator*() \n";
}
return this->position;
}
@@ -476,14 +476,14 @@ class Bitmap_cubical_complex : public T {
Skeleton_simplex_iterator begin() {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_iterator begin()\n";
+ std::clog << "Skeleton_simplex_iterator begin()\n";
}
return Skeleton_simplex_iterator(this->b, this->dimension);
}
Skeleton_simplex_iterator end() {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_iterator end()\n";
+ std::clog << "Skeleton_simplex_iterator end()\n";
}
Skeleton_simplex_iterator it(this->b, this->dimension);
it.position = this->b->data.size();
@@ -500,7 +500,7 @@ class Bitmap_cubical_complex : public T {
**/
Skeleton_simplex_range skeleton_simplex_range(unsigned dimension) {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_range skeleton_simplex_range( unsigned dimension )\n";
+ std::clog << "Skeleton_simplex_range skeleton_simplex_range( unsigned dimension )\n";
}
return Skeleton_simplex_range(this, dimension);
}
@@ -515,7 +515,7 @@ class Bitmap_cubical_complex : public T {
template <typename T>
void Bitmap_cubical_complex<T>::initialize_simplex_associated_to_key() {
if (globalDbg) {
- std::cerr << "void Bitmap_cubical_complex<T>::initialize_elements_ordered_according_to_filtration() \n";
+ std::clog << "void Bitmap_cubical_complex<T>::initialize_elements_ordered_according_to_filtration() \n";
}
this->simplex_associated_to_key = std::vector<std::size_t>(this->data.size());
std::iota(std::begin(simplex_associated_to_key), std::end(simplex_associated_to_key), 0);
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
index 0d6299d2..1eb77c9c 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
@@ -142,7 +142,7 @@ class Bitmap_cubical_complex_base {
}
if (coface_counter[i] != face_counter[i]) {
if (number_of_position_in_which_counters_do_not_agree != -1) {
- std::cout << "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.\n";
+ std::cerr << "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.\n";
throw std::logic_error(
"Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.");
}
@@ -408,7 +408,7 @@ class Bitmap_cubical_complex_base {
void print_counter() const {
for (std::size_t i = 0; i != this->counter.size(); ++i) {
- std::cout << this->counter[i] << " ";
+ std::clog << this->counter[i] << " ";
}
}
friend class Bitmap_cubical_complex_base;
@@ -521,11 +521,11 @@ void Bitmap_cubical_complex_base<T>::put_data_to_bins(std::size_t number_of_bins
// now put the data into the appropriate bins:
for (std::size_t i = 0; i != this->data.size(); ++i) {
if (dbg) {
- std::cerr << "Before binning : " << this->data[i] << std::endl;
+ std::clog << "Before binning : " << this->data[i] << std::endl;
}
this->data[i] = min_max.first + dx * (this->data[i] - min_max.first) / number_of_bins;
if (dbg) {
- std::cerr << "After binning : " << this->data[i] << std::endl;
+ std::clog << "After binning : " << this->data[i] << std::endl;
}
}
}
@@ -539,11 +539,11 @@ void Bitmap_cubical_complex_base<T>::put_data_to_bins(T diameter_of_bin) {
// now put the data into the appropriate bins:
for (std::size_t i = 0; i != this->data.size(); ++i) {
if (dbg) {
- std::cerr << "Before binning : " << this->data[i] << std::endl;
+ std::clog << "Before binning : " << this->data[i] << std::endl;
}
this->data[i] = min_max.first + diameter_of_bin * (this->data[i] - min_max.first) / number_of_bins;
if (dbg) {
- std::cerr << "After binning : " << this->data[i] << std::endl;
+ std::clog << "After binning : " << this->data[i] << std::endl;
}
}
}
@@ -617,7 +617,7 @@ void Bitmap_cubical_complex_base<T>::read_perseus_style_file(const char* perseus
inFiltration >> dimensionOfData;
if (dbg) {
- std::cerr << "dimensionOfData : " << dimensionOfData << std::endl;
+ std::clog << "dimensionOfData : " << dimensionOfData << std::endl;
}
std::vector<unsigned> sizes;
@@ -630,7 +630,7 @@ void Bitmap_cubical_complex_base<T>::read_perseus_style_file(const char* perseus
sizes.push_back(size_in_this_dimension);
dimensions *= size_in_this_dimension;
if (dbg) {
- std::cerr << "size_in_this_dimension : " << size_in_this_dimension << std::endl;
+ std::clog << "size_in_this_dimension : " << size_in_this_dimension << std::endl;
}
}
this->set_up_containers(sizes);
@@ -651,7 +651,7 @@ void Bitmap_cubical_complex_base<T>::read_perseus_style_file(const char* perseus
}
if (dbg) {
- std::cerr << "Cell of an index : " << it.compute_index_in_bitmap()
+ std::clog << "Cell of an index : " << it.compute_index_in_bitmap()
<< " and dimension: " << this->get_dimension_of_a_cell(it.compute_index_in_bitmap())
<< " get the value : " << filtrationLevel << std::endl;
}
@@ -754,20 +754,20 @@ std::vector<std::size_t> Bitmap_cubical_complex_base<T>::get_coboundary_of_a_cel
template <typename T>
unsigned Bitmap_cubical_complex_base<T>::get_dimension_of_a_cell(std::size_t cell) const {
bool dbg = false;
- if (dbg) std::cerr << "\n\n\n Computing position o a cell of an index : " << cell << std::endl;
+ if (dbg) std::clog << "\n\n\n Computing position o a cell of an index : " << cell << std::endl;
unsigned dimension = 0;
for (std::size_t i = this->multipliers.size(); i != 0; --i) {
unsigned position = cell / this->multipliers[i - 1];
if (dbg) {
- std::cerr << "i-1 :" << i - 1 << std::endl;
- std::cerr << "cell : " << cell << std::endl;
- std::cerr << "position : " << position << std::endl;
- std::cerr << "multipliers[" << i - 1 << "] = " << this->multipliers[i - 1] << std::endl;
+ std::clog << "i-1 :" << i - 1 << std::endl;
+ std::clog << "cell : " << cell << std::endl;
+ std::clog << "position : " << position << std::endl;
+ std::clog << "multipliers[" << i - 1 << "] = " << this->multipliers[i - 1] << std::endl;
}
if (position % 2 == 1) {
- if (dbg) std::cerr << "Nonzero length in this direction \n";
+ if (dbg) std::clog << "Nonzero length in this direction \n";
dimension++;
}
cell = cell % this->multipliers[i - 1];
@@ -803,9 +803,9 @@ void Bitmap_cubical_complex_base<T>::impose_lower_star_filtration() {
while (indices_to_consider.size()) {
if (dbg) {
- std::cerr << "indices_to_consider in this iteration \n";
+ std::clog << "indices_to_consider in this iteration \n";
for (std::size_t i = 0; i != indices_to_consider.size(); ++i) {
- std::cout << indices_to_consider[i] << " ";
+ std::clog << indices_to_consider[i] << " ";
}
}
std::vector<std::size_t> new_indices_to_consider;
@@ -813,14 +813,14 @@ void Bitmap_cubical_complex_base<T>::impose_lower_star_filtration() {
std::vector<std::size_t> bd = this->get_boundary_of_a_cell(indices_to_consider[i]);
for (std::size_t boundaryIt = 0; boundaryIt != bd.size(); ++boundaryIt) {
if (dbg) {
- std::cerr << "filtration of a cell : " << bd[boundaryIt] << " is : " << this->data[bd[boundaryIt]]
+ std::clog << "filtration of a cell : " << bd[boundaryIt] << " is : " << this->data[bd[boundaryIt]]
<< " while of a cell: " << indices_to_consider[i] << " is: " << this->data[indices_to_consider[i]]
<< std::endl;
}
if (this->data[bd[boundaryIt]] > this->data[indices_to_consider[i]]) {
this->data[bd[boundaryIt]] = this->data[indices_to_consider[i]];
if (dbg) {
- std::cerr << "Setting the value of a cell : " << bd[boundaryIt]
+ std::clog << "Setting the value of a cell : " << bd[boundaryIt]
<< " to : " << this->data[indices_to_consider[i]] << std::endl;
}
}
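In get_dimension_of_a_cell above, the dimension of a cell is the number of directions in which its per-axis position is odd, obtained by repeated division by the multipliers. A minimal standalone sketch mirroring that loop, on a hypothetical 2 x 3 bitmap whose multipliers would be {1, 5}:

#include <cstddef>
#include <iostream>
#include <vector>

// Mirrors get_dimension_of_a_cell(): count the axes whose position in the counter is odd.
unsigned dimension_from_index(std::size_t cell, const std::vector<std::size_t>& multipliers) {
  unsigned dimension = 0;
  for (std::size_t i = multipliers.size(); i != 0; --i) {
    std::size_t position = cell / multipliers[i - 1];
    if (position % 2 == 1) ++dimension;
    cell = cell % multipliers[i - 1];
  }
  return dimension;
}

int main() {
  std::vector<std::size_t> multipliers = {1, 5};              // hypothetical sizes {2, 3}
  std::cout << dimension_from_index(0, multipliers) << "\n";  // both positions even -> 0 (a vertex)
  std::cout << dimension_from_index(1, multipliers) << "\n";  // one odd position    -> 1 (an edge)
  std::cout << dimension_from_index(6, multipliers) << "\n";  // two odd positions   -> 2 (a square)
  return 0;
}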
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
index edd794fe..18901469 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
@@ -128,7 +128,7 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
}
if (coface_counter[i] != face_counter[i]) {
if (number_of_position_in_which_counters_do_not_agree != -1) {
- std::cout << "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.\n";
+ std::cerr << "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.\n";
throw std::logic_error(
"Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.");
}
@@ -237,7 +237,7 @@ Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_comp
if (inFiltration.eof()) break;
if (dbg) {
- std::cerr << "Cell of an index : " << it.compute_index_in_bitmap()
+ std::clog << "Cell of an index : " << it.compute_index_in_bitmap()
<< " and dimension: " << this->get_dimension_of_a_cell(it.compute_index_in_bitmap())
<< " get the value : " << filtrationLevel << std::endl;
}
@@ -278,7 +278,7 @@ std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_bas
std::size_t cell) const {
bool dbg = false;
if (dbg) {
- std::cerr << "Computations of boundary of a cell : " << cell << std::endl;
+ std::clog << "Computations of boundary of a cell : " << cell << std::endl;
}
std::vector<std::size_t> boundary_elements;
@@ -292,7 +292,6 @@ std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_bas
if (position % 2 == 1) {
// if there are no periodic boundary conditions in this direction, we do not have to do anything.
if (!directions_in_which_periodic_b_cond_are_to_be_imposed[i - 1]) {
- // std::cerr << "A\n";
if (sum_of_dimensions % 2) {
boundary_elements.push_back(cell - this->multipliers[i - 1]);
boundary_elements.push_back(cell + this->multipliers[i - 1]);
@@ -301,12 +300,11 @@ std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_bas
boundary_elements.push_back(cell - this->multipliers[i - 1]);
}
if (dbg) {
- std::cerr << cell - this->multipliers[i - 1] << " " << cell + this->multipliers[i - 1] << " ";
+ std::clog << cell - this->multipliers[i - 1] << " " << cell + this->multipliers[i - 1] << " ";
}
} else {
// in this direction we have to do boundary conditions. Therefore, we need to check if we are not at the end.
if (position != 2 * this->sizes[i - 1] - 1) {
- // std::cerr << "B\n";
if (sum_of_dimensions % 2) {
boundary_elements.push_back(cell - this->multipliers[i - 1]);
boundary_elements.push_back(cell + this->multipliers[i - 1]);
@@ -315,10 +313,9 @@ std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_bas
boundary_elements.push_back(cell - this->multipliers[i - 1]);
}
if (dbg) {
- std::cerr << cell - this->multipliers[i - 1] << " " << cell + this->multipliers[i - 1] << " ";
+ std::clog << cell - this->multipliers[i - 1] << " " << cell + this->multipliers[i - 1] << " ";
}
} else {
- // std::cerr << "C\n";
if (sum_of_dimensions % 2) {
boundary_elements.push_back(cell - this->multipliers[i - 1]);
boundary_elements.push_back(cell - (2 * this->sizes[i - 1] - 1) * this->multipliers[i - 1]);
@@ -327,7 +324,7 @@ std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_bas
boundary_elements.push_back(cell - this->multipliers[i - 1]);
}
if (dbg) {
- std::cerr << cell - this->multipliers[i - 1] << " "
+ std::clog << cell - this->multipliers[i - 1] << " "
<< cell - (2 * this->sizes[i - 1] - 1) * this->multipliers[i - 1] << " ";
}
}
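The wrap-around branch of get_boundary_of_a_cell above can be checked by hand on a one-dimensional bitmap with sizes = {3} and periodic boundary conditions: the cells are indexed 0..5 (vertices at even indices, edges at odd indices), so the last edge has index 5. Its boundary is cell - multipliers[0] = 5 - 1 = 4 together with cell - (2*sizes[0] - 1)*multipliers[0] = 5 - 5 = 0, i.e. the edge wraps around to the first vertex instead of reaching for a non-existent cell 6.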
diff --git a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp
index f18adb36..6f35b6da 100644
--- a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp
+++ b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp
@@ -1402,12 +1402,12 @@ BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_periodic_case_2d) {
it != ba.all_cells_iterator_end(); ++it) {
int i = 1;
- // std::cout << "Element : " << *it << std::endl;
+ // std::clog << "Element : " << *it << std::endl;
Bitmap_cubical_complex_periodic_boundary_conditions_base::Boundary_range bdrange = ba.boundary_range(*it);
for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
bd != bdrange.end(); ++bd) {
- // std::cout << *bd << " ";
+ // std::clog << *bd << " ";
Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
int j = 1;
for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
@@ -1441,7 +1441,7 @@ BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_periodic_case_3d) {
std::vector<int> elems_in_boundary(number_of_all_elements, 0);
for (Bitmap_cubical_complex_periodic_boundary_conditions::All_cells_iterator it = ba.all_cells_iterator_begin();
it != ba.all_cells_iterator_end(); ++it) {
- // std::cout << "Element : " << *it << std::endl;
+ // std::clog << "Element : " << *it << std::endl;
int i = 1;
@@ -1449,7 +1449,7 @@ BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_periodic_case_3d) {
for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
bd != bdrange.end(); ++bd) {
Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
- // std::cout << *bd << " ";
+ // std::clog << *bd << " ";
int j = 1;
for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
bd2 != second_bdrange.end(); ++bd2) {
@@ -1551,7 +1551,7 @@ BOOST_AUTO_TEST_CASE(compute_incidence_between_cells_test_periodic_boundary_cond
Bitmap_cubical_complex_periodic_boundary_conditions_base::Boundary_range bdrange = ba.boundary_range(*it);
for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
bd != bdrange.end(); ++bd) {
- // std::cout << *bd << " ";
+ // std::clog << *bd << " ";
Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
bd2 != second_bdrange.end(); ++bd2) {
@@ -1571,11 +1571,11 @@ BOOST_AUTO_TEST_CASE(perseus_file_read) {
auto it = increasing.top_dimensional_cells_iterator_begin();
double value = increasing.get_cell_data(*it);
- std::cout << "First value of sinusoid.txt is " << value << std::endl;
+ std::clog << "First value of sinusoid.txt is " << value << std::endl;
BOOST_CHECK(value == 10.);
// Next value
++it;
value = increasing.get_cell_data(*it);
- std::cout << "Second value of sinusoid.txt is " << value << std::endl;
+ std::clog << "Second value of sinusoid.txt is " << value << std::endl;
BOOST_CHECK(value == std::numeric_limits<double>::infinity());
}
diff --git a/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp
index a9792c2d..510861cd 100644
--- a/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp
+++ b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp
@@ -19,7 +19,7 @@
#include <cstddef>
int main(int argc, char** argv) {
- std::cout
+ std::clog
<< "This program computes persistent homology, by using bitmap_cubical_complex class, of cubical "
<< "complexes provided in text files in Perseus style (the only numbered in the first line is a dimension D of a"
<< "bitmap. In the lines I between 2 and D+1 there are numbers of top dimensional cells in the direction I. Let "
@@ -62,7 +62,7 @@ int main(int argc, char** argv) {
pcoh.output_diagram(out);
out.close();
- std::cout << "Result in file: " << output_file_name << "\n";
+ std::clog << "Result in file: " << output_file_name << "\n";
return 0;
}
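The help text above (together with read_perseus_style_file earlier in this patch) describes the expected input layout: the first line holds the dimension D of the bitmap, the next D lines hold the number of top-dimensional cells in each direction, and the remaining lines hold one filtration value per top-dimensional cell. A minimal illustration for a 2-dimensional bitmap of 3 x 2 top-dimensional cells, with purely hypothetical filtration values:

2
3
2
1
4
6
8
20
4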
diff --git a/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp
index fa97bac0..86816417 100644
--- a/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp
+++ b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp
@@ -20,7 +20,7 @@
#include <string>
int main(int argc, char** argv) {
- std::cout
+ std::clog
<< "This program computes persistent homology, by using "
<< "Bitmap_cubical_complex_periodic_boundary_conditions class, of cubical complexes provided in text files in "
<< "Perseus style (the only numbered in the first line is a dimension D of a bitmap. In the lines I between 2 "
@@ -64,7 +64,7 @@ int main(int argc, char** argv) {
pcoh.output_diagram(out);
out.close();
- std::cout << "Result in file: " << output_file_name << "\n";
+ std::clog << "Result in file: " << output_file_name << "\n";
return 0;
}
diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
index bbc952e1..2a988b4b 100644
--- a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
+++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
@@ -52,7 +52,7 @@ int main() {
diag2.emplace_back(0., 13.);
double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2);
- std::cout << "Bottleneck distance = " << b << std::endl;
+ std::clog << "Bottleneck distance = " << b << std::endl;
}
* \endcode
*
diff --git a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
index 6c0dc9bf..ceb9e226 100644
--- a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
+++ b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
@@ -68,12 +68,9 @@ int main(int argc, char * argv[]) {
Simplex_tree rips_stree;
rips_complex.create_complex(rips_stree, dim_max);
- std::cout << "The Rips complex contains " << rips_stree.num_simplices() << " simplices and has dimension "
+ std::clog << "The Rips complex contains " << rips_stree.num_simplices() << " simplices and has dimension "
<< rips_stree.dimension() << " \n";
- // Sort the simplices in the order of the filtration
- rips_stree.initialize_filtration();
-
// Compute the persistence diagram of the complex
Persistent_cohomology rips_pcoh(rips_stree);
// initializes the coefficient field for homology
@@ -89,12 +86,9 @@ int main(int argc, char * argv[]) {
Simplex_tree alpha_stree;
alpha_complex.create_complex(alpha_stree, threshold * threshold);
- std::cout << "The Alpha complex contains " << alpha_stree.num_simplices() << " simplices and has dimension "
+ std::clog << "The Alpha complex contains " << alpha_stree.num_simplices() << " simplices and has dimension "
<< alpha_stree.dimension() << " \n";
- // Sort the simplices in the order of the filtration
- alpha_stree.initialize_filtration();
-
// Compute the persistence diagram of the complex
Persistent_cohomology alpha_pcoh(alpha_stree);
// initializes the coefficient field for homology
@@ -115,12 +109,12 @@ int main(int argc, char * argv[]) {
std::transform(alpha_intervals.begin(), alpha_intervals.end(), alpha_intervals.begin(), compute_root_square);
double bottleneck_distance = Gudhi::persistence_diagram::bottleneck_distance(rips_intervals, alpha_intervals);
- std::cout << "In dimension " << dim << ", bottleneck distance = " << bottleneck_distance << std::endl;
+ std::clog << "In dimension " << dim << ", bottleneck distance = " << bottleneck_distance << std::endl;
if (bottleneck_distance > max_b_distance)
max_b_distance = bottleneck_distance;
}
- std::cout << "================================================================================" << std::endl;
- std::cout << "Bottleneck distance is " << max_b_distance << std::endl;
+ std::clog << "================================================================================" << std::endl;
+ std::clog << "Bottleneck distance is " << max_b_distance << std::endl;
return 0;
}
@@ -162,17 +156,17 @@ void program_options(int argc, char * argv[]
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
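The diagram format mentioned in the help text above is one bar per line in the order p dim b d. For instance, a line such as (hypothetical values)

11 1 0.0982 0.2434

would describe a 1-dimensional feature born at 0.0982 and dying at 0.2434, computed with coefficients in Z/11Z.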
diff --git a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
index 61778a55..e8632a4f 100644
--- a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
+++ b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
@@ -20,9 +20,9 @@ int main() {
double b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2);
- std::cout << "Bottleneck distance = " << b << std::endl;
+ std::clog << "Bottleneck distance = " << b << std::endl;
b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.1);
- std::cout << "Approx bottleneck distance = " << b << std::endl;
+ std::clog << "Approx bottleneck distance = " << b << std::endl;
}
diff --git a/src/Bottleneck_distance/utilities/bottleneck_distance.cpp b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp
index d88a8a0b..01813ba1 100644
--- a/src/Bottleneck_distance/utilities/bottleneck_distance.cpp
+++ b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp
@@ -18,7 +18,7 @@
int main(int argc, char** argv) {
if (argc < 3) {
- std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file" <<
+ std::clog << "To run this program please provide as an input two files with persistence diagrams. Each file" <<
" should contain a birth-death pair per line. Third, optional parameter is an error bound on the bottleneck" <<
" distance (set by default to the smallest positive double value). If you set the error bound to 0, be" <<
" aware this version is exact but expensive. The program will now terminate \n";
@@ -32,7 +32,7 @@ int main(int argc, char** argv) {
tolerance = atof(argv[3]);
}
double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, tolerance);
- std::cout << "The distance between the diagrams is : " << b << ". The tolerance is : " << tolerance << std::endl;
+ std::clog << "The distance between the diagrams is : " << b << ". The tolerance is : " << tolerance << std::endl;
return 0;
}
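The error bound described in the message above is the optional third argument of bottleneck_distance. A minimal sketch of the trade-off, using the same API as bottleneck_basic_example.cpp above (the diagram values are hypothetical):

#include <gudhi/Bottleneck.h>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  std::vector<std::pair<double, double>> diag1 = {{2.7, 3.7}, {9.6, 14.0}, {34.2, 34.974}};
  std::vector<std::pair<double, double>> diag2 = {{2.8, 4.45}, {9.5, 14.1}};

  // Default error bound (smallest positive double): effectively exact, can be expensive.
  double exact = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2);
  // Explicit error bound: the result is within 0.1 of the bottleneck distance, but cheaper.
  double approx = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, 0.1);

  std::cout << "exact = " << exact << ", approx = " << approx << "\n";
  return 0;
}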
diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
index d2d71dbf..e489e8a4 100644
--- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
+++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
@@ -68,24 +68,24 @@ int main(int argc, char* argv[]) {
Proximity_graph euclidean_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
off_reader.get_point_cloud(), threshold, Gudhi::Euclidean_distance());
- std::cout << euclidean_clock << std::endl;
+ std::clog << euclidean_clock << std::endl;
Gudhi::Clock miniball_clock("Minimal_enclosing_ball_radius");
// Compute the proximity graph of the points
Proximity_graph miniball_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
off_reader.get_point_cloud(), threshold, Minimal_enclosing_ball_radius());
- std::cout << miniball_clock << std::endl;
+ std::clog << miniball_clock << std::endl;
Gudhi::Clock common_miniball_clock("Gudhi::Minimal_enclosing_ball_radius()");
// Compute the proximity graph of the points
Proximity_graph common_miniball_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
off_reader.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius());
- std::cout << common_miniball_clock << std::endl;
+ std::clog << common_miniball_clock << std::endl;
boost::filesystem::path full_path(boost::filesystem::current_path());
- std::cout << "Current path is : " << full_path << std::endl;
+ std::clog << "Current path is : " << full_path << std::endl;
- std::cout << "File name;Radius;Rips time;Cech time; Ratio Rips/Cech time;Rips nb simplices;Cech nb simplices;"
+ std::clog << "File name;Radius;Rips time;Cech time; Ratio Rips/Cech time;Rips nb simplices;Cech nb simplices;"
<< std::endl;
boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end
for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) {
@@ -96,8 +96,8 @@ int main(int argc, char* argv[]) {
Point p0 = off_reader.get_point_cloud()[0];
for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) {
- std::cout << itr->path().stem() << ";";
- std::cout << radius << ";";
+ std::clog << itr->path().stem() << ";";
+ std::clog << radius << ";";
Gudhi::Clock rips_clock("Rips computation");
Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius,
Gudhi::Minimal_enclosing_ball_radius());
@@ -107,7 +107,7 @@ int main(int argc, char* argv[]) {
// Display information about the Rips complex
// ------------------------------------------
double rips_sec = rips_clock.num_seconds();
- std::cout << rips_sec << ";";
+ std::clog << rips_sec << ";";
Gudhi::Clock cech_clock("Cech computation");
Cech_complex cech_complex_from_points(off_reader.get_point_cloud(), radius);
@@ -117,12 +117,12 @@ int main(int argc, char* argv[]) {
// Display information about the Cech complex
// ------------------------------------------
double cech_sec = cech_clock.num_seconds();
- std::cout << cech_sec << ";";
- std::cout << cech_sec / rips_sec << ";";
+ std::clog << cech_sec << ";";
+ std::clog << cech_sec / rips_sec << ";";
assert(rips_stree.num_simplices() >= cech_stree.num_simplices());
- std::cout << rips_stree.num_simplices() << ";";
- std::cout << cech_stree.num_simplices() << ";" << std::endl;
+ std::clog << rips_stree.num_simplices() << ";";
+ std::clog << cech_stree.num_simplices() << ";" << std::endl;
}
}
}
diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp
index 3cc5a4df..1a1f708c 100644
--- a/src/Cech_complex/example/cech_complex_example_from_points.cpp
+++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp
@@ -37,18 +37,18 @@ int main() {
// ----------------------------------------------------------------------------
// Display information about the one skeleton Cech complex
// ----------------------------------------------------------------------------
- std::cout << "Cech complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
+ std::clog << "Cech complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
<< stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on Cech complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on Cech complex simplices in the filtration order, with [filtration value]:" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> "
+ std::clog << ") -> "
<< "[" << stree.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << std::endl;
}
return 0;
}
diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp
index b3d05697..f59f0293 100644
--- a/src/Cech_complex/example/cech_complex_step_by_step.cpp
+++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp
@@ -51,12 +51,12 @@ class Cech_blocker {
for (auto vertex : simplex_tree_.simplex_vertex_range(sh)) {
points.push_back(point_cloud_[vertex]);
#ifdef DEBUG_TRACES
- std::cout << "#(" << vertex << ")#";
+ std::clog << "#(" << vertex << ")#";
#endif // DEBUG_TRACES
}
Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points);
#ifdef DEBUG_TRACES
- std::cout << "radius = " << radius << " - " << (radius > max_radius_) << std::endl;
+ std::clog << "radius = " << radius << " - " << (radius > max_radius_) << std::endl;
#endif // DEBUG_TRACES
simplex_tree_.assign_filtration(sh, radius);
return (radius > max_radius_);
@@ -96,23 +96,23 @@ int main(int argc, char* argv[]) {
// expand the graph until dimension dim_max
st.expansion_with_blockers(dim_max, Cech_blocker(st, max_radius, off_reader.get_point_cloud()));
- std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << st.dimension() << " \n";
+ std::clog << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << st.dimension() << " \n";
// Sort the simplices in the order of the filtration
st.initialize_filtration();
#if DEBUG_TRACES
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " "
+ std::clog << " "
<< "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << static_cast<int>(vertex) << " ";
+ std::clog << static_cast<int>(vertex) << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
#endif // DEBUG_TRACES
@@ -144,11 +144,11 @@ void program_options(int argc, char* argv[], std::string& off_file_points, Filtr
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Construct a Cech complex defined on a set of input points.\n \n";
+ std::clog << std::endl;
+ std::clog << "Construct a Cech complex defined on a set of input points.\n \n";
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
index 068cdde3..31b9aab5 100644
--- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
+++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
@@ -53,12 +53,12 @@ class Cech_blocker {
for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) {
points.push_back(cc_ptr_->get_point(vertex));
#ifdef DEBUG_TRACES
- std::cout << "#(" << vertex << ")#";
+ std::clog << "#(" << vertex << ")#";
#endif // DEBUG_TRACES
}
Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points);
#ifdef DEBUG_TRACES
- if (radius > cc_ptr_->max_radius()) std::cout << "radius > max_radius => expansion is blocked\n";
+ if (radius > cc_ptr_->max_radius()) std::clog << "radius > max_radius => expansion is blocked\n";
#endif // DEBUG_TRACES
sc_ptr_->assign_filtration(sh, radius);
return (radius > cc_ptr_->max_radius());
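The blocker above assigns each candidate simplex the radius of the minimal enclosing ball of its vertices as filtration value and returns true, blocking the expansion, exactly when that radius exceeds max_radius. As a quick check of the scale involved: for an edge {p, q} the minimal enclosing ball has radius |p - q| / 2, which is why the benchmark earlier in this patch can compare a Cech complex of radius r against a Rips complex built with Gudhi::Minimal_enclosing_ball_radius() at the same threshold r.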
diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp
index c6b15d7f..6e00d7b5 100644
--- a/src/Cech_complex/test/test_cech_complex.cpp
+++ b/src/Cech_complex/test/test_cech_complex.cpp
@@ -58,7 +58,7 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
points.push_back({-0.5, 2.}); // 10
Filtration_value max_radius = 1.0;
- std::cout << "========== NUMBER OF POINTS = " << points.size() << " - Cech max_radius = " << max_radius
+ std::clog << "========== NUMBER OF POINTS = " << points.size() << " - Cech max_radius = " << max_radius
<< "==========" << std::endl;
Cech_complex cech_complex_for_doc(points, max_radius);
@@ -72,14 +72,14 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
const int DIMENSION_1 = 1;
Simplex_tree st;
cech_complex_for_doc.create_complex(st, DIMENSION_1);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
BOOST_CHECK(st.dimension() == DIMENSION_1);
const int NUMBER_OF_VERTICES = 11;
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ std::clog << "st.num_simplices()=" << st.num_simplices() << std::endl;
BOOST_CHECK(st.num_simplices() == 27);
// Check filtration values of vertices is 0.0
@@ -91,12 +91,12 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
for (auto f_simplex : st.skeleton_simplex_range(DIMENSION_1)) {
if (DIMENSION_1 == st.dimension(f_simplex)) {
std::vector<Point> vp;
- std::cout << "vertex = (";
+ std::clog << "vertex = (";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << ",";
+ std::clog << vertex << ",";
vp.push_back(points.at(vertex));
}
- std::cout << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))
+ std::clog << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))
<< " - filtration =" << st.filtration(f_simplex) << std::endl;
BOOST_CHECK(vp.size() == 2);
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex),
@@ -112,13 +112,13 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
Simplex_tree st2;
cech_complex_for_doc.create_complex(st2, DIMENSION_2);
- std::cout << "st2.dimension()=" << st2.dimension() << std::endl;
+ std::clog << "st2.dimension()=" << st2.dimension() << std::endl;
BOOST_CHECK(st2.dimension() == DIMENSION_2);
- std::cout << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
+ std::clog << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
BOOST_CHECK(st2.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
+ std::clog << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
BOOST_CHECK(st2.num_simplices() == 30);
Point_cloud points012;
@@ -129,7 +129,7 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
Min_sphere ms012(dimension, points012.begin(), points012.end());
Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
- std::cout << "f012= " << f012 << " | ms012_radius= " << std::sqrt(ms012.squared_radius()) << std::endl;
+ std::clog << "f012= " << f012 << " | ms012_radius= " << std::sqrt(ms012.squared_radius()) << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(ms012.squared_radius()));
@@ -140,7 +140,7 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
Min_sphere ms1410(dimension, points1410.begin(), points1410.end());
Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10}));
- std::cout << "f1410= " << f1410 << " | ms1410_radius= " << std::sqrt(ms1410.squared_radius()) << std::endl;
+ std::clog << "f1410= " << f1410 << " | ms1410_radius= " << std::sqrt(ms1410.squared_radius()) << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, std::sqrt(ms1410.squared_radius()));
@@ -151,7 +151,7 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
Min_sphere ms469(dimension, points469.begin(), points469.end());
Simplex_tree::Filtration_value f469 = st2.filtration(st2.find({4, 6, 9}));
- std::cout << "f469= " << f469 << " | ms469_radius= " << std::sqrt(ms469.squared_radius()) << std::endl;
+ std::clog << "f469= " << f469 << " | ms469_radius= " << std::sqrt(ms469.squared_radius()) << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(ms469.squared_radius()));
@@ -178,35 +178,35 @@ BOOST_AUTO_TEST_CASE(Cech_complex_from_points) {
// ----------------------------------------------------------------------------
Cech_complex cech_complex_from_points(points, 2.0);
- std::cout << "========== cech_complex_from_points ==========" << std::endl;
+ std::clog << "========== cech_complex_from_points ==========" << std::endl;
Simplex_tree st;
const int DIMENSION = 3;
cech_complex_from_points.create_complex(st, DIMENSION);
// Another way to check num_simplices
- std::cout << "Iterator on Cech complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on Cech complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
for (auto f_simplex : st.filtration_simplex_range()) {
num_simplices++;
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> "
+ std::clog << ") -> "
<< "[" << st.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << std::endl;
}
BOOST_CHECK(num_simplices == 15);
- std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ std::clog << "st.num_simplices()=" << st.num_simplices() << std::endl;
BOOST_CHECK(st.num_simplices() == 15);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
BOOST_CHECK(st.dimension() == DIMENSION);
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == 4);
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
+ std::clog << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
switch (st.dimension(f_simplex)) {
case 0:
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.0);
@@ -236,7 +236,7 @@ BOOST_AUTO_TEST_CASE(Cech_create_complex_throw) {
// ----------------------------------------------------------------------------
std::string off_file_name("alphacomplexdoc.off");
double max_radius = 12.0;
- std::cout << "========== OFF FILE NAME = " << off_file_name << " - Cech max_radius=" << max_radius
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - Cech max_radius=" << max_radius
<< "==========" << std::endl;
Gudhi::Points_off_reader<Point> off_reader(off_file_name);
@@ -245,7 +245,7 @@ BOOST_AUTO_TEST_CASE(Cech_create_complex_throw) {
Simplex_tree stree;
std::vector<int> simplex = {0, 1, 2};
stree.insert_simplex_and_subfaces(simplex);
- std::cout << "Check exception throw in debug mode" << std::endl;
+ std::clog << "Check exception throw in debug mode" << std::endl;
// throw an exception because stree is not empty
BOOST_CHECK_THROW(cech_complex_from_file.create_complex(stree, 1), std::invalid_argument);
}
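The checks above compare each 2-simplex filtration with the radius of the minimal enclosing ball (Min_sphere) of its three vertices. As a hand-computable reference point: for an equilateral triangle of side length 1 the minimal enclosing ball is the circumscribed circle, of radius 1/sqrt(3) ~ 0.577, while for an obtuse triangle it is the ball spanned by the longest edge, of radius half that edge's length.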
diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp
index 8cfe018b..daea08e2 100644
--- a/src/Cech_complex/utilities/cech_persistence.cpp
+++ b/src/Cech_complex/utilities/cech_persistence.cpp
@@ -50,8 +50,8 @@ int main(int argc, char* argv[]) {
Simplex_tree simplex_tree;
cech_complex_from_file.create_complex(simplex_tree, dim_max);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Sort the simplices in the order of the filtration
simplex_tree.initialize_filtration();
@@ -85,7 +85,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-radius,r",
po::value<Filtration_value>(&max_radius)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Cech complex construction.")(
@@ -108,17 +108,17 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Cech complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Cech complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Contraction/example/Garland_heckbert.cpp b/src/Contraction/example/Garland_heckbert.cpp
index 9c0b5205..489ef5d0 100644
--- a/src/Contraction/example/Garland_heckbert.cpp
+++ b/src/Contraction/example/Garland_heckbert.cpp
@@ -147,7 +147,7 @@ int main(int argc, char *argv[]) {
return EXIT_FAILURE;
}
- std::cout << "Load complex with " << complex.num_vertices() << " vertices" << std::endl;
+ std::clog << "Load complex with " << complex.num_vertices() << " vertices" << std::endl;
int num_contractions = atoi(argv[3]);
@@ -158,10 +158,10 @@ int main(int argc, char *argv[]) {
Gudhi::contraction::make_link_valid_contraction<EdgeProfile>(),
new GH_visitor(complex));
- std::cout << "Contract " << num_contractions << " edges" << std::endl;
+ std::clog << "Contract " << num_contractions << " edges" << std::endl;
contractor.contract_edges(num_contractions);
- std::cout << "Final complex has " <<
+ std::clog << "Final complex has " <<
complex.num_vertices() << " vertices, " <<
complex.num_edges() << " edges and " <<
complex.num_triangles() << " triangles." << std::endl;
diff --git a/src/Contraction/example/Rips_contraction.cpp b/src/Contraction/example/Rips_contraction.cpp
index b5ce06c1..42dd0910 100644
--- a/src/Contraction/example/Rips_contraction.cpp
+++ b/src/Contraction/example/Rips_contraction.cpp
@@ -52,13 +52,13 @@ int main(int argc, char *argv[]) {
return EXIT_FAILURE;
}
- std::cout << "Build the Rips complex with " << complex.num_vertices() << " vertices" << std::endl;
+ std::clog << "Build the Rips complex with " << complex.num_vertices() << " vertices" << std::endl;
build_rips(complex, atof(argv[2]));
Gudhi::Clock contraction_chrono("Time to simplify and enumerate simplices");
- std::cout << "Initial complex has " <<
+ std::clog << "Initial complex has " <<
complex.num_vertices() << " vertices and " <<
complex.num_edges() << " edges" << std::endl;
@@ -69,16 +69,16 @@ int main(int argc, char *argv[]) {
Gudhi::contraction::make_remove_popable_blockers_visitor<Profile>());
contractor.contract_edges();
- std::cout << "Counting final number of simplices \n";
+ std::clog << "Counting final number of simplices \n";
unsigned num_simplices = std::distance(complex.complex_simplex_range().begin(), complex.complex_simplex_range().end());
- std::cout << "Final complex has " <<
+ std::clog << "Final complex has " <<
complex.num_vertices() << " vertices, " <<
complex.num_edges() << " edges, " <<
complex.num_blockers() << " blockers and " <<
num_simplices << " simplices" << std::endl;
- std::cout << contraction_chrono;
+ std::clog << contraction_chrono;
return EXIT_SUCCESS;
}
diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h
index 6058d64b..6c0f4c78 100644
--- a/src/Contraction/include/gudhi/Edge_contraction.h
+++ b/src/Contraction/include/gudhi/Edge_contraction.h
@@ -164,13 +164,13 @@ int main (int argc, char *argv[])
std::cerr << "Unable to read file:"<<argv[1]<<std::endl;
return EXIT_FAILURE;
}
- std::cout << "Build the Rips complex"<<std::endl;
+ std::clog << "Build the Rips complex"<<std::endl;
build_rips(complex,atof(argv[2]));
boost::timer::auto_cpu_timer t;
- std::cout << "Initial complex has "<<
+ std::clog << "Initial complex has "<<
complex.num_vertices()<<" vertices and "<<
complex.num_edges()<<" edges"<<std::endl;
@@ -181,17 +181,17 @@ int main (int argc, char *argv[])
contraction::make_remove_popable_blockers_visitor<Profile>());
contractor.contract_edges();
- std::cout << "Counting final number of simplices \n";
+ std::clog << "Counting final number of simplices \n";
unsigned num_simplices = std::distance(complex.star_simplex_range().begin(),complex.star_simplex_range().end());
- std::cout << "Final complex has "<<
+ std::clog << "Final complex has "<<
complex.num_vertices()<<" vertices, "<<
complex.num_edges()<<" edges, "<<
complex.num_blockers()<<" blockers and "<<
num_simplices<<" simplices"<<std::endl;
- std::cout << "Time to simplify and enumerate simplices:\n";
+ std::clog << "Time to simplify and enumerate simplices:\n";
return EXIT_SUCCESS;
}
diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h
index dd9bdaab..626ef59d 100644
--- a/src/GudhUI/model/Model.h
+++ b/src/GudhUI/model/Model.h
@@ -60,7 +60,7 @@ class CGAL_geometric_flag_complex_wrapper {
void maximal_face(std::vector<int> vertices) {
if (!load_only_points_) {
- // std::cout << "size:" << vertices.size() << std::endl;
+ // std::clog << "size:" << vertices.size() << std::endl;
for (std::size_t i = 0; i < vertices.size(); ++i)
for (std::size_t j = i + 1; j < vertices.size(); ++j)
complex_.add_edge_without_blockers(Vertex_handle(vertices[i]), Vertex_handle(vertices[j]));
@@ -178,7 +178,7 @@ class Model {
void contract_edges(unsigned num_contractions) {
Gudhi::Clock c;
Edge_contractor<Complex> contractor(complex_, num_contractions);
- std::cout << "Time to simplify: " << c.num_seconds() << "s" << std::endl;
+ std::clog << "Time to simplify: " << c.num_seconds() << "s" << std::endl;
}
void collapse_vertices(unsigned num_collapses) {
@@ -192,14 +192,14 @@ class Model {
}
void show_graph_stats() {
- std::cout << "++++++ Graph stats +++++++" << std::endl;
- std::cout << "Num vertices : " << complex_.num_vertices() << std::endl;
- std::cout << "Num edges : " << complex_.num_edges() << std::endl;
- std::cout << "Num connected components : " << complex_.num_connected_components() << std::endl;
- std::cout << "Min/avg/max degree : " << min_degree() << "/" << avg_degree() << "/" << max_degree() << std::endl;
- std::cout << "Num connected components : " << complex_.num_connected_components() << std::endl;
- std::cout << "Num connected components : " << complex_.num_connected_components() << std::endl;
- std::cout << "+++++++++++++++++++++++++" << std::endl;
+ std::clog << "++++++ Graph stats +++++++" << std::endl;
+ std::clog << "Num vertices : " << complex_.num_vertices() << std::endl;
+ std::clog << "Num edges : " << complex_.num_edges() << std::endl;
+ std::clog << "Num connected components : " << complex_.num_connected_components() << std::endl;
+ std::clog << "Min/avg/max degree : " << min_degree() << "/" << avg_degree() << "/" << max_degree() << std::endl;
+ std::clog << "Num connected components : " << complex_.num_connected_components() << std::endl;
+ std::clog << "Num connected components : " << complex_.num_connected_components() << std::endl;
+ std::clog << "+++++++++++++++++++++++++" << std::endl;
}
private:
@@ -226,11 +226,11 @@ class Model {
public:
void show_complex_stats() {
- std::cout << "++++++ Mesh stats +++++++" << std::endl;
- std::cout << "Num vertices : " << complex_.num_vertices() << std::endl;
- std::cout << "Num edges : " << complex_.num_edges() << std::endl;
- std::cout << "Num connected components : " << complex_.num_connected_components() << std::endl;
- std::cout << "+++++++++++++++++++++++++" << std::endl;
+ std::clog << "++++++ Mesh stats +++++++" << std::endl;
+ std::clog << "Num vertices : " << complex_.num_vertices() << std::endl;
+ std::clog << "Num edges : " << complex_.num_edges() << std::endl;
+ std::clog << "Num connected components : " << complex_.num_connected_components() << std::endl;
+ std::clog << "+++++++++++++++++++++++++" << std::endl;
}
void show_complex_dimension() {
@@ -247,18 +247,18 @@ class Model {
euler -= 1;
}
clock.end();
- std::cout << "++++++ Mesh dimension +++++++" << std::endl;
- std::cout << "Dimension : " << dimension << std::endl;
- std::cout << "Euler characteristic : " << euler << std::endl;
- std::cout << "Num simplices : " << num_simplices << std::endl;
- std::cout << "Total time: " << clock << std::endl;
- std::cout << "Time per simplex: " << clock.num_seconds() / num_simplices << " s" << std::endl;
- std::cout << "+++++++++++++++++++++++++" << std::endl;
+ std::clog << "++++++ Mesh dimension +++++++" << std::endl;
+ std::clog << "Dimension : " << dimension << std::endl;
+ std::clog << "Euler characteristic : " << euler << std::endl;
+ std::clog << "Num simplices : " << num_simplices << std::endl;
+ std::clog << "Total time: " << clock << std::endl;
+ std::clog << "Time per simplex: " << clock.num_seconds() / num_simplices << " s" << std::endl;
+ std::clog << "+++++++++++++++++++++++++" << std::endl;
}
void show_homology_group() {
#ifdef _WIN32
- std::cout << "Works only on linux x64 for the moment\n";
+ std::clog << "Works only on linux x64 for the moment\n";
#else
Gudhi::Clock clock;
run_chomp();
@@ -278,16 +278,16 @@ class Model {
else
euler -= 1;
}
- std::cout << "Saw " << num_simplices << " simplices with maximum dimension " << dimension << std::endl;
- std::cout << "The euler characteristic is : " << euler << std::endl;
+ std::clog << "Saw " << num_simplices << " simplices with maximum dimension " << dimension << std::endl;
+ std::clog << "The euler characteristic is : " << euler << std::endl;
}
void show_persistence(int p, double threshold, int max_dim, double min_pers) {
- Persistence_compute<Complex> persistence(complex_, std::cout, Persistence_params(p, threshold, max_dim, min_pers));
+ Persistence_compute<Complex> persistence(complex_, std::clog, Persistence_params(p, threshold, max_dim, min_pers));
}
void show_critical_points(double max_distance) {
- Critical_points<Complex> critical_points(complex_, std::cout, max_distance);
+ Critical_points<Complex> critical_points(complex_, std::clog, max_distance);
}
void show_is_manifold() {
@@ -296,12 +296,12 @@ class Model {
Is_manifold<Complex> test_manifold(complex_, dim, is_manifold);
if (is_manifold) {
- std::cout << "The complex is a " << dim << "-manifold\n";
+ std::clog << "The complex is a " << dim << "-manifold\n";
} else {
if (dim < 4) {
- std::cout << "The complex has dimension greater than " << dim << " and is not a manifold\n";
+ std::clog << "The complex has dimension greater than " << dim << " and is not a manifold\n";
} else {
- std::cout << "The complex has dimension>=4 and may or may not be a manifold\n";
+ std::clog << "The complex has dimension>=4 and may or may not be a manifold\n";
}
}
}
@@ -309,10 +309,10 @@ class Model {
private:
void run_chomp() {
save_complex_in_file_for_chomp();
- std::cout << "Call CHOMP library\n";
+ std::clog << "Call CHOMP library\n";
int returnValue = system("homsimpl chomp.sim");
if (returnValue != 0) {
- std::cout << "homsimpl (from CHOMP) failed. Please check it is installed or available in the PATH."
+ std::cerr << "homsimpl (from CHOMP) failed. Please check it is installed or available in the PATH."
<< std::endl;
}
}
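Both loops above accumulate the Euler characteristic as an alternating count: +1 for every even-dimensional simplex, -1 for every odd-dimensional one. As a hand-checkable example, a filled triangle (3 vertices, 3 edges, 1 triangle) gives 3 - 3 + 1 = 1, while its boundary, the hollow triangle, gives 3 - 3 = 0.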
diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h
index cd9b009f..b526017a 100644
--- a/src/GudhUI/utils/Bar_code_persistence.h
+++ b/src/GudhUI/utils/Bar_code_persistence.h
@@ -58,13 +58,13 @@ class Bar_code_persistence {
QGraphicsScene * scene = new QGraphicsScene();
view->setScene(scene);
double ratio = 600.0 / (max_death - min_birth);
- // std::cout << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl;
+ // std::clog << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl;
double height = 0.0, birth = 0.0, death = 0.0;
int pers_num = 1;
for (auto& persistence : persistence_vector) {
height = 5.0 * pers_num;
- // std::cout << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl;
+ // std::clog << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl;
if (std::isfinite(persistence.first))
birth = ((persistence.first - min_birth) * ratio) + 50.0;
else
diff --git a/src/GudhUI/utils/Critical_points.h b/src/GudhUI/utils/Critical_points.h
index 32fcf32e..97e58737 100644
--- a/src/GudhUI/utils/Critical_points.h
+++ b/src/GudhUI/utils/Critical_points.h
@@ -65,7 +65,7 @@ template<typename SkBlComplex> class Critical_points {
void anti_collapse_edges(const std::deque<Edge>& edges) {
unsigned pos = 0;
for (Edge e : edges) {
- std::cout << "edge " << pos++ << "/" << edges.size() << "\n";
+ std::clog << "edge " << pos++ << "/" << edges.size() << "\n";
auto eh = filled_complex_.add_edge_without_blockers(e.first, e.second);
int is_contractible(is_link_reducible(eh));
diff --git a/src/GudhUI/utils/Rips_builder.h b/src/GudhUI/utils/Rips_builder.h
index aba1a8e4..0300190c 100644
--- a/src/GudhUI/utils/Rips_builder.h
+++ b/src/GudhUI/utils/Rips_builder.h
@@ -43,13 +43,13 @@ template<typename SkBlComplex> class Rips_builder {
void compute_edges(double alpha) {
auto vertices = complex_.vertex_range();
for (auto p = vertices.begin(); p != vertices.end(); ++p) {
- std::cout << *p << " ";
- std::cout.flush();
+ std::clog << *p << " ";
+ std::clog.flush();
for (auto q = p; ++q != vertices.end(); /**/)
if (squared_eucl_distance(complex_.point(*p), complex_.point(*q)) < 4 * alpha * alpha)
complex_.add_edge_without_blockers(*p, *q);
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
};
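In compute_edges above, an edge {p, q} is created whenever squared_eucl_distance(p, q) < 4 * alpha * alpha, i.e. whenever |p - q| < 2 * alpha; comparing squared quantities avoids one square root per pair. A minimal standalone sketch of the same test, with a hypothetical point type and values:

#include <array>
#include <cstddef>
#include <iostream>

// True iff the two points are closer than 2 * alpha, using squared distances only.
bool within_rips_scale(const std::array<double, 3>& p, const std::array<double, 3>& q, double alpha) {
  double sq_dist = 0.0;
  for (std::size_t i = 0; i < 3; ++i) {
    double diff = p[i] - q[i];
    sq_dist += diff * diff;
  }
  return sq_dist < 4.0 * alpha * alpha;
}

int main() {
  std::array<double, 3> p{0.0, 0.0, 0.0};
  std::array<double, 3> q{1.0, 1.0, 0.0};               // distance sqrt(2) ~ 1.414
  std::cout << within_rips_scale(p, q, 0.5) << "\n";    // 0: 1.414 >= 2 * 0.5
  std::cout << within_rips_scale(p, q, 0.8) << "\n";    // 1: 1.414 <  2 * 0.8
  return 0;
}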
diff --git a/src/GudhUI/view/View_parameter.h b/src/GudhUI/view/View_parameter.h
index dfd3aa41..3671f4fb 100644
--- a/src/GudhUI/view/View_parameter.h
+++ b/src/GudhUI/view/View_parameter.h
@@ -52,13 +52,13 @@ class View_parameter {
void change_vertex_mode() {
int current_value = vertex_mode;
vertex_mode = static_cast<VERTEX_MODE> (++current_value % V_COUNT);
- std::cout << "Vertex mode : ";
+ std::clog << "Vertex mode : ";
switch (vertex_mode) {
case V_NONE:
- std::cout << "empty\n";
+ std::clog << "empty\n";
break;
case V_SIMPLE:
- std::cout << "simple\n";
+ std::clog << "simple\n";
break;
default:
break;
diff --git a/src/Hasse_complex/include/gudhi/Hasse_complex.h b/src/Hasse_complex/include/gudhi/Hasse_complex.h
index 209fd0b9..8ce8c36f 100644
--- a/src/Hasse_complex/include/gudhi/Hasse_complex.h
+++ b/src/Hasse_complex/include/gudhi/Hasse_complex.h
@@ -173,9 +173,9 @@ class Hasse_complex {
}
void display_simplex(Simplex_handle sh) {
- std::cout << dimension(sh) << " ";
- for (auto sh_b : boundary_simplex_range(sh)) std::cout << sh_b << " ";
- std::cout << " " << filtration(sh) << " key=" << key(sh);
+ std::clog << dimension(sh) << " ";
+ for (auto sh_b : boundary_simplex_range(sh)) std::clog << sh_b << " ";
+ std::clog << " " << filtration(sh) << " key=" << key(sh);
}
void initialize_filtration() {
diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp
index fd9c224a..f0afdca5 100644
--- a/src/Nerve_GIC/example/CoordGIC.cpp
+++ b/src/Nerve_GIC/example/CoordGIC.cpp
@@ -40,7 +40,7 @@ int main(int argc, char **argv) {
bool check = GIC.read_point_cloud(off_file_name);
if (!check) {
- std::cout << "Incorrect OFF file." << std::endl;
+ std::clog << "Incorrect OFF file." << std::endl;
} else {
GIC.set_type("GIC");
@@ -67,15 +67,15 @@ int main(int argc, char **argv) {
// --------------------------------------------
if (verb) {
- std::cout << "Coordinate GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ std::clog << "Coordinate GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
<< " simplices - " << stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on coordinate GIC simplices" << std::endl;
+ std::clog << "Iterator on coordinate GIC simplices" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
}
diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp
index 5a323795..518e1826 100644
--- a/src/Nerve_GIC/example/FuncGIC.cpp
+++ b/src/Nerve_GIC/example/FuncGIC.cpp
@@ -41,7 +41,7 @@ int main(int argc, char **argv) {
bool check = GIC.read_point_cloud(off_file_name);
if (!check) {
- std::cout << "Incorrect OFF file." << std::endl;
+ std::clog << "Incorrect OFF file." << std::endl;
} else {
GIC.set_type("GIC");
@@ -65,15 +65,15 @@ int main(int argc, char **argv) {
// --------------------------------------------
if (verb) {
- std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ std::clog << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
<< " simplices - " << stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on functional GIC simplices" << std::endl;
+ std::clog << "Iterator on functional GIC simplices" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
}
diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h
index 2a6d4788..1b1f9323 100644
--- a/src/Nerve_GIC/include/gudhi/GIC.h
+++ b/src/Nerve_GIC/include/gudhi/GIC.h
@@ -139,19 +139,9 @@ class Cover_complex {
for (boost::tie(ei, ei_end) = boost::edges(G); ei != ei_end; ++ei) boost::remove_edge(*ei, G);
}
- // Thread local is not available on XCode version < V.8
- // If not available, random engine is a class member.
-#ifndef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- std::default_random_engine re;
-#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
-
// Find random number in [0,1].
double GetUniform() {
- // Thread local is not available on XCode version < V.8
- // If available, random engine is defined for each thread.
-#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
thread_local std::default_random_engine re;
-#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
std::uniform_real_distribution<double> Dist(0, 1);
return Dist(re);
}
@@ -344,7 +334,7 @@ class Cover_complex {
if (num_edges(one_skeleton_OFF))
one_skeleton = one_skeleton_OFF;
else
- std::cout << "No triangulation read in OFF file!" << std::endl;
+ std::cerr << "No triangulation read in OFF file!" << std::endl;
}
public: // Set graph from Rips complex.
@@ -407,7 +397,7 @@ class Cover_complex {
std::ifstream input(distance, std::ios::out | std::ios::binary);
if (input.good()) {
- if (verbose) std::cout << "Reading distances..." << std::endl;
+ if (verbose) std::clog << "Reading distances..." << std::endl;
for (int i = 0; i < n; i++) {
for (int j = i; j < n; j++) {
input.read((char*)&d, 8);
@@ -417,12 +407,12 @@ class Cover_complex {
}
input.close();
} else {
- if (verbose) std::cout << "Computing distances..." << std::endl;
+ if (verbose) std::clog << "Computing distances..." << std::endl;
input.close();
std::ofstream output(distance, std::ios::out | std::ios::binary);
for (int i = 0; i < n; i++) {
int state = (int)floor(100 * (i * 1.0 + 1) / n) % 10;
- if (state == 0 && verbose) std::cout << "\r" << state << "%" << std::flush;
+ if (state == 0 && verbose) std::clog << "\r" << state << "%" << std::flush;
for (int j = i; j < n; j++) {
double dis = ref_distance(point_cloud[i], point_cloud[j]);
distances[i][j] = dis;
@@ -431,7 +421,7 @@ class Cover_complex {
}
}
output.close();
- if (verbose) std::cout << std::endl;
+ if (verbose) std::clog << std::endl;
}
}
@@ -451,14 +441,12 @@ class Cover_complex {
m = (std::min)(m, n - 1);
double delta = 0;
- if (verbose) std::cout << n << " points in R^" << data_dimension << std::endl;
- if (verbose) std::cout << "Subsampling " << m << " points" << std::endl;
+ if (verbose) std::clog << n << " points in R^" << data_dimension << std::endl;
+ if (verbose) std::clog << "Subsampling " << m << " points" << std::endl;
if (distances.size() == 0) compute_pairwise_distances(distance);
- // This cannot be parallelized if thread_local is not defined
- // thread_local is not defined for XCode < v.8
- #if defined(GUDHI_USE_TBB) && defined(GUDHI_CAN_USE_CXX11_THREAD_LOCAL)
+ #ifdef GUDHI_USE_TBB
std::mutex deltamutex;
tbb::parallel_for(0, N, [&](int i){
std::vector<int> samples(m);
@@ -487,7 +475,7 @@ class Cover_complex {
}
#endif
- if (verbose) std::cout << "delta = " << delta << std::endl;
+ if (verbose) std::clog << "delta = " << delta << std::endl;
set_graph_from_rips(delta, distance);
return delta;
}
@@ -530,7 +518,7 @@ class Cover_complex {
cover_name = "coordinate " + std::to_string(k);
}
else{
- std::cout << "Only pairwise distances provided---cannot access " << k << "th coordinate; returning null vector instead" << std::endl;
+ std::cerr << "Only pairwise distances provided---cannot access " << k << "th coordinate; returning null vector instead" << std::endl;
for (int i = 0; i < n; i++) func.push_back(0.0);
functional_cover = true;
cover_name = "null";
@@ -563,11 +551,11 @@ class Cover_complex {
*/
double set_automatic_resolution() {
if (!functional_cover) {
- std::cout << "Cover needs to come from the preimages of a function." << std::endl;
+ std::cerr << "Cover needs to come from the preimages of a function." << std::endl;
return 0;
}
if (type != "Nerve" && type != "GIC") {
- std::cout << "Type of complex needs to be specified." << std::endl;
+ std::cerr << "Type of complex needs to be specified." << std::endl;
return 0;
}
@@ -579,7 +567,7 @@ class Cover_complex {
for (boost::tie(ei, ei_end) = boost::edges(one_skeleton); ei != ei_end; ++ei)
reso = (std::max)(reso, std::abs(func[index[boost::source(*ei, one_skeleton)]] -
func[index[boost::target(*ei, one_skeleton)]]));
- if (verbose) std::cout << "resolution = " << reso << std::endl;
+ if (verbose) std::clog << "resolution = " << reso << std::endl;
resolution_double = reso;
}
@@ -589,7 +577,7 @@ class Cover_complex {
reso = (std::max)(reso, std::abs(func[index[boost::source(*ei, one_skeleton)]] -
func[index[boost::target(*ei, one_skeleton)]]) /
gain);
- if (verbose) std::cout << "resolution = " << reso << std::endl;
+ if (verbose) std::clog << "resolution = " << reso << std::endl;
resolution_double = reso;
}
@@ -622,11 +610,11 @@ class Cover_complex {
*/
void set_cover_from_function() {
if (resolution_double == -1 && resolution_int == -1) {
- std::cout << "Number and/or length of intervals not specified" << std::endl;
+ std::cerr << "Number and/or length of intervals not specified" << std::endl;
return;
}
if (gain == -1) {
- std::cout << "Gain not specified" << std::endl;
+ std::cerr << "Gain not specified" << std::endl;
return;
}
@@ -637,7 +625,7 @@ class Cover_complex {
minf = (std::min)(minf, func[i]);
maxf = (std::max)(maxf, func[i]);
}
- if (verbose) std::cout << "Min function value = " << minf << " and Max function value = " << maxf << std::endl;
+ if (verbose) std::clog << "Min function value = " << minf << " and Max function value = " << maxf << std::endl;
// Compute cover of im(f)
std::vector<std::pair<double, double> > intervals;
@@ -663,7 +651,7 @@ class Cover_complex {
res = intervals.size();
if (verbose) {
for (int i = 0; i < res; i++)
- std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ std::clog << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
<< std::endl;
}
} else {
@@ -681,7 +669,7 @@ class Cover_complex {
res = intervals.size();
if (verbose) {
for (int i = 0; i < res; i++)
- std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ std::clog << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
<< std::endl;
}
} else { // Case we use an integer and a double for the length of the intervals.
@@ -698,7 +686,7 @@ class Cover_complex {
res = intervals.size();
if (verbose) {
for (int i = 0; i < res; i++)
- std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ std::clog << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
<< std::endl;
}
}
@@ -715,7 +703,7 @@ class Cover_complex {
std::map<int, std::vector<int> > preimages;
std::map<int, double> funcstd;
- if (verbose) std::cout << "Computing preimages..." << std::endl;
+ if (verbose) std::clog << "Computing preimages..." << std::endl;
for (int i = 0; i < res; i++) {
// Find points in the preimage
std::pair<double, double> inter1 = intervals[i];
@@ -764,7 +752,7 @@ class Cover_complex {
}
#ifdef GUDHI_USE_TBB
- if (verbose) std::cout << "Computing connected components (parallelized)..." << std::endl;
+ if (verbose) std::clog << "Computing connected components (parallelized)..." << std::endl;
std::mutex covermutex, idmutex;
tbb::parallel_for(0, res, [&](int i){
// Compute connected components
@@ -800,7 +788,7 @@ class Cover_complex {
idmutex.unlock();
});
#else
- if (verbose) std::cout << "Computing connected components..." << std::endl;
+ if (verbose) std::clog << "Computing connected components..." << std::endl;
for (int i = 0; i < res; i++) {
// Compute connected components
Graph G = one_skeleton.create_subgraph();
@@ -894,7 +882,7 @@ class Cover_complex {
// Compute the geodesic distances to subsamples with Dijkstra
#ifdef GUDHI_USE_TBB
- if (verbose) std::cout << "Computing geodesic distances (parallelized)..." << std::endl;
+ if (verbose) std::clog << "Computing geodesic distances (parallelized)..." << std::endl;
std::mutex coverMutex; std::mutex mindistMutex;
tbb::parallel_for(0, m, [&](int i){
int seed = voronoi_subsamples[i];
@@ -916,7 +904,7 @@ class Cover_complex {
});
#else
for (int i = 0; i < m; i++) {
- if (verbose) std::cout << "Computing geodesic distances to seed " << i << "..." << std::endl;
+ if (verbose) std::clog << "Computing geodesic distances to seed " << i << "..." << std::endl;
int seed = voronoi_subsamples[i];
std::vector<double> dmap(n);
boost::dijkstra_shortest_paths(
@@ -991,7 +979,7 @@ class Cover_complex {
color_name.append(std::to_string(k));
}
else{
- std::cout << "Only pairwise distances provided---cannot access " << k << "th coordinate; returning null vector instead" << std::endl;
+ std::cerr << "Only pairwise distances provided---cannot access " << k << "th coordinate; returning null vector instead" << std::endl;
for (int i = 0; i < n; i++) func.push_back(0.0);
functional_cover = true;
cover_name = "null";
@@ -1054,7 +1042,7 @@ class Cover_complex {
}
graphic << "}";
graphic.close();
- std::cout << mapp << " file generated. It can be visualized with e.g. neato." << std::endl;
+ std::clog << mapp << " file generated. It can be visualized with e.g. neato." << std::endl;
}
public: // Create a .txt file that can be compiled with KeplerMapper.
@@ -1090,7 +1078,7 @@ class Cover_complex {
if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask)
graphic << name2id[simplices[i][0]] << " " << name2id[simplices[i][1]] << std::endl;
graphic.close();
- std::cout << mapp
+ std::clog << mapp
<< " generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox."
<< std::endl;
}
@@ -1137,7 +1125,7 @@ class Cover_complex {
for (int i = 0; i < numfaces; i++)
graphic << 3 << " " << faces[i][0] << " " << faces[i][1] << " " << faces[i][2] << std::endl;
graphic.close();
- std::cout << mapp << " generated. It can be visualized with e.g. geomview." << std::endl;
+ std::clog << mapp << " generated. It can be visualized with e.g. geomview." << std::endl;
}
// *******************************************************************************************************************
@@ -1185,7 +1173,7 @@ class Cover_complex {
for (int i = 0; i < max_dim; i++) {
std::vector<std::pair<double, double> > bars = pcoh.intervals_in_dimension(i);
int num_bars = bars.size(); if(i == 0) num_bars -= 1;
- if(verbose) std::cout << num_bars << " interval(s) in dimension " << i << ":" << std::endl;
+ if(verbose) std::clog << num_bars << " interval(s) in dimension " << i << ":" << std::endl;
for (int j = 0; j < num_bars; j++) {
double birth = bars[j].first;
double death = bars[j].second;
@@ -1199,7 +1187,7 @@ class Cover_complex {
else
death = minf + (2 - death) * (maxf - minf);
PD.push_back(std::pair<double, double>(birth, death));
- if (verbose) std::cout << " [" << birth << ", " << death << "]" << std::endl;
+ if (verbose) std::clog << " [" << birth << ", " << death << "]" << std::endl;
}
}
return PD;
@@ -1213,11 +1201,9 @@ class Cover_complex {
*/
void compute_distribution(unsigned int N = 100) {
unsigned int sz = distribution.size();
- if (sz >= N) {
- std::cout << "Already done!" << std::endl;
- } else {
+ if (sz < N) {
for (unsigned int i = 0; i < N - sz; i++) {
- if (verbose) std::cout << "Computing " << i << "th bootstrap, bottleneck distance = ";
+ if (verbose) std::clog << "Computing " << i << "th bootstrap, bottleneck distance = ";
Cover_complex Cboot; Cboot.n = this->n; Cboot.data_dimension = this->data_dimension; Cboot.type = this->type; Cboot.functional_cover = true;
@@ -1243,7 +1229,7 @@ class Cover_complex {
Cboot.find_simplices();
Cboot.compute_PD();
double db = Gudhi::persistence_diagram::bottleneck_distance(this->PD, Cboot.PD);
- if (verbose) std::cout << db << std::endl;
+ if (verbose) std::clog << db << std::endl;
distribution.push_back(db);
}
@@ -1260,7 +1246,7 @@ class Cover_complex {
double compute_distance_from_confidence_level(double alpha) {
unsigned int N = distribution.size();
double d = distribution[std::floor(alpha * N)];
- if (verbose) std::cout << "Distance corresponding to confidence " << alpha << " is " << d << std::endl;
+ if (verbose) std::clog << "Distance corresponding to confidence " << alpha << " is " << d << std::endl;
return d;
}
@@ -1275,7 +1261,7 @@ class Cover_complex {
double level = 1;
for (unsigned int i = 0; i < N; i++)
if (distribution[i] >= d){ level = i * 1.0 / N; break; }
- if (verbose) std::cout << "Confidence level of distance " << d << " is " << level << std::endl;
+ if (verbose) std::clog << "Confidence level of distance " << d << " is " << level << std::endl;
return level;
}
@@ -1288,7 +1274,7 @@ class Cover_complex {
double distancemin = (std::numeric_limits<double>::max)(); int N = PD.size();
for (int i = 0; i < N; i++) distancemin = (std::min)(distancemin, 0.5 * std::abs(PD[i].second - PD[i].first));
double p_value = 1 - compute_confidence_level_from_distance(distancemin);
- if (verbose) std::cout << "p value = " << p_value << std::endl;
+ if (verbose) std::clog << "p value = " << p_value << std::endl;
return p_value;
}
@@ -1319,7 +1305,7 @@ class Cover_complex {
*/
void find_simplices() {
if (type != "Nerve" && type != "GIC") {
- std::cout << "Type of complex needs to be specified." << std::endl;
+ std::cerr << "Type of complex needs to be specified." << std::endl;
return;
}
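
A minimal standalone sketch (illustrative only; the free-function name get_uniform is hypothetical, modeled on Cover_complex::GetUniform()) of the thread_local engine pattern used above: each thread keeps its own engine, so concurrent calls from parallelized loops do not share mutable state.

    #include <random>

    // Hypothetical free function; one engine per thread, safe to call concurrently.
    double get_uniform() {
      thread_local std::default_random_engine re;
      std::uniform_real_distribution<double> dist(0.0, 1.0);
      return dist(re);
    }
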
diff --git a/src/Nerve_GIC/utilities/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp
index d34e922c..7b09f89d 100644
--- a/src/Nerve_GIC/utilities/Nerve.cpp
+++ b/src/Nerve_GIC/utilities/Nerve.cpp
@@ -42,7 +42,7 @@ int main(int argc, char **argv) {
bool check = SC.read_point_cloud(off_file_name);
if (!check) {
- std::cout << "Incorrect OFF file." << std::endl;
+ std::clog << "Incorrect OFF file." << std::endl;
} else {
SC.set_type("Nerve");
@@ -67,15 +67,15 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
if (verb) {
- std::cout << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
+ std::clog << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
<< stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on Nerve simplices" << std::endl;
+ std::clog << "Iterator on Nerve simplices" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
}
diff --git a/src/Nerve_GIC/utilities/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp
index 0182c948..117c89fb 100644
--- a/src/Nerve_GIC/utilities/VoronoiGIC.cpp
+++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp
@@ -40,7 +40,7 @@ int main(int argc, char **argv) {
bool check = GIC.read_point_cloud(off_file_name);
if (!check) {
- std::cout << "Incorrect OFF file." << std::endl;
+ std::clog << "Incorrect OFF file." << std::endl;
} else {
GIC.set_type("GIC");
@@ -61,15 +61,15 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
if (verb) {
- std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ std::clog << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
<< " simplices - " << stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on graph induced complex simplices" << std::endl;
+ std::clog << "Iterator on graph induced complex simplices" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
}
diff --git a/src/Persistence_representations/example/persistence_heat_maps.cpp b/src/Persistence_representations/example/persistence_heat_maps.cpp
index 1bf3a637..9fd6779a 100644
--- a/src/Persistence_representations/example/persistence_heat_maps.cpp
+++ b/src/Persistence_representations/example/persistence_heat_maps.cpp
@@ -65,7 +65,7 @@ int main(int argc, char** argv) {
median.compute_median(vector_of_maps);
// to compute L^1 distance between hm1 and hm2:
- std::cout << "The L^1 distance is : " << hm1.distance(hm2, 1) << std::endl;
+ std::clog << "The L^1 distance is : " << hm1.distance(hm2, 1) << std::endl;
// to average of hm1 and hm2:
std::vector<Persistence_heat_maps*> to_average;
@@ -75,15 +75,15 @@ int main(int argc, char** argv) {
av.compute_average(to_average);
// to compute scalar product of hm1 and hm2:
- std::cout << "Scalar product is : " << hm1.compute_scalar_product(hm2) << std::endl;
+ std::clog << "Scalar product is : " << hm1.compute_scalar_product(hm2) << std::endl;
Persistence_heat_maps hm1k(persistence1, Gaussian_function(1.0));
Persistence_heat_maps hm2k(persistence2, Gaussian_function(1.0));
Persistence_heat_maps hm1i(persistence1, Gaussian_function(1.0), 20, 20, 0, 11, 0, 11);
Persistence_heat_maps hm2i(persistence2, Gaussian_function(1.0), 20, 20, 0, 11, 0, 11);
- std::cout << "Scalar product computed with exact 2D kernel on grid is : " << hm1i.compute_scalar_product(hm2i)
+ std::clog << "Scalar product computed with exact 2D kernel on grid is : " << hm1i.compute_scalar_product(hm2i)
<< std::endl;
- std::cout << "Scalar product computed with exact 2D kernel is : " << hm1k.compute_scalar_product(hm2k) << std::endl;
+ std::clog << "Scalar product computed with exact 2D kernel is : " << hm1k.compute_scalar_product(hm2k) << std::endl;
return 0;
}
diff --git a/src/Persistence_representations/example/persistence_intervals.cpp b/src/Persistence_representations/example/persistence_intervals.cpp
index c908581c..748b9ae4 100644
--- a/src/Persistence_representations/example/persistence_intervals.cpp
+++ b/src/Persistence_representations/example/persistence_intervals.cpp
@@ -18,59 +18,59 @@ using Persistence_intervals = Gudhi::Persistence_representations::Persistence_in
int main(int argc, char** argv) {
if (argc != 2) {
- std::cout << "To run this program, please provide the name of a file with persistence diagram \n";
+ std::clog << "To run this program, please provide the name of a file with persistence diagram \n";
return 1;
}
Persistence_intervals p(argv[1]);
std::pair<double, double> min_max_ = p.get_x_range();
- std::cout << "Birth-death range : " << min_max_.first << " " << min_max_.second << std::endl;
+ std::clog << "Birth-death range : " << min_max_.first << " " << min_max_.second << std::endl;
std::vector<double> dominant_ten_intervals_length = p.length_of_dominant_intervals(10);
- std::cout << "Length of ten dominant intervals : " << std::endl;
+ std::clog << "Length of ten dominant intervals : " << std::endl;
for (size_t i = 0; i != dominant_ten_intervals_length.size(); ++i) {
- std::cout << dominant_ten_intervals_length[i] << std::endl;
+ std::clog << dominant_ten_intervals_length[i] << std::endl;
}
std::vector<std::pair<double, double> > ten_dominant_intervals = p.dominant_intervals(10);
- std::cout << "Here are the dominant intervals : " << std::endl;
+ std::clog << "Here are the dominant intervals : " << std::endl;
for (size_t i = 0; i != ten_dominant_intervals.size(); ++i) {
- std::cout << "( " << ten_dominant_intervals[i].first << "," << ten_dominant_intervals[i].second << std::endl;
+ std::clog << "( " << ten_dominant_intervals[i].first << "," << ten_dominant_intervals[i].second << std::endl;
}
std::vector<size_t> histogram = p.histogram_of_lengths(10);
- std::cout << "Here is the histogram of barcode's length : " << std::endl;
+ std::clog << "Here is the histogram of barcode's length : " << std::endl;
for (size_t i = 0; i != histogram.size(); ++i) {
- std::cout << histogram[i] << " ";
+ std::clog << histogram[i] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
std::vector<size_t> cumulative_histogram = p.cumulative_histogram_of_lengths(10);
- std::cout << "Cumulative histogram : " << std::endl;
+ std::clog << "Cumulative histogram : " << std::endl;
for (size_t i = 0; i != cumulative_histogram.size(); ++i) {
- std::cout << cumulative_histogram[i] << " ";
+ std::clog << cumulative_histogram[i] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
std::vector<double> char_funct_diag = p.characteristic_function_of_diagram(min_max_.first, min_max_.second);
- std::cout << "Characteristic function of diagram : " << std::endl;
+ std::clog << "Characteristic function of diagram : " << std::endl;
for (size_t i = 0; i != char_funct_diag.size(); ++i) {
- std::cout << char_funct_diag[i] << " ";
+ std::clog << char_funct_diag[i] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
std::vector<double> cumul_char_funct_diag =
p.cumulative_characteristic_function_of_diagram(min_max_.first, min_max_.second);
- std::cout << "Cumulative characteristic function of diagram : " << std::endl;
+ std::clog << "Cumulative characteristic function of diagram : " << std::endl;
for (size_t i = 0; i != cumul_char_funct_diag.size(); ++i) {
- std::cout << cumul_char_funct_diag[i] << " ";
+ std::clog << cumul_char_funct_diag[i] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
- std::cout << "Persistence Betti numbers \n";
+ std::clog << "Persistence Betti numbers \n";
std::vector<std::pair<double, size_t> > pbns = p.compute_persistent_betti_numbers();
for (size_t i = 0; i != pbns.size(); ++i) {
- std::cout << pbns[i].first << " " << pbns[i].second << std::endl;
+ std::clog << pbns[i].first << " " << pbns[i].second << std::endl;
}
return 0;
diff --git a/src/Persistence_representations/example/persistence_landscape.cpp b/src/Persistence_representations/example/persistence_landscape.cpp
index ff18d105..d39ae0b8 100644
--- a/src/Persistence_representations/example/persistence_landscape.cpp
+++ b/src/Persistence_representations/example/persistence_landscape.cpp
@@ -37,35 +37,35 @@ int main(int argc, char** argv) {
Persistence_landscape l2(persistence2);
// This is how to compute integral of landscapes:
- std::cout << "Integral of the first landscape : " << l1.compute_integral_of_landscape() << std::endl;
- std::cout << "Integral of the second landscape : " << l2.compute_integral_of_landscape() << std::endl;
+ std::clog << "Integral of the first landscape : " << l1.compute_integral_of_landscape() << std::endl;
+ std::clog << "Integral of the second landscape : " << l2.compute_integral_of_landscape() << std::endl;
// And here how to write landscapes to stream:
- std::cout << "l1 : " << l1 << std::endl;
- std::cout << "l2 : " << l2 << std::endl;
+ std::clog << "l1 : " << l1 << std::endl;
+ std::clog << "l2 : " << l2 << std::endl;
// Arithmetic operations on landscapes:
Persistence_landscape sum = l1 + l2;
- std::cout << "sum : " << sum << std::endl;
+ std::clog << "sum : " << sum << std::endl;
// here are the maxima of the functions:
- std::cout << "Maximum of l1 : " << l1.compute_maximum() << std::endl;
- std::cout << "Maximum of l2 : " << l2.compute_maximum() << std::endl;
+ std::clog << "Maximum of l1 : " << l1.compute_maximum() << std::endl;
+ std::clog << "Maximum of l2 : " << l2.compute_maximum() << std::endl;
// here are the norms of landscapes:
- std::cout << "L^1 Norm of l1 : " << l1.compute_norm_of_landscape(1.) << std::endl;
- std::cout << "L^1 Norm of l2 : " << l2.compute_norm_of_landscape(1.) << std::endl;
+ std::clog << "L^1 Norm of l1 : " << l1.compute_norm_of_landscape(1.) << std::endl;
+ std::clog << "L^1 Norm of l2 : " << l2.compute_norm_of_landscape(1.) << std::endl;
// here is the average of landscapes:
Persistence_landscape average;
average.compute_average({&l1, &l2});
- std::cout << "average : " << average << std::endl;
+ std::clog << "average : " << average << std::endl;
// here is the distance of landscapes:
- std::cout << "Distance : " << l1.distance(l2) << std::endl;
+ std::clog << "Distance : " << l1.distance(l2) << std::endl;
// here is the scalar product of landscapes:
- std::cout << "Scalar product : " << l1.compute_scalar_product(l2) << std::endl;
+ std::clog << "Scalar product : " << l1.compute_scalar_product(l2) << std::endl;
// here is how to create a file which is suitable for visualization via gnuplot:
average.plot("average_landscape");
diff --git a/src/Persistence_representations/example/persistence_landscape_on_grid.cpp b/src/Persistence_representations/example/persistence_landscape_on_grid.cpp
index 16a58e1d..6d58e167 100644
--- a/src/Persistence_representations/example/persistence_landscape_on_grid.cpp
+++ b/src/Persistence_representations/example/persistence_landscape_on_grid.cpp
@@ -37,31 +37,31 @@ int main(int argc, char** argv) {
Persistence_landscape_on_grid l2(persistence2, 0, 11, 20);
// This is how to compute integral of landscapes:
- std::cout << "Integral of the first landscape : " << l1.compute_integral_of_landscape() << std::endl;
- std::cout << "Integral of the second landscape : " << l2.compute_integral_of_landscape() << std::endl;
+ std::clog << "Integral of the first landscape : " << l1.compute_integral_of_landscape() << std::endl;
+ std::clog << "Integral of the second landscape : " << l2.compute_integral_of_landscape() << std::endl;
// And here how to write landscapes to stream:
- std::cout << "l1 : " << l1 << std::endl;
- std::cout << "l2 : " << l2 << std::endl;
+ std::clog << "l1 : " << l1 << std::endl;
+ std::clog << "l2 : " << l2 << std::endl;
// here are the maxima of the functions:
- std::cout << "Maximum of l1 : " << l1.compute_maximum() << std::endl;
- std::cout << "Maximum of l2 : " << l2.compute_maximum() << std::endl;
+ std::clog << "Maximum of l1 : " << l1.compute_maximum() << std::endl;
+ std::clog << "Maximum of l2 : " << l2.compute_maximum() << std::endl;
// here are the norms of landscapes:
- std::cout << "L^1 Norm of l1 : " << l1.compute_norm_of_landscape(1.) << std::endl;
- std::cout << "L^1 Norm of l2 : " << l2.compute_norm_of_landscape(1.) << std::endl;
+ std::clog << "L^1 Norm of l1 : " << l1.compute_norm_of_landscape(1.) << std::endl;
+ std::clog << "L^1 Norm of l2 : " << l2.compute_norm_of_landscape(1.) << std::endl;
// here is the average of landscapes:
Persistence_landscape_on_grid average;
average.compute_average({&l1, &l2});
- std::cout << "average : " << average << std::endl;
+ std::clog << "average : " << average << std::endl;
// here is the distance of landscapes:
- std::cout << "Distance : " << l1.distance(l2) << std::endl;
+ std::clog << "Distance : " << l1.distance(l2) << std::endl;
// here is the scalar product of landscapes:
- std::cout << "Scalar product : " << l1.compute_scalar_product(l2) << std::endl;
+ std::clog << "Scalar product : " << l1.compute_scalar_product(l2) << std::endl;
// here is how to create a file which is suitable for visualization via gnuplot:
average.plot("average_landscape");
diff --git a/src/Persistence_representations/example/persistence_vectors.cpp b/src/Persistence_representations/example/persistence_vectors.cpp
index b27e52d2..89e2fb83 100644
--- a/src/Persistence_representations/example/persistence_vectors.cpp
+++ b/src/Persistence_representations/example/persistence_vectors.cpp
@@ -41,19 +41,19 @@ int main(int argc, char** argv) {
Vector_distances_in_diagram v2(persistence2, std::numeric_limits<size_t>::max());
// writing to a stream:
- std::cout << "v1 : " << v1 << std::endl;
- std::cout << "v2 : " << v2 << std::endl;
+ std::clog << "v1 : " << v1 << std::endl;
+ std::clog << "v2 : " << v2 << std::endl;
// averages:
Vector_distances_in_diagram average;
average.compute_average({&v1, &v2});
- std::cout << "Average : " << average << std::endl;
+ std::clog << "Average : " << average << std::endl;
// computations of distances:
- std::cout << "l^1 distance : " << v1.distance(v2) << std::endl;
+ std::clog << "l^1 distance : " << v1.distance(v2) << std::endl;
// computations of scalar product:
- std::cout << "Scalar product of l1 and l2 : " << v1.compute_scalar_product(v2) << std::endl;
+ std::clog << "Scalar product of l1 and l2 : " << v1.compute_scalar_product(v2) << std::endl;
// create a file with a gnuplot script:
v1.plot("plot_of_vector_representation");
diff --git a/src/Persistence_representations/example/sliced_wasserstein.cpp b/src/Persistence_representations/example/sliced_wasserstein.cpp
index d5414d00..d4e31ebf 100644
--- a/src/Persistence_representations/example/sliced_wasserstein.cpp
+++ b/src/Persistence_representations/example/sliced_wasserstein.cpp
@@ -38,10 +38,10 @@ int main(int argc, char** argv) {
SW swex1(persistence1, 1, -1);
SW swex2(persistence2, 1, -1);
- std::cout << "Approx SW kernel: " << sw1.compute_scalar_product(sw2) << std::endl;
- std::cout << "Exact SW kernel: " << swex1.compute_scalar_product(swex2) << std::endl;
- std::cout << "Distance induced by approx SW kernel: " << sw1.distance(sw2) << std::endl;
- std::cout << "Distance induced by exact SW kernel: " << swex1.distance(swex2) << std::endl;
+ std::clog << "Approx SW kernel: " << sw1.compute_scalar_product(sw2) << std::endl;
+ std::clog << "Exact SW kernel: " << swex1.compute_scalar_product(swex2) << std::endl;
+ std::clog << "Distance induced by approx SW kernel: " << sw1.distance(sw2) << std::endl;
+ std::clog << "Distance induced by exact SW kernel: " << swex1.distance(swex2) << std::endl;
return 0;
}
diff --git a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h
index b1af3503..fab88489 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_heat_maps.h
@@ -55,9 +55,9 @@ std::vector<std::vector<double> > create_Gaussian_filter(size_t pixel_radius, do
}
if (dbg) {
- std::cerr << "Kernel initialize \n";
- std::cerr << "pixel_radius : " << pixel_radius << std::endl;
- std::cerr << "kernel.size() : " << kernel.size() << std::endl;
+ std::clog << "Kernel initialize \n";
+ std::clog << "pixel_radius : " << pixel_radius << std::endl;
+ std::clog << "kernel.size() : " << kernel.size() << std::endl;
getchar();
}
@@ -79,12 +79,12 @@ std::vector<std::vector<double> > create_Gaussian_filter(size_t pixel_radius, do
}
if (dbg) {
- std::cerr << "Here is the kernel : \n";
+ std::clog << "Here is the kernel : \n";
for (size_t i = 0; i != kernel.size(); ++i) {
for (size_t j = 0; j != kernel[i].size(); ++j) {
- std::cerr << kernel[i][j] << " ";
+ std::clog << kernel[i][j] << " ";
}
- std::cerr << std::endl;
+ std::clog << std::endl;
}
}
return kernel;
@@ -290,16 +290,16 @@ class Persistence_heat_maps {
bool dbg = false;
if (this->heat_map.size() != second.heat_map.size()) {
if (dbg)
- std::cerr << "this->heat_map.size() : " << this->heat_map.size()
+ std::clog << "this->heat_map.size() : " << this->heat_map.size()
<< " \n second.heat_map.size() : " << second.heat_map.size() << std::endl;
return false;
}
if (this->min_ != second.min_) {
- if (dbg) std::cerr << "this->min_ : " << this->min_ << ", second.min_ : " << second.min_ << std::endl;
+ if (dbg) std::clog << "this->min_ : " << this->min_ << ", second.min_ : " << second.min_ << std::endl;
return false;
}
if (this->max_ != second.max_) {
- if (dbg) std::cerr << "this->max_ : " << this->max_ << ", second.max_ : " << second.max_ << std::endl;
+ if (dbg) std::clog << "this->max_ : " << this->max_ << ", second.max_ : " << second.max_ << std::endl;
return false;
}
// in the other case we may assume that the persistence images are defined on the same domain.
@@ -322,15 +322,15 @@ class Persistence_heat_maps {
bool operator==(const Persistence_heat_maps& rhs) const {
bool dbg = false;
if (!this->check_if_the_same(rhs)) {
- if (dbg) std::cerr << "The domains are not the same \n";
+ if (dbg) std::clog << "The domains are not the same \n";
return false; // in this case, the domains are not the same, so the maps cannot be the same.
}
for (size_t i = 0; i != this->heat_map.size(); ++i) {
for (size_t j = 0; j != this->heat_map[i].size(); ++j) {
if (!almost_equal(this->heat_map[i][j], rhs.heat_map[i][j])) {
if (dbg) {
- std::cerr << "this->heat_map[" << i << "][" << j << "] = " << this->heat_map[i][j] << std::endl;
- std::cerr << "rhs.heat_map[" << i << "][" << j << "] = " << rhs.heat_map[i][j] << std::endl;
+ std::clog << "this->heat_map[" << i << "][" << j << "] = " << this->heat_map[i][j] << std::endl;
+ std::clog << "rhs.heat_map[" << i << "][" << j << "] = " << rhs.heat_map[i][j] << std::endl;
}
return false;
}
@@ -586,14 +586,14 @@ void Persistence_heat_maps<Scalling_of_kernels>::construct(const std::vector<std
bool erase_below_diagonal, size_t number_of_pixels,
double min_, double max_) {
bool dbg = false;
- if (dbg) std::cerr << "Entering construct procedure \n";
+ if (dbg) std::clog << "Entering construct procedure \n";
Scalling_of_kernels f;
this->f = f;
- if (dbg) std::cerr << "min and max passed to construct() procedure: " << min_ << " " << max_ << std::endl;
+ if (dbg) std::clog << "min and max passed to construct() procedure: " << min_ << " " << max_ << std::endl;
if (min_ == max_) {
- if (dbg) std::cerr << "min and max parameters will be determined based on intervals \n";
+ if (dbg) std::clog << "min and max parameters will be determined based on intervals \n";
// in this case, we want the program to set up the min_ and max_ values by itself.
min_ = std::numeric_limits<int>::max();
max_ = -std::numeric_limits<int>::max();
@@ -611,9 +611,9 @@ void Persistence_heat_maps<Scalling_of_kernels>::construct(const std::vector<std
}
if (dbg) {
- std::cerr << "min_ : " << min_ << std::endl;
- std::cerr << "max_ : " << max_ << std::endl;
- std::cerr << "number_of_pixels : " << number_of_pixels << std::endl;
+ std::clog << "min_ : " << min_ << std::endl;
+ std::clog << "max_ : " << max_ << std::endl;
+ std::clog << "number_of_pixels : " << number_of_pixels << std::endl;
getchar();
}
@@ -628,7 +628,7 @@ void Persistence_heat_maps<Scalling_of_kernels>::construct(const std::vector<std
}
this->heat_map = heat_map_;
- if (dbg) std::cerr << "Done creating of the heat map, now we will fill in the structure \n";
+ if (dbg) std::clog << "Done creating of the heat map, now we will fill in the structure \n";
for (size_t pt_nr = 0; pt_nr != intervals_.size(); ++pt_nr) {
// compute the value of intervals_[pt_nr] in the grid:
@@ -638,9 +638,9 @@ void Persistence_heat_maps<Scalling_of_kernels>::construct(const std::vector<std
static_cast<int>((intervals_[pt_nr].second - this->min_) / (this->max_ - this->min_) * number_of_pixels);
if (dbg) {
- std::cerr << "point : " << intervals_[pt_nr].first << " , " << intervals_[pt_nr].second << std::endl;
- std::cerr << "x_grid : " << x_grid << std::endl;
- std::cerr << "y_grid : " << y_grid << std::endl;
+ std::clog << "point : " << intervals_[pt_nr].first << " , " << intervals_[pt_nr].second << std::endl;
+ std::clog << "x_grid : " << x_grid << std::endl;
+ std::clog << "y_grid : " << y_grid << std::endl;
}
// x_grid and y_grid gives a center of the kernel. We want to have its lower left corner. To get this, we need to
@@ -650,9 +650,9 @@ void Persistence_heat_maps<Scalling_of_kernels>::construct(const std::vector<std
// note that the numbers x_grid and y_grid may be negative.
if (dbg) {
- std::cerr << "After shift : \n";
- std::cerr << "x_grid : " << x_grid << std::endl;
- std::cerr << "y_grid : " << y_grid << std::endl;
+ std::clog << "After shift : \n";
+ std::clog << "x_grid : " << x_grid << std::endl;
+ std::clog << "y_grid : " << y_grid << std::endl;
}
double scaling_value = this->f(intervals_[pt_nr]);
@@ -663,11 +663,11 @@ void Persistence_heat_maps<Scalling_of_kernels>::construct(const std::vector<std
if (((x_grid + i) >= 0) && (x_grid + i < this->heat_map.size()) && ((y_grid + j) >= 0) &&
(y_grid + j < this->heat_map.size())) {
if (dbg) {
- std::cerr << y_grid + j << " " << x_grid + i << std::endl;
+ std::clog << y_grid + j << " " << x_grid + i << std::endl;
}
this->heat_map[y_grid + j][x_grid + i] += scaling_value * filter[i][j];
if (dbg) {
- std::cerr << "Position : (" << x_grid + i << "," << y_grid + j
+ std::clog << "Position : (" << x_grid + i << "," << y_grid + j
<< ") got increased by the value : " << filter[i][j] << std::endl;
}
}
@@ -805,7 +805,7 @@ void Persistence_heat_maps<Scalling_of_kernels>::plot(const char* filename) cons
out << std::endl;
}
out.close();
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
}
@@ -842,7 +842,7 @@ void Persistence_heat_maps<Scalling_of_kernels>::load_from_file(const char* file
in >> this->min_ >> this->max_;
if (dbg) {
- std::cerr << "Reading the following values of min and max : " << this->min_ << " , " << this->max_ << std::endl;
+ std::clog << "Reading the following values of min and max : " << this->min_ << " , " << this->max_ << std::endl;
}
std::string temp;
@@ -859,18 +859,18 @@ void Persistence_heat_maps<Scalling_of_kernels>::load_from_file(const char* file
lineSS >> point;
line_of_heat_map.push_back(point);
if (dbg) {
- std::cout << point << " ";
+ std::clog << point << " ";
}
}
if (dbg) {
- std::cout << std::endl;
+ std::clog << std::endl;
getchar();
}
if (in.good()) this->heat_map.push_back(line_of_heat_map);
}
in.close();
- if (dbg) std::cout << "Done \n";
+ if (dbg) std::clog << "Done \n";
}
// Concretizations of virtual methods:
@@ -878,7 +878,7 @@ template <typename Scalling_of_kernels>
std::vector<double> Persistence_heat_maps<Scalling_of_kernels>::vectorize(int number_of_function) const {
std::vector<double> result;
if (!discrete) {
- std::cout << "No vectorize method in case of infinite dimensional vectorization" << std::endl;
+ std::cerr << "No vectorize method in case of infinite dimensional vectorization" << std::endl;
return result;
}
diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals.h b/src/Persistence_representations/include/gudhi/Persistence_intervals.h
index ea4220ea..a6c1d6f0 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_intervals.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_intervals.h
@@ -185,7 +185,7 @@ class Persistence_intervals {
out.close();
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
}
@@ -293,7 +293,7 @@ std::vector<std::pair<double, double> > Persistence_intervals::dominant_interval
for (size_t i = 0; i != std::min(where_to_cut, position_length_vector.size()); ++i) {
result.push_back(this->intervals[position_length_vector[i].first]);
if (dbg)
- std::cerr << "Position : " << position_length_vector[i].first << " length : " << position_length_vector[i].second
+ std::clog << "Position : " << position_length_vector[i].first << " length : " << position_length_vector[i].second
<< std::endl;
}
@@ -303,7 +303,7 @@ std::vector<std::pair<double, double> > Persistence_intervals::dominant_interval
std::vector<size_t> Persistence_intervals::histogram_of_lengths(size_t number_of_bins) const {
bool dbg = false;
- if (dbg) std::cerr << "this->intervals.size() : " << this->intervals.size() << std::endl;
+ if (dbg) std::clog << "this->intervals.size() : " << this->intervals.size() << std::endl;
// first find the length of the longest interval:
double lengthOfLongest = 0;
for (size_t i = 0; i != this->intervals.size(); ++i) {
@@ -313,7 +313,7 @@ std::vector<size_t> Persistence_intervals::histogram_of_lengths(size_t number_of
}
if (dbg) {
- std::cerr << "lengthOfLongest : " << lengthOfLongest << std::endl;
+ std::clog << "lengthOfLongest : " << lengthOfLongest << std::endl;
}
// this is a container we will use to store the resulting histogram
@@ -330,10 +330,10 @@ std::vector<size_t> Persistence_intervals::histogram_of_lengths(size_t number_of
++result[position];
if (dbg) {
- std::cerr << "i : " << i << std::endl;
- std::cerr << "Interval : [" << this->intervals[i].first << " , " << this->intervals[i].second << " ] \n";
- std::cerr << "relative_length_of_this_interval : " << relative_length_of_this_interval << std::endl;
- std::cerr << "position : " << position << std::endl;
+ std::clog << "i : " << i << std::endl;
+ std::clog << "Interval : [" << this->intervals[i].first << " , " << this->intervals[i].second << " ] \n";
+ std::clog << "relative_length_of_this_interval : " << relative_length_of_this_interval << std::endl;
+ std::clog << "position : " << position << std::endl;
getchar();
}
}
@@ -342,7 +342,7 @@ std::vector<size_t> Persistence_intervals::histogram_of_lengths(size_t number_of
result.resize(number_of_bins);
if (dbg) {
- for (size_t i = 0; i != result.size(); ++i) std::cerr << result[i] << std::endl;
+ for (size_t i = 0; i != result.size(); ++i) std::clog << result[i] << std::endl;
}
return result;
}
@@ -368,7 +368,7 @@ std::vector<double> Persistence_intervals::characteristic_function_of_diagram(do
for (size_t i = 0; i != this->intervals.size(); ++i) {
if (dbg) {
- std::cerr << "Interval : " << this->intervals[i].first << " , " << this->intervals[i].second << std::endl;
+ std::clog << "Interval : " << this->intervals[i].first << " , " << this->intervals[i].second << std::endl;
}
size_t beginIt = 0;
@@ -390,8 +390,8 @@ std::vector<double> Persistence_intervals::characteristic_function_of_diagram(do
}
if (dbg) {
- std::cerr << "beginIt : " << beginIt << std::endl;
- std::cerr << "endIt : " << endIt << std::endl;
+ std::clog << "beginIt : " << beginIt << std::endl;
+ std::clog << "endIt : " << endIt << std::endl;
}
for (size_t pos = beginIt; pos != endIt; ++pos) {
@@ -399,11 +399,11 @@ std::vector<double> Persistence_intervals::characteristic_function_of_diagram(do
(this->intervals[i].second - this->intervals[i].first);
}
if (dbg) {
- std::cerr << "Result at this stage \n";
+ std::clog << "Result at this stage \n";
for (size_t aa = 0; aa != result.size(); ++aa) {
- std::cerr << result[aa] << " ";
+ std::clog << result[aa] << " ";
}
- std::cerr << std::endl;
+ std::clog << std::endl;
}
}
return result;
@@ -455,9 +455,9 @@ inline double compute_euclidean_distance(const std::pair<double, double>& f, con
std::vector<double> Persistence_intervals::k_n_n(size_t k, size_t where_to_cut) const {
bool dbg = false;
if (dbg) {
- std::cerr << "Here are the intervals : \n";
+ std::clog << "Here are the intervals : \n";
for (size_t i = 0; i != this->intervals.size(); ++i) {
- std::cerr << "[ " << this->intervals[i].first << " , " << this->intervals[i].second << "] \n";
+ std::clog << "[ " << this->intervals[i].first << " , " << this->intervals[i].second << "] \n";
}
getchar();
}
@@ -486,12 +486,12 @@ std::vector<double> Persistence_intervals::k_n_n(size_t k, size_t where_to_cut)
distances_from_diagonal[i] = distanceToDiagonal;
if (dbg) {
- std::cerr << "Here are the distances form the point : [" << this->intervals[i].first << " , "
+ std::clog << "Here are the distances form the point : [" << this->intervals[i].first << " , "
<< this->intervals[i].second << "] in the diagram \n";
for (size_t aa = 0; aa != distancesFromI.size(); ++aa) {
- std::cerr << "To : " << i + aa << " : " << distancesFromI[aa] << " ";
+ std::clog << "To : " << i + aa << " : " << distancesFromI[aa] << " ";
}
- std::cerr << std::endl;
+ std::clog << std::endl;
getchar();
}
@@ -502,18 +502,18 @@ std::vector<double> Persistence_intervals::k_n_n(size_t k, size_t where_to_cut)
}
}
if (dbg) {
- std::cerr << "Here is the distance matrix : \n";
+ std::clog << "Here is the distance matrix : \n";
for (size_t i = 0; i != distances.size(); ++i) {
for (size_t j = 0; j != distances.size(); ++j) {
- std::cerr << distances[i][j] << " ";
+ std::clog << distances[i][j] << " ";
}
- std::cerr << std::endl;
+ std::clog << std::endl;
}
- std::cerr << std::endl << std::endl << "And here are the distances to the diagonal : " << std::endl;
+ std::clog << std::endl << std::endl << "And here are the distances to the diagonal : " << std::endl;
for (size_t i = 0; i != distances_from_diagonal.size(); ++i) {
- std::cerr << distances_from_diagonal[i] << " ";
+ std::clog << distances_from_diagonal[i] << " ";
}
- std::cerr << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
getchar();
}
@@ -526,13 +526,13 @@ std::vector<double> Persistence_intervals::k_n_n(size_t k, size_t where_to_cut)
if (k > distancesFromI.size()) {
if (dbg) {
- std::cerr << "There are not enough neighbors in your set. We set the result to plus infty \n";
+ std::clog << "There are not enough neighbors in your set. We set the result to plus infty \n";
}
result.push_back(std::numeric_limits<double>::max());
} else {
if (distances_from_diagonal[i] > distancesFromI[k]) {
if (dbg) {
- std::cerr << "The k-th n.n. is on a diagonal. Therefore we set up a distance to diagonal \n";
+ std::clog << "The k-th n.n. is on a diagonal. Therefore we set up a distance to diagonal \n";
}
result.push_back(distances_from_diagonal[i]);
} else {
diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape.h b/src/Persistence_representations/include/gudhi/Persistence_landscape.h
index b819ccb6..ce4065b8 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_landscape.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_landscape.h
@@ -343,7 +343,7 @@ class Persistence_landscape {
bool dbg = false;
if (dbg) {
- std::cerr << "to_average.size() : " << to_average.size() << std::endl;
+ std::clog << "to_average.size() : " << to_average.size() << std::endl;
}
std::vector<Persistence_landscape*> nextLevelMerge(to_average.size());
@@ -357,13 +357,13 @@ class Persistence_landscape {
while (nextLevelMerge.size() != 1) {
if (dbg) {
- std::cerr << "nextLevelMerge.size() : " << nextLevelMerge.size() << std::endl;
+ std::clog << "nextLevelMerge.size() : " << nextLevelMerge.size() << std::endl;
}
std::vector<Persistence_landscape*> nextNextLevelMerge;
nextNextLevelMerge.reserve(to_average.size());
for (size_t i = 0; i < nextLevelMerge.size(); i = i + 2) {
if (dbg) {
- std::cerr << "i : " << i << std::endl;
+ std::clog << "i : " << i << std::endl;
}
Persistence_landscape* l = new Persistence_landscape;
if (i + 1 != nextLevelMerge.size()) {
@@ -374,7 +374,7 @@ class Persistence_landscape {
nextNextLevelMerge.push_back(l);
}
if (dbg) {
- std::cerr << "After this iteration \n";
+ std::clog << "After this iteration \n";
getchar();
}
@@ -471,25 +471,25 @@ Persistence_landscape::Persistence_landscape(const char* filename, size_t dimens
bool operatorEqualDbg = false;
bool Persistence_landscape::operator==(const Persistence_landscape& rhs) const {
if (this->land.size() != rhs.land.size()) {
- if (operatorEqualDbg) std::cerr << "1\n";
+ if (operatorEqualDbg) std::clog << "1\n";
return false;
}
for (size_t level = 0; level != this->land.size(); ++level) {
if (this->land[level].size() != rhs.land[level].size()) {
- if (operatorEqualDbg) std::cerr << "this->land[level].size() : " << this->land[level].size() << "\n";
- if (operatorEqualDbg) std::cerr << "rhs.land[level].size() : " << rhs.land[level].size() << "\n";
- if (operatorEqualDbg) std::cerr << "2\n";
+ if (operatorEqualDbg) std::clog << "this->land[level].size() : " << this->land[level].size() << "\n";
+ if (operatorEqualDbg) std::clog << "rhs.land[level].size() : " << rhs.land[level].size() << "\n";
+ if (operatorEqualDbg) std::clog << "2\n";
return false;
}
for (size_t i = 0; i != this->land[level].size(); ++i) {
if (!(almost_equal(this->land[level][i].first, rhs.land[level][i].first) &&
almost_equal(this->land[level][i].second, rhs.land[level][i].second))) {
if (operatorEqualDbg)
- std::cerr << "this->land[level][i] : " << this->land[level][i].first << " " << this->land[level][i].second
+ std::clog << "this->land[level][i] : " << this->land[level][i].first << " " << this->land[level][i].second
<< "\n";
if (operatorEqualDbg)
- std::cerr << "rhs.land[level][i] : " << rhs.land[level][i].first << " " << rhs.land[level][i].second << "\n";
- if (operatorEqualDbg) std::cerr << "3\n";
+ std::clog << "rhs.land[level][i] : " << rhs.land[level][i].first << " " << rhs.land[level][i].second << "\n";
+ if (operatorEqualDbg) std::clog << "3\n";
return false;
}
}
@@ -507,7 +507,7 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
const std::vector<std::pair<double, double> >& p, size_t number_of_levels) {
bool dbg = false;
if (dbg) {
- std::cerr << "Persistence_landscape::Persistence_landscape( const std::vector< std::pair< double , double > >& p )"
+ std::clog << "Persistence_landscape::Persistence_landscape( const std::vector< std::pair< double , double > >& p )"
<< std::endl;
}
@@ -517,9 +517,9 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
std::sort(bars.begin(), bars.end(), compare_points_sorting);
if (dbg) {
- std::cerr << "Bars : \n";
+ std::clog << "Bars : \n";
for (size_t i = 0; i != bars.size(); ++i) {
- std::cerr << bars[i].first << " " << bars[i].second << "\n";
+ std::clog << bars[i].first << " " << bars[i].second << "\n";
}
getchar();
}
@@ -534,7 +534,7 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
while (!characteristicPoints.empty()) {
if (dbg) {
for (size_t i = 0; i != characteristicPoints.size(); ++i) {
- std::cout << "(" << characteristicPoints[i].first << " " << characteristicPoints[i].second << ")\n";
+ std::clog << "(" << characteristicPoints[i].first << " " << characteristicPoints[i].second << ")\n";
}
std::cin.ignore();
}
@@ -545,7 +545,7 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
lambda_n.push_back(characteristicPoints[0]);
if (dbg) {
- std::cerr << "1 Adding to lambda_n : (" << -std::numeric_limits<int>::max() << " " << 0 << ") , ("
+ std::clog << "1 Adding to lambda_n : (" << -std::numeric_limits<int>::max() << " " << 0 << ") , ("
<< minus_length(characteristicPoints[0]) << " " << 0 << ") , (" << characteristicPoints[0].first << " "
<< characteristicPoints[0].second << ") \n";
}
@@ -562,13 +562,13 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
(birth_plus_deaths(lambda_n[lambda_n.size() - 1]) - minus_length(characteristicPoints[i])) / 2);
lambda_n.push_back(point);
if (dbg) {
- std::cerr << "2 Adding to lambda_n : (" << point.first << " " << point.second << ")\n";
+ std::clog << "2 Adding to lambda_n : (" << point.first << " " << point.second << ")\n";
}
if (dbg) {
- std::cerr << "characteristicPoints[i+p] : " << characteristicPoints[i + p].first << " "
+ std::clog << "characteristicPoints[i+p] : " << characteristicPoints[i + p].first << " "
<< characteristicPoints[i + p].second << "\n";
- std::cerr << "point : " << point.first << " " << point.second << "\n";
+ std::clog << "point : " << point.first << " " << point.second << "\n";
getchar();
}
@@ -577,7 +577,7 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
(birth_plus_deaths(point) <= birth_plus_deaths(characteristicPoints[i + p]))) {
newCharacteristicPoints.push_back(characteristicPoints[i + p]);
if (dbg) {
- std::cerr << "3.5 Adding to newCharacteristicPoints : (" << characteristicPoints[i + p].first << " "
+ std::clog << "3.5 Adding to newCharacteristicPoints : (" << characteristicPoints[i + p].first << " "
<< characteristicPoints[i + p].second << ")\n";
getchar();
}
@@ -586,7 +586,7 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
newCharacteristicPoints.push_back(point);
if (dbg) {
- std::cerr << "4 Adding to newCharacteristicPoints : (" << point.first << " " << point.second << ")\n";
+ std::clog << "4 Adding to newCharacteristicPoints : (" << point.first << " " << point.second << ")\n";
}
while ((i + p < characteristicPoints.size()) &&
@@ -594,15 +594,15 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
(birth_plus_deaths(point) >= birth_plus_deaths(characteristicPoints[i + p]))) {
newCharacteristicPoints.push_back(characteristicPoints[i + p]);
if (dbg) {
- std::cerr << "characteristicPoints[i+p] : " << characteristicPoints[i + p].first << " "
+ std::clog << "characteristicPoints[i+p] : " << characteristicPoints[i + p].first << " "
<< characteristicPoints[i + p].second << "\n";
- std::cerr << "point : " << point.first << " " << point.second << "\n";
- std::cerr << "characteristicPoints[i+p] birth and death : " << minus_length(characteristicPoints[i + p])
+ std::clog << "point : " << point.first << " " << point.second << "\n";
+ std::clog << "characteristicPoints[i+p] birth and death : " << minus_length(characteristicPoints[i + p])
<< " , " << birth_plus_deaths(characteristicPoints[i + p]) << "\n";
- std::cerr << "point birth and death : " << minus_length(point) << " , " << birth_plus_deaths(point)
+ std::clog << "point birth and death : " << minus_length(point) << " , " << birth_plus_deaths(point)
<< "\n";
- std::cerr << "3 Adding to newCharacteristicPoints : (" << characteristicPoints[i + p].first << " "
+ std::clog << "3 Adding to newCharacteristicPoints : (" << characteristicPoints[i + p].first << " "
<< characteristicPoints[i + p].second << ")\n";
getchar();
}
@@ -613,20 +613,20 @@ void Persistence_landscape::construct_persistence_landscape_from_barcode(
lambda_n.push_back(std::make_pair(birth_plus_deaths(lambda_n[lambda_n.size() - 1]), 0));
lambda_n.push_back(std::make_pair(minus_length(characteristicPoints[i]), 0));
if (dbg) {
- std::cerr << "5 Adding to lambda_n : (" << birth_plus_deaths(lambda_n[lambda_n.size() - 1]) << " " << 0
+ std::clog << "5 Adding to lambda_n : (" << birth_plus_deaths(lambda_n[lambda_n.size() - 1]) << " " << 0
<< ")\n";
- std::cerr << "5 Adding to lambda_n : (" << minus_length(characteristicPoints[i]) << " " << 0 << ")\n";
+ std::clog << "5 Adding to lambda_n : (" << minus_length(characteristicPoints[i]) << " " << 0 << ")\n";
}
}
lambda_n.push_back(characteristicPoints[i]);
if (dbg) {
- std::cerr << "6 Adding to lambda_n : (" << characteristicPoints[i].first << " "
+ std::clog << "6 Adding to lambda_n : (" << characteristicPoints[i].first << " "
<< characteristicPoints[i].second << ")\n";
}
} else {
newCharacteristicPoints.push_back(characteristicPoints[i]);
if (dbg) {
- std::cerr << "7 Adding to newCharacteristicPoints : (" << characteristicPoints[i].first << " "
+ std::clog << "7 Adding to newCharacteristicPoints : (" << characteristicPoints[i].first << " "
<< characteristicPoints[i].second << ")\n";
}
}
@@ -692,7 +692,7 @@ double Persistence_landscape::compute_integral_of_landscape(double p) const {
double result = 0;
for (size_t i = 0; i != this->land.size(); ++i) {
for (size_t nr = 2; nr != this->land[i].size() - 1; ++nr) {
- if (dbg) std::cout << "nr : " << nr << "\n";
+ if (dbg) std::clog << "nr : " << nr << "\n";
// In this interval, the landscape has a form f(x) = ax+b. We want to compute integral of (ax+b)^p = 1/a *
// (ax+b)^{p+1}/(p+1)
std::pair<double, double> coef = compute_parameters_of_a_line(this->land[i][nr], this->land[i][nr - 1]);
@@ -700,7 +700,7 @@ double Persistence_landscape::compute_integral_of_landscape(double p) const {
double b = coef.second;
if (dbg)
- std::cout << "(" << this->land[i][nr].first << "," << this->land[i][nr].second << ") , "
+ std::clog << "(" << this->land[i][nr].first << "," << this->land[i][nr].second << ") , "
<< this->land[i][nr - 1].first << "," << this->land[i][nr].second << ")" << std::endl;
if (this->land[i][nr].first == this->land[i][nr - 1].first) continue;
if (a != 0) {
@@ -710,8 +710,8 @@ double Persistence_landscape::compute_integral_of_landscape(double p) const {
result += (this->land[i][nr].first - this->land[i][nr - 1].first) * (pow(this->land[i][nr].second, p));
}
if (dbg) {
- std::cout << "a : " << a << " , b : " << b << std::endl;
- std::cout << "result : " << result << std::endl;
+ std::clog << "a : " << a << " , b : " << b << std::endl;
+ std::clog << "result : " << result << std::endl;
}
}
}
@@ -730,31 +730,31 @@ double Persistence_landscape::compute_value_at_a_given_point(unsigned level, dou
unsigned coordEnd = this->land[level].size() - 2;
if (compute_value_at_a_given_pointDbg) {
- std::cerr << "Here \n";
- std::cerr << "x : " << x << "\n";
- std::cerr << "this->land[level][coordBegin].first : " << this->land[level][coordBegin].first << "\n";
- std::cerr << "this->land[level][coordEnd].first : " << this->land[level][coordEnd].first << "\n";
+ std::clog << "Here \n";
+ std::clog << "x : " << x << "\n";
+ std::clog << "this->land[level][coordBegin].first : " << this->land[level][coordBegin].first << "\n";
+ std::clog << "this->land[level][coordEnd].first : " << this->land[level][coordEnd].first << "\n";
}
// in this case x is outside the support of the landscape, therefore the value of the landscape is 0.
if (x <= this->land[level][coordBegin].first) return 0;
if (x >= this->land[level][coordEnd].first) return 0;
- if (compute_value_at_a_given_pointDbg) std::cerr << "Entering to the while loop \n";
+ if (compute_value_at_a_given_pointDbg) std::clog << "Entering to the while loop \n";
while (coordBegin + 1 != coordEnd) {
if (compute_value_at_a_given_pointDbg) {
- std::cerr << "coordBegin : " << coordBegin << "\n";
- std::cerr << "coordEnd : " << coordEnd << "\n";
- std::cerr << "this->land[level][coordBegin].first : " << this->land[level][coordBegin].first << "\n";
- std::cerr << "this->land[level][coordEnd].first : " << this->land[level][coordEnd].first << "\n";
+ std::clog << "coordBegin : " << coordBegin << "\n";
+ std::clog << "coordEnd : " << coordEnd << "\n";
+ std::clog << "this->land[level][coordBegin].first : " << this->land[level][coordBegin].first << "\n";
+ std::clog << "this->land[level][coordEnd].first : " << this->land[level][coordEnd].first << "\n";
}
unsigned newCord = (unsigned)floor((coordEnd + coordBegin) / 2.0);
if (compute_value_at_a_given_pointDbg) {
- std::cerr << "newCord : " << newCord << "\n";
- std::cerr << "this->land[level][newCord].first : " << this->land[level][newCord].first << "\n";
+ std::clog << "newCord : " << newCord << "\n";
+ std::clog << "this->land[level][newCord].first : " << this->land[level][newCord].first << "\n";
std::cin.ignore();
}
@@ -767,12 +767,12 @@ double Persistence_landscape::compute_value_at_a_given_point(unsigned level, dou
}
if (compute_value_at_a_given_pointDbg) {
- std::cout << "x : " << x << " is between : " << this->land[level][coordBegin].first << " a "
+ std::clog << "x : " << x << " is between : " << this->land[level][coordBegin].first << " a "
<< this->land[level][coordEnd].first << "\n";
- std::cout << "the y coords are : " << this->land[level][coordBegin].second << " a "
+ std::clog << "the y coords are : " << this->land[level][coordBegin].second << " a "
<< this->land[level][coordEnd].second << "\n";
- std::cerr << "coordBegin : " << coordBegin << "\n";
- std::cerr << "coordEnd : " << coordEnd << "\n";
+ std::clog << "coordBegin : " << coordBegin << "\n";
+ std::clog << "coordEnd : " << coordEnd << "\n";
std::cin.ignore();
}
return function_value(this->land[level][coordBegin], this->land[level][coordEnd], x);
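Note: the loop above is a plain bisection on the breakpoint indices, keeping x bracketed between this->land[level][coordBegin].first and this->land[level][coordEnd].first until a single segment remains; the returned value then comes from function_value on that segment (presumably linear interpolation). A generic sketch of the bisection, with hypothetical names and not taken from the patched header:

#include <cstddef>
#include <utility>
#include <vector>

// Returns lo such that pts[lo].first <= x <= pts[lo + 1].first,
// assuming the bracket pts[lo].first <= x <= pts[hi].first holds on entry.
std::size_t find_segment(const std::vector<std::pair<double, double> >& pts,
                         std::size_t lo, std::size_t hi, double x) {
  while (lo + 1 != hi) {
    std::size_t mid = (lo + hi) / 2;
    if (pts[mid].first <= x) lo = mid; else hi = mid;
  }
  return lo;
}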
@@ -810,13 +810,13 @@ Persistence_landscape Persistence_landscape::abs() {
Persistence_landscape result;
for (size_t level = 0; level != this->land.size(); ++level) {
if (AbsDbg) {
- std::cout << "level: " << level << std::endl;
+ std::clog << "level: " << level << std::endl;
}
std::vector<std::pair<double, double> > lambda_n;
lambda_n.push_back(std::make_pair(-std::numeric_limits<int>::max(), 0));
for (size_t i = 1; i != this->land[level].size(); ++i) {
if (AbsDbg) {
- std::cout << "this->land[" << level << "][" << i << "] : " << this->land[level][i].first << " "
+ std::clog << "this->land[" << level << "][" << i << "] : " << this->land[level][i].first << " "
<< this->land[level][i].second << std::endl;
}
// if a line segment between this->land[level][i-1] and this->land[level][i] crosses the x-axis, then we have to
@@ -828,15 +828,15 @@ Persistence_landscape Persistence_landscape::abs() {
lambda_n.push_back(std::make_pair(zero, 0));
lambda_n.push_back(std::make_pair(this->land[level][i].first, fabs(this->land[level][i].second)));
if (AbsDbg) {
- std::cout << "Adding pair : (" << zero << ",0)" << std::endl;
- std::cout << "In the same step adding pair : (" << this->land[level][i].first << ","
+ std::clog << "Adding pair : (" << zero << ",0)" << std::endl;
+ std::clog << "In the same step adding pair : (" << this->land[level][i].first << ","
<< fabs(this->land[level][i].second) << ") " << std::endl;
std::cin.ignore();
}
} else {
lambda_n.push_back(std::make_pair(this->land[level][i].first, fabs(this->land[level][i].second)));
if (AbsDbg) {
- std::cout << "Adding pair : (" << this->land[level][i].first << "," << fabs(this->land[level][i].second)
+ std::clog << "Adding pair : (" << this->land[level][i].first << "," << fabs(this->land[level][i].second)
<< ") " << std::endl;
std::cin.ignore();
}
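Note: the point (zero, 0) pushed above is the x-axis crossing of the segment between this->land[level][i-1] and this->land[level][i]. For a segment from (x0, y0) to (x1, y1) with y0 and y1 of opposite signs it can be obtained as below; an illustrative sketch with a hypothetical name, not the code used in the header:

// Root of the line through (x0, y0) and (x1, y1); valid when y0 and y1 have opposite signs.
double zero_of_segment(double x0, double y0, double x1, double y1) {
  double t = -y0 / (y1 - y0);  // parameter in [0, 1] at which the segment meets y = 0
  return x0 + t * (x1 - x0);
}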
@@ -851,13 +851,13 @@ Persistence_landscape* Persistence_landscape::new_abs() {
Persistence_landscape* result = new Persistence_landscape(*this);
for (size_t level = 0; level != this->land.size(); ++level) {
if (AbsDbg) {
- std::cout << "level: " << level << std::endl;
+ std::clog << "level: " << level << std::endl;
}
std::vector<std::pair<double, double> > lambda_n;
lambda_n.push_back(std::make_pair(-std::numeric_limits<int>::max(), 0));
for (size_t i = 1; i != this->land[level].size(); ++i) {
if (AbsDbg) {
- std::cout << "this->land[" << level << "][" << i << "] : " << this->land[level][i].first << " "
+ std::clog << "this->land[" << level << "][" << i << "] : " << this->land[level][i].first << " "
<< this->land[level][i].second << std::endl;
}
// if a line segment between this->land[level][i-1] and this->land[level][i] crosses the x-axis, then we have to
@@ -869,15 +869,15 @@ Persistence_landscape* Persistence_landscape::new_abs() {
lambda_n.push_back(std::make_pair(zero, 0));
lambda_n.push_back(std::make_pair(this->land[level][i].first, fabs(this->land[level][i].second)));
if (AbsDbg) {
- std::cout << "Adding pair : (" << zero << ",0)" << std::endl;
- std::cout << "In the same step adding pair : (" << this->land[level][i].first << ","
+ std::clog << "Adding pair : (" << zero << ",0)" << std::endl;
+ std::clog << "In the same step adding pair : (" << this->land[level][i].first << ","
<< fabs(this->land[level][i].second) << ") " << std::endl;
std::cin.ignore();
}
} else {
lambda_n.push_back(std::make_pair(this->land[level][i].first, fabs(this->land[level][i].second)));
if (AbsDbg) {
- std::cout << "Adding pair : (" << this->land[level][i].first << "," << fabs(this->land[level][i].second)
+ std::clog << "Adding pair : (" << this->land[level][i].first << "," << fabs(this->land[level][i].second)
<< ") " << std::endl;
std::cin.ignore();
}
@@ -943,11 +943,11 @@ void Persistence_landscape::load_landscape_from_file(const char* filename) {
lineSS >> endd;
landscapeAtThisLevel.push_back(std::make_pair(beginn, endd));
if (dbg) {
- std::cerr << "Reading a point : " << beginn << " , " << endd << std::endl;
+ std::clog << "Reading a point : " << beginn << " , " << endd << std::endl;
}
} else {
if (dbg) {
- std::cout << "IGNORE LINE\n";
+ std::clog << "IGNORE LINE\n";
getchar();
}
if (!isThisAFirsLine) {
@@ -975,7 +975,7 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
const Persistence_landscape& land2) {
bool operation_on_pair_of_landscapesDBG = false;
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "operation_on_pair_of_landscapes\n";
+ std::clog << "operation_on_pair_of_landscapes\n";
std::cin.ignore();
}
Persistence_landscape result;
@@ -985,8 +985,8 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
if (operation_on_pair_of_landscapesDBG) {
for (size_t i = 0; i != std::min(land1.land.size(), land2.land.size()); ++i) {
- std::cerr << "land1.land[" << i << "].size() : " << land1.land[i].size() << std::endl;
- std::cerr << "land2.land[" << i << "].size() : " << land2.land[i].size() << std::endl;
+ std::clog << "land1.land[" << i << "].size() : " << land1.land[i].size() << std::endl;
+ std::clog << "land2.land[" << i << "].size() : " << land2.land[i].size() << std::endl;
}
getchar();
}
@@ -997,20 +997,20 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
size_t q = 0;
while ((p + 1 < land1.land[i].size()) && (q + 1 < land2.land[i].size())) {
if (operation_on_pair_of_landscapesDBG) {
- std::cerr << "p : " << p << "\n";
- std::cerr << "q : " << q << "\n";
- std::cerr << "land1.land.size() : " << land1.land.size() << std::endl;
- std::cerr << "land2.land.size() : " << land2.land.size() << std::endl;
- std::cerr << "land1.land[" << i << "].size() : " << land1.land[i].size() << std::endl;
- std::cerr << "land2.land[" << i << "].size() : " << land2.land[i].size() << std::endl;
- std::cout << "land1.land[i][p].first : " << land1.land[i][p].first << "\n";
- std::cout << "land2.land[i][q].first : " << land2.land[i][q].first << "\n";
+ std::clog << "p : " << p << "\n";
+ std::clog << "q : " << q << "\n";
+ std::clog << "land1.land.size() : " << land1.land.size() << std::endl;
+ std::clog << "land2.land.size() : " << land2.land.size() << std::endl;
+ std::clog << "land1.land[" << i << "].size() : " << land1.land[i].size() << std::endl;
+ std::clog << "land2.land[" << i << "].size() : " << land2.land[i].size() << std::endl;
+ std::clog << "land1.land[i][p].first : " << land1.land[i][p].first << "\n";
+ std::clog << "land2.land[i][q].first : " << land2.land[i][q].first << "\n";
}
if (land1.land[i][p].first < land2.land[i][q].first) {
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "first \n";
- std::cout << " function_value(land2.land[i][q-1],land2.land[i][q],land1.land[i][p].first) : "
+ std::clog << "first \n";
+ std::clog << " function_value(land2.land[i][q-1],land2.land[i][q],land1.land[i][p].first) : "
<< function_value(land2.land[i][q - 1], land2.land[i][q], land1.land[i][p].first) << "\n";
}
lambda_n.push_back(
@@ -1022,12 +1022,12 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
}
if (land1.land[i][p].first > land2.land[i][q].first) {
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "Second \n";
- std::cout << "function_value(" << land1.land[i][p - 1].first << " " << land1.land[i][p - 1].second << " ,"
+ std::clog << "Second \n";
+ std::clog << "function_value(" << land1.land[i][p - 1].first << " " << land1.land[i][p - 1].second << " ,"
<< land1.land[i][p].first << " " << land1.land[i][p].second << ", " << land2.land[i][q].first
<< " ) : " << function_value(land1.land[i][p - 1], land1.land[i][p - 1], land2.land[i][q].first)
<< "\n";
- std::cout << "oper( " << function_value(land1.land[i][p], land1.land[i][p - 1], land2.land[i][q].first) << ","
+ std::clog << "oper( " << function_value(land1.land[i][p], land1.land[i][p - 1], land2.land[i][q].first) << ","
<< land2.land[i][q].second << " : "
<< oper(land2.land[i][q].second,
function_value(land1.land[i][p], land1.land[i][p - 1], land2.land[i][q].first))
@@ -1040,19 +1040,19 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
continue;
}
if (land1.land[i][p].first == land2.land[i][q].first) {
- if (operation_on_pair_of_landscapesDBG) std::cout << "Third \n";
+ if (operation_on_pair_of_landscapesDBG) std::clog << "Third \n";
lambda_n.push_back(
std::make_pair(land2.land[i][q].first, oper(land1.land[i][p].second, land2.land[i][q].second)));
++p;
++q;
}
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "Next iteration \n";
+ std::clog << "Next iteration \n";
}
}
while ((p + 1 < land1.land[i].size()) && (q + 1 >= land2.land[i].size())) {
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "New point : " << land1.land[i][p].first
+ std::clog << "New point : " << land1.land[i][p].first
<< " oper(land1.land[i][p].second,0) : " << oper(land1.land[i][p].second, 0) << std::endl;
}
lambda_n.push_back(std::make_pair(land1.land[i][p].first, oper(land1.land[i][p].second, 0)));
@@ -1060,7 +1060,7 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
}
while ((p + 1 >= land1.land[i].size()) && (q + 1 < land2.land[i].size())) {
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "New point : " << land2.land[i][q].first
+ std::clog << "New point : " << land2.land[i][q].first
<< " oper(0,land2.land[i][q].second) : " << oper(0, land2.land[i][q].second) << std::endl;
}
lambda_n.push_back(std::make_pair(land2.land[i][q].first, oper(0, land2.land[i][q].second)));
@@ -1073,7 +1073,7 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
}
if (land1.land.size() > std::min(land1.land.size(), land2.land.size())) {
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "land1.land.size() > std::min( land1.land.size() , land2.land.size() )" << std::endl;
+ std::clog << "land1.land.size() > std::min( land1.land.size() , land2.land.size() )" << std::endl;
}
for (size_t i = std::min(land1.land.size(), land2.land.size()); i != std::max(land1.land.size(), land2.land.size());
++i) {
@@ -1088,7 +1088,7 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
}
if (land2.land.size() > std::min(land1.land.size(), land2.land.size())) {
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "( land2.land.size() > std::min( land1.land.size() , land2.land.size() ) ) " << std::endl;
+ std::clog << "( land2.land.size() > std::min( land1.land.size() , land2.land.size() ) ) " << std::endl;
}
for (size_t i = std::min(land1.land.size(), land2.land.size()); i != std::max(land1.land.size(), land2.land.size());
++i) {
@@ -1102,7 +1102,7 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
}
}
if (operation_on_pair_of_landscapesDBG) {
- std::cout << "operation_on_pair_of_landscapes END\n";
+ std::clog << "operation_on_pair_of_landscapes END\n";
std::cin.ignore();
}
return result;
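Note: level by level, operation_on_pair_of_landscapes sweeps the two piecewise-linear functions over the union of their breakpoints; at a breakpoint owned by only one landscape the other one is evaluated on its current segment, and oper combines the two values (the "first", "Second" and "Third" debug branches above). A self-contained sketch of that sweep, with hypothetical names and without the sentinel points and level padding the real code also handles:

#include <cstddef>
#include <utility>
#include <vector>

typedef std::pair<double, double> Point;

// Value at x of the line through a and b (what function_value is assumed to compute).
double value_on_segment(const Point& a, const Point& b, double x) {
  return a.second + (b.second - a.second) * (x - a.first) / (b.first - a.first);
}

template <class Oper>
std::vector<Point> merge_one_level(const std::vector<Point>& l1, const std::vector<Point>& l2, Oper oper) {
  std::vector<Point> out;
  std::size_t p = 1, q = 1;
  while (p + 1 < l1.size() && q + 1 < l2.size()) {
    if (l1[p].first < l2[q].first) {         // breakpoint of the first landscape comes first
      out.push_back(Point(l1[p].first, oper(l1[p].second, value_on_segment(l2[q - 1], l2[q], l1[p].first))));
      ++p;
    } else if (l1[p].first > l2[q].first) {  // breakpoint of the second landscape comes first
      out.push_back(Point(l2[q].first, oper(value_on_segment(l1[p - 1], l1[p], l2[q].first), l2[q].second)));
      ++q;
    } else {                                 // common breakpoint
      out.push_back(Point(l1[p].first, oper(l1[p].second, l2[q].second)));
      ++p; ++q;
    }
  }
  return out;
}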
@@ -1110,20 +1110,20 @@ Persistence_landscape operation_on_pair_of_landscapes(const Persistence_landscap
double compute_maximal_distance_non_symmetric(const Persistence_landscape& pl1, const Persistence_landscape& pl2) {
bool dbg = false;
- if (dbg) std::cerr << " compute_maximal_distance_non_symmetric \n";
+ if (dbg) std::clog << " compute_maximal_distance_non_symmetric \n";
// this distance is not symmetric. It computes ONLY the distance between inflection points of pl1 and pl2.
double maxDist = 0;
size_t minimalNumberOfLevels = std::min(pl1.land.size(), pl2.land.size());
for (size_t level = 0; level != minimalNumberOfLevels; ++level) {
if (dbg) {
- std::cerr << "Level : " << level << std::endl;
- std::cerr << "PL1 : \n";
+ std::clog << "Level : " << level << std::endl;
+ std::clog << "PL1 : \n";
for (size_t i = 0; i != pl1.land[level].size(); ++i) {
- std::cerr << "(" << pl1.land[level][i].first << "," << pl1.land[level][i].second << ") \n";
+ std::clog << "(" << pl1.land[level][i].first << "," << pl1.land[level][i].second << ") \n";
}
- std::cerr << "PL2 : \n";
+ std::clog << "PL2 : \n";
for (size_t i = 0; i != pl2.land[level].size(); ++i) {
- std::cerr << "(" << pl2.land[level][i].first << "," << pl2.land[level][i].second << ") \n";
+ std::clog << "(" << pl2.land[level][i].first << "," << pl2.land[level][i].second << ") \n";
}
std::cin.ignore();
}
@@ -1143,24 +1143,24 @@ double compute_maximal_distance_non_symmetric(const Persistence_landscape& pl1,
if (maxDist <= val) maxDist = val;
if (dbg) {
- std::cerr << pl1.land[level][i].first << "in [" << pl2.land[level][p2Count].first << ","
+ std::clog << pl1.land[level][i].first << "in [" << pl2.land[level][p2Count].first << ","
<< pl2.land[level][p2Count + 1].first << "] \n";
- std::cerr << "pl1[level][i].second : " << pl1.land[level][i].second << std::endl;
- std::cerr << "function_value( pl2[level][p2Count] , pl2[level][p2Count+1] , pl1[level][i].first ) : "
+ std::clog << "pl1[level][i].second : " << pl1.land[level][i].second << std::endl;
+ std::clog << "function_value( pl2[level][p2Count] , pl2[level][p2Count+1] , pl1[level][i].first ) : "
<< function_value(pl2.land[level][p2Count], pl2.land[level][p2Count + 1], pl1.land[level][i].first)
<< std::endl;
- std::cerr << "val : " << val << std::endl;
+ std::clog << "val : " << val << std::endl;
std::cin.ignore();
}
}
}
- if (dbg) std::cerr << "minimalNumberOfLevels : " << minimalNumberOfLevels << std::endl;
+ if (dbg) std::clog << "minimalNumberOfLevels : " << minimalNumberOfLevels << std::endl;
if (minimalNumberOfLevels < pl1.land.size()) {
for (size_t level = minimalNumberOfLevels; level != pl1.land.size(); ++level) {
for (size_t i = 0; i != pl1.land[level].size(); ++i) {
- if (dbg) std::cerr << "pl1[level][i].second : " << pl1.land[level][i].second << std::endl;
+ if (dbg) std::clog << "pl1[level][i].second : " << pl1.land[level][i].second << std::endl;
if (maxDist < pl1.land[level][i].second) maxDist = pl1.land[level][i].second;
}
}
@@ -1181,7 +1181,7 @@ double compute_distance_of_landscapes(const Persistence_landscape& first, const
lan = lan.abs();
if (dbg) {
- std::cerr << "Abs of difference ; " << lan << std::endl;
+ std::clog << "Abs of difference ; " << lan << std::endl;
getchar();
}
@@ -1189,17 +1189,17 @@ double compute_distance_of_landscapes(const Persistence_landscape& first, const
// \int_{- \infty}^{+\infty}| first-second |^p
double result;
if (p != 1) {
- if (dbg) std::cerr << "Power != 1, compute integral to the power p\n";
+ if (dbg) std::clog << "Power != 1, compute integral to the power p\n";
result = lan.compute_integral_of_landscape(p);
} else {
- if (dbg) std::cerr << "Power = 1, compute integral \n";
+ if (dbg) std::clog << "Power = 1, compute integral \n";
result = lan.compute_integral_of_landscape();
}
// (\int_{- \infty}^{+\infty}| first-second |^p)^(1/p)
return pow(result, 1.0 / p);
} else {
// p == infty
- if (dbg) std::cerr << "Power = infty, compute maximum \n";
+ if (dbg) std::clog << "Power = infty, compute maximum \n";
return lan.compute_maximum();
}
}
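Note: putting the two branches together, for a finite power p the function returns ( \sum_k \int_{- \infty}^{+\infty} | lambda^1_k(x) - lambda^2_k(x) |^p dx )^{1/p}, where lambda^j_k is the k-th level of landscape j and the sum over levels is what compute_integral_of_landscape accumulates; for p = infinity it returns the supremum over all levels and all x of | lambda^1_k(x) - lambda^2_k(x) |, i.e. compute_maximum applied to the absolute difference.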
@@ -1220,7 +1220,7 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
for (size_t level = 0; level != std::min(l1.size(), l2.size()); ++level) {
if (dbg) {
- std::cerr << "Computing inner product for a level : " << level << std::endl;
+ std::clog << "Computing inner product for a level : " << level << std::endl;
getchar();
}
auto&& l1_land_level = l1.land[level];
@@ -1267,14 +1267,14 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
result += contributionFromThisPart;
if (dbg) {
- std::cerr << "[l1_land_level[l1It].first,l1_land_level[l1It+1].first] : " << l1_land_level[l1It].first
+ std::clog << "[l1_land_level[l1It].first,l1_land_level[l1It+1].first] : " << l1_land_level[l1It].first
<< " , " << l1_land_level[l1It + 1].first << std::endl;
- std::cerr << "[l2_land_level[l2It].first,l2_land_level[l2It+1].first] : " << l2_land_level[l2It].first
+ std::clog << "[l2_land_level[l2It].first,l2_land_level[l2It+1].first] : " << l2_land_level[l2It].first
<< " , " << l2_land_level[l2It + 1].first << std::endl;
- std::cerr << "a : " << a << ", b : " << b << " , c: " << c << ", d : " << d << std::endl;
- std::cerr << "x1 : " << x1 << " , x2 : " << x2 << std::endl;
- std::cerr << "contributionFromThisPart : " << contributionFromThisPart << std::endl;
- std::cerr << "result : " << result << std::endl;
+ std::clog << "a : " << a << ", b : " << b << " , c: " << c << ", d : " << d << std::endl;
+ std::clog << "x1 : " << x1 << " , x2 : " << x2 << std::endl;
+ std::clog << "contributionFromThisPart : " << contributionFromThisPart << std::endl;
+ std::clog << "result : " << result << std::endl;
getchar();
}
@@ -1290,11 +1290,11 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
// in this case, we increment both:
++l2It;
if (dbg) {
- std::cerr << "Incrementing both \n";
+ std::clog << "Incrementing both \n";
}
} else {
if (dbg) {
- std::cerr << "Incrementing first \n";
+ std::clog << "Incrementing first \n";
}
}
++l1It;
@@ -1302,7 +1302,7 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
// in this case we increment l2It
++l2It;
if (dbg) {
- std::cerr << "Incrementing second \n";
+ std::clog << "Incrementing second \n";
}
}
@@ -1361,7 +1361,7 @@ void Persistence_landscape::plot(const char* filename, double xRangeBegin, doubl
}
out << "EOF" << std::endl;
}
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
}
diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h
index 68bce336..537131da 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_landscape_on_grid.h
@@ -155,9 +155,9 @@ class Persistence_landscape_on_grid {
double dx = (this->grid_max - this->grid_min) / static_cast<double>(this->values_of_landscapes.size() - 1);
if (dbg) {
- std::cerr << "this->grid_max : " << this->grid_max << std::endl;
- std::cerr << "this->grid_min : " << this->grid_min << std::endl;
- std::cerr << "this->values_of_landscapes.size() : " << this->values_of_landscapes.size() << std::endl;
+ std::clog << "this->grid_max : " << this->grid_max << std::endl;
+ std::clog << "this->grid_min : " << this->grid_min << std::endl;
+ std::clog << "this->values_of_landscapes.size() : " << this->values_of_landscapes.size() << std::endl;
getchar();
}
@@ -169,14 +169,14 @@ class Persistence_landscape_on_grid {
if (this->values_of_landscapes[i].size() > level) current_y = this->values_of_landscapes[i][level];
if (dbg) {
- std::cerr << "this->values_of_landscapes[i].size() : " << this->values_of_landscapes[i].size()
+ std::clog << "this->values_of_landscapes[i].size() : " << this->values_of_landscapes[i].size()
<< " , level : " << level << std::endl;
if (this->values_of_landscapes[i].size() > level)
- std::cerr << "this->values_of_landscapes[i][level] : " << this->values_of_landscapes[i][level] << std::endl;
- std::cerr << "previous_y : " << previous_y << std::endl;
- std::cerr << "current_y : " << current_y << std::endl;
- std::cerr << "dx : " << dx << std::endl;
- std::cerr << "0.5*dx*( previous_y + current_y ); " << 0.5 * dx * (previous_y + current_y) << std::endl;
+ std::clog << "this->values_of_landscapes[i][level] : " << this->values_of_landscapes[i][level] << std::endl;
+ std::clog << "previous_y : " << previous_y << std::endl;
+ std::clog << "current_y : " << current_y << std::endl;
+ std::clog << "dx : " << dx << std::endl;
+ std::clog << "0.5*dx*( previous_y + current_y ); " << 0.5 * dx * (previous_y + current_y) << std::endl;
}
result += 0.5 * dx * (previous_y + current_y);
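Note: the accumulation above is the composite trapezoid rule on a uniform grid of spacing dx. For example, sampling the values 0, 1, 2, 1, 0 with dx = 0.5 gives 0.5*dx*((0+1)+(1+2)+(2+1)+(1+0)) = 0.25 * 8 = 2.0 as the approximate integral.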
@@ -213,10 +213,10 @@ class Persistence_landscape_on_grid {
if (this->values_of_landscapes[0].size() > level) previous_y = this->values_of_landscapes[0][level];
if (dbg) {
- std::cerr << "dx : " << dx << std::endl;
- std::cerr << "previous_x : " << previous_x << std::endl;
- std::cerr << "previous_y : " << previous_y << std::endl;
- std::cerr << "power : " << p << std::endl;
+ std::clog << "dx : " << dx << std::endl;
+ std::clog << "previous_x : " << previous_x << std::endl;
+ std::clog << "previous_y : " << previous_y << std::endl;
+ std::clog << "power : " << p << std::endl;
getchar();
}
@@ -225,7 +225,7 @@ class Persistence_landscape_on_grid {
double current_y = 0;
if (this->values_of_landscapes[i].size() > level) current_y = this->values_of_landscapes[i][level];
- if (dbg) std::cerr << "current_y : " << current_y << std::endl;
+ if (dbg) std::clog << "current_y : " << current_y << std::endl;
if (current_y == previous_y) continue;
@@ -235,7 +235,7 @@ class Persistence_landscape_on_grid {
double b = coef.second;
if (dbg) {
- std::cerr << "A line passing through points : (" << previous_x << "," << previous_y << ") and (" << current_x
+ std::clog << "A line passing through points : (" << previous_x << "," << previous_y << ") and (" << current_x
<< "," << current_y << ") is : " << a << "x+" << b << std::endl;
}
@@ -249,14 +249,14 @@ class Persistence_landscape_on_grid {
}
result += value_to_add;
if (dbg) {
- std::cerr << "Increasing result by : " << value_to_add << std::endl;
- std::cerr << "result : " << result << std::endl;
+ std::clog << "Increasing result by : " << value_to_add << std::endl;
+ std::clog << "result : " << result << std::endl;
getchar();
}
previous_x = current_x;
previous_y = current_y;
}
- if (dbg) std::cerr << "The total result is : " << result << std::endl;
+ if (dbg) std::clog << "The total result is : " << result << std::endl;
return result;
}
@@ -297,10 +297,10 @@ class Persistence_landscape_on_grid {
size_t position = size_t((x - this->grid_min) / dx);
if (dbg) {
- std::cerr << "This is a procedure compute_value_at_a_given_point \n";
- std::cerr << "level : " << level << std::endl;
- std::cerr << "x : " << x << std::endl;
- std::cerr << "position : " << position << std::endl;
+ std::clog << "This is a procedure compute_value_at_a_given_point \n";
+ std::clog << "level : " << level << std::endl;
+ std::clog << "x : " << x << std::endl;
+ std::clog << "position : " << position << std::endl;
}
// check if we are not exactly in the grid point:
if (almost_equal(position * dx + this->grid_min, x)) {
@@ -432,23 +432,23 @@ class Persistence_landscape_on_grid {
bool operator==(const Persistence_landscape_on_grid& rhs) const {
bool dbg = true;
if (this->values_of_landscapes.size() != rhs.values_of_landscapes.size()) {
- if (dbg) std::cerr << "values_of_landscapes of incompatible sizes\n";
+ if (dbg) std::clog << "values_of_landscapes of incompatible sizes\n";
return false;
}
if (!almost_equal(this->grid_min, rhs.grid_min)) {
- if (dbg) std::cerr << "grid_min not equal\n";
+ if (dbg) std::clog << "grid_min not equal\n";
return false;
}
if (!almost_equal(this->grid_max, rhs.grid_max)) {
- if (dbg) std::cerr << "grid_max not equal\n";
+ if (dbg) std::clog << "grid_max not equal\n";
return false;
}
for (size_t i = 0; i != this->values_of_landscapes.size(); ++i) {
for (size_t aa = 0; aa != this->values_of_landscapes[i].size(); ++aa) {
if (!almost_equal(this->values_of_landscapes[i][aa], rhs.values_of_landscapes[i][aa])) {
if (dbg) {
- std::cerr << "Problem in the position : " << i << " of values_of_landscapes. \n";
- std::cerr << this->values_of_landscapes[i][aa] << " " << rhs.values_of_landscapes[i][aa] << std::endl;
+ std::clog << "Problem in the position : " << i << " of values_of_landscapes. \n";
+ std::clog << this->values_of_landscapes[i][aa] << " " << rhs.values_of_landscapes[i][aa] << std::endl;
}
return false;
}
@@ -615,7 +615,7 @@ class Persistence_landscape_on_grid {
double previous_y_l1 = 0;
double previous_y_l2 = 0;
for (size_t i = 0; i != l1.values_of_landscapes.size(); ++i) {
- if (dbg) std::cerr << "i : " << i << std::endl;
+ if (dbg) std::clog << "i : " << i << std::endl;
double current_x = previous_x + dx;
double current_y_l1 = 0;
@@ -625,11 +625,11 @@ class Persistence_landscape_on_grid {
if (l2.values_of_landscapes[i].size() > level) current_y_l2 = l2.values_of_landscapes[i][level];
if (dbg) {
- std::cerr << "previous_x : " << previous_x << std::endl;
- std::cerr << "previous_y_l1 : " << previous_y_l1 << std::endl;
- std::cerr << "current_y_l1 : " << current_y_l1 << std::endl;
- std::cerr << "previous_y_l2 : " << previous_y_l2 << std::endl;
- std::cerr << "current_y_l2 : " << current_y_l2 << std::endl;
+ std::clog << "previous_x : " << previous_x << std::endl;
+ std::clog << "previous_y_l1 : " << previous_y_l1 << std::endl;
+ std::clog << "current_y_l1 : " << current_y_l1 << std::endl;
+ std::clog << "previous_y_l2 : " << previous_y_l2 << std::endl;
+ std::clog << "current_y_l2 : " << current_y_l2 << std::endl;
}
std::pair<double, double> l1_coords = compute_parameters_of_a_line(std::make_pair(previous_x, previous_y_l1),
@@ -646,11 +646,11 @@ class Persistence_landscape_on_grid {
double d = l2_coords.second;
if (dbg) {
- std::cerr << "Here are the formulas for a line: \n";
- std::cerr << "a : " << a << std::endl;
- std::cerr << "b : " << b << std::endl;
- std::cerr << "c : " << c << std::endl;
- std::cerr << "d : " << d << std::endl;
+ std::clog << "Here are the formulas for a line: \n";
+ std::clog << "a : " << a << std::endl;
+ std::clog << "b : " << b << std::endl;
+ std::clog << "c : " << c << std::endl;
+ std::clog << "d : " << d << std::endl;
}
// now, to compute the inner product in this interval we need to compute the integral of (ax+b)(cx+d) = acx^2 +
@@ -663,11 +663,11 @@ class Persistence_landscape_on_grid {
(a * d + b * c) / 2 * previous_x * previous_x + b * d * previous_x);
if (dbg) {
- std::cerr << "Value of the integral on the left end i.e. : " << previous_x << " is : "
+ std::clog << "Value of the integral on the left end i.e. : " << previous_x << " is : "
<< a * c / 3 * previous_x * previous_x * previous_x + (a * d + b * c) / 2 * previous_x * previous_x +
b * d * previous_x
<< std::endl;
- std::cerr << "Value of the integral on the right end i.e. : " << current_x << " is "
+ std::clog << "Value of the integral on the right end i.e. : " << current_x << " is "
<< a * c / 3 * current_x * current_x * current_x + (a * d + b * c) / 2 * current_x * current_x +
b * d * current_x
<< std::endl;
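Note: the two bracketed expressions printed above are the antiderivative F(x) = a*c/3 * x^3 + (a*d + b*c)/2 * x^2 + b*d * x of the product (a*x + b)*(c*x + d) = a*c*x^2 + (a*d + b*c)*x + b*d, evaluated at the two ends of the interval; added_value is then F(current_x) - F(previous_x).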
@@ -676,8 +676,8 @@ class Persistence_landscape_on_grid {
result += added_value;
if (dbg) {
- std::cerr << "added_value : " << added_value << std::endl;
- std::cerr << "result : " << result << std::endl;
+ std::clog << "added_value : " << added_value << std::endl;
+ std::clog << "result : " << result << std::endl;
getchar();
}
@@ -703,8 +703,8 @@ class Persistence_landscape_on_grid {
// time:
if (dbg) {
- std::cerr << "first : " << first << std::endl;
- std::cerr << "second : " << second << std::endl;
+ std::clog << "first : " << first << std::endl;
+ std::clog << "second : " << second << std::endl;
getchar();
}
@@ -712,14 +712,14 @@ class Persistence_landscape_on_grid {
Persistence_landscape_on_grid lan = first - second;
if (dbg) {
- std::cerr << "Difference : " << lan << std::endl;
+ std::clog << "Difference : " << lan << std::endl;
}
//| first-second |:
lan.abs();
if (dbg) {
- std::cerr << "Abs : " << lan << std::endl;
+ std::clog << "Abs : " << lan << std::endl;
}
if (p < std::numeric_limits<double>::max()) {
@@ -727,18 +727,18 @@ class Persistence_landscape_on_grid {
double result;
if (p != 1) {
if (dbg) {
- std::cerr << "p : " << p << std::endl;
+ std::clog << "p : " << p << std::endl;
getchar();
}
result = lan.compute_integral_of_landscape(p);
if (dbg) {
- std::cerr << "integral : " << result << std::endl;
+ std::clog << "integral : " << result << std::endl;
getchar();
}
} else {
result = lan.compute_integral_of_landscape();
if (dbg) {
- std::cerr << "integral, without power : " << result << std::endl;
+ std::clog << "integral, without power : " << result << std::endl;
getchar();
}
}
@@ -820,7 +820,7 @@ class Persistence_landscape_on_grid {
this->grid_max = (to_average[0])->grid_max;
if (dbg) {
- std::cerr << "Computations of average. The data from the current landscape have been cleared. We are ready to do "
+ std::clog << "Computations of average. The data from the current landscape have been cleared. We are ready to do "
"the computations. \n";
}
@@ -835,7 +835,7 @@ class Persistence_landscape_on_grid {
this->values_of_landscapes[grid_point] = std::vector<double>(maximal_size_of_vector);
if (dbg) {
- std::cerr << "We are considering the point : " << grid_point
+ std::clog << "We are considering the point : " << grid_point
<< " of the grid. In this point, there are at most : " << maximal_size_of_vector
<< " nonzero landscape functions \n";
}
@@ -931,12 +931,12 @@ void Persistence_landscape_on_grid::set_up_values_of_landscapes(const std::vecto
size_t number_of_points_, unsigned number_of_levels) {
bool dbg = false;
if (dbg) {
- std::cerr << "Here is the procedure : set_up_values_of_landscapes. The parameters are : grid_min_ : " << grid_min_
+ std::clog << "Here is the procedure : set_up_values_of_landscapes. The parameters are : grid_min_ : " << grid_min_
<< ", grid_max_ : " << grid_max_ << ", number_of_points_ : " << number_of_points_
<< ", number_of_levels: " << number_of_levels << std::endl;
- std::cerr << "Here are the intervals at our disposal : \n";
+ std::clog << "Here are the intervals at our disposal : \n";
for (size_t i = 0; i != p.size(); ++i) {
- std::cerr << p[i].first << " , " << p[i].second << std::endl;
+ std::clog << p[i].first << " , " << p[i].second << std::endl;
}
}
@@ -976,17 +976,17 @@ void Persistence_landscape_on_grid::set_up_values_of_landscapes(const std::vecto
size_t grid_interval_midpoint = (size_t)(0.5 * (grid_interval_begin + grid_interval_end));
if (dbg) {
- std::cerr << "Considering an interval : " << p[int_no].first << "," << p[int_no].second << std::endl;
+ std::clog << "Considering an interval : " << p[int_no].first << "," << p[int_no].second << std::endl;
- std::cerr << "grid_interval_begin : " << grid_interval_begin << std::endl;
- std::cerr << "grid_interval_end : " << grid_interval_end << std::endl;
- std::cerr << "grid_interval_midpoint : " << grid_interval_midpoint << std::endl;
+ std::clog << "grid_interval_begin : " << grid_interval_begin << std::endl;
+ std::clog << "grid_interval_end : " << grid_interval_end << std::endl;
+ std::clog << "grid_interval_midpoint : " << grid_interval_midpoint << std::endl;
}
double landscape_value = dx;
for (size_t i = grid_interval_begin + 1; i < grid_interval_midpoint; ++i) {
if (dbg) {
- std::cerr << "Adding landscape value (going up) for a point : " << i << " equal : " << landscape_value
+ std::clog << "Adding landscape value (going up) for a point : " << i << " equal : " << landscape_value
<< std::endl;
}
if (number_of_levels != std::numeric_limits<unsigned>::max()) {
@@ -1044,7 +1044,7 @@ void Persistence_landscape_on_grid::set_up_values_of_landscapes(const std::vecto
}
if (dbg) {
- std::cerr << "Adding landscape value (going down) for a point : " << i << " equal : " << landscape_value
+ std::clog << "Adding landscape value (going down) for a point : " << i << " equal : " << landscape_value
<< std::endl;
}
}
@@ -1246,7 +1246,7 @@ void Persistence_landscape_on_grid::plot(const char* filename, double min_x, dou
}
out << "EOF" << std::endl;
}
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
}
diff --git a/src/Persistence_representations/include/gudhi/Persistence_vectors.h b/src/Persistence_representations/include/gudhi/Persistence_vectors.h
index 6776f4a3..fab96900 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_vectors.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_vectors.h
@@ -189,7 +189,7 @@ class Vector_distances_in_diagram {
}
out << std::endl;
out.close();
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
}
@@ -360,9 +360,9 @@ template <typename F>
void Vector_distances_in_diagram<F>::compute_sorted_vector_of_distances_via_heap(size_t where_to_cut) {
bool dbg = false;
if (dbg) {
- std::cerr << "Here are the intervals : \n";
+ std::clog << "Here are the intervals : \n";
for (size_t i = 0; i != this->intervals.size(); ++i) {
- std::cerr << this->intervals[i].first << " , " << this->intervals[i].second << std::endl;
+ std::clog << this->intervals[i].first << " , " << this->intervals[i].second << std::endl;
}
}
where_to_cut = std::min(
@@ -385,14 +385,14 @@ void Vector_distances_in_diagram<F>::compute_sorted_vector_of_distances_via_heap
0.5 * (this->intervals[j].first + this->intervals[j].second)))));
if (dbg) {
- std::cerr << "Value : " << value << std::endl;
- std::cerr << "heap.front() : " << heap.front() << std::endl;
+ std::clog << "Value : " << value << std::endl;
+ std::clog << "heap.front() : " << heap.front() << std::endl;
getchar();
}
if (-value < heap.front()) {
if (dbg) {
- std::cerr << "Replacing : " << heap.front() << " with : " << -value << std::endl;
+ std::clog << "Replacing : " << heap.front() << " with : " << -value << std::endl;
getchar();
}
// remove the first element from the heap
@@ -431,11 +431,11 @@ void Vector_distances_in_diagram<F>::compute_sorted_vector_of_distances_via_heap
}
if (dbg) {
- std::cerr << "This is the heap after all the operations :\n";
+ std::clog << "This is the heap after all the operations :\n";
for (size_t i = 0; i != heap.size(); ++i) {
- std::cout << heap[i] << " ";
+ std::clog << heap[i] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
this->sorted_vector_of_distances = heap;
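Note: the loop above keeps at most where_to_cut of the largest values by storing their negatives in a binary heap, so that heap.front() corresponds to the smallest value currently retained and is replaced whenever a larger one arrives. A generic equivalent using a min-heap directly; an illustrative sketch with hypothetical names, not code from the header:

#include <cstddef>
#include <functional>
#include <queue>
#include <vector>

// Keep the k largest values of a stream using a min-heap of size at most k.
std::vector<double> k_largest(const std::vector<double>& values, std::size_t k) {
  if (k == 0) return std::vector<double>();
  std::priority_queue<double, std::vector<double>, std::greater<double> > heap;  // min-heap
  for (std::size_t i = 0; i != values.size(); ++i) {
    double v = values[i];
    if (heap.size() < k) {
      heap.push(v);
    } else if (v > heap.top()) {
      heap.pop();  // drop the smallest retained value
      heap.push(v);
    }
  }
  std::vector<double> out;
  while (!heap.empty()) {  // extracted in ascending order
    out.push_back(heap.top());
    heap.pop();
  }
  return out;
}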
@@ -519,11 +519,11 @@ double Vector_distances_in_diagram<F>::distance(const Vector_distances_in_diagra
bool dbg = false;
if (dbg) {
- std::cerr << "Entering double Vector_distances_in_diagram<F>::distance( const Abs_Topological_data_with_distances* "
+ std::clog << "Entering double Vector_distances_in_diagram<F>::distance( const Abs_Topological_data_with_distances* "
"second , double power ) procedure \n";
- std::cerr << "Power : " << power << std::endl;
- std::cerr << "This : " << *this << std::endl;
- std::cerr << "second : " << second_ << std::endl;
+ std::clog << "Power : " << power << std::endl;
+ std::clog << "This : " << *this << std::endl;
+ std::clog << "second : " << second_ << std::endl;
}
double result = 0;
@@ -531,7 +531,7 @@ double Vector_distances_in_diagram<F>::distance(const Vector_distances_in_diagra
++i) {
if (power == 1) {
if (dbg) {
- std::cerr << "|" << this->sorted_vector_of_distances[i] << " - " << second_.sorted_vector_of_distances[i]
+ std::clog << "|" << this->sorted_vector_of_distances[i] << " - " << second_.sorted_vector_of_distances[i]
<< " | : " << fabs(this->sorted_vector_of_distances[i] - second_.sorted_vector_of_distances[i])
<< std::endl;
}
@@ -545,7 +545,7 @@ double Vector_distances_in_diagram<F>::distance(const Vector_distances_in_diagra
result = fabs(this->sorted_vector_of_distances[i] - second_.sorted_vector_of_distances[i]);
}
if (dbg) {
- std::cerr << "| " << this->sorted_vector_of_distances[i] << " - " << second_.sorted_vector_of_distances[i]
+ std::clog << "| " << this->sorted_vector_of_distances[i] << " - " << second_.sorted_vector_of_distances[i]
<< " : " << fabs(this->sorted_vector_of_distances[i] - second_.sorted_vector_of_distances[i])
<< std::endl;
}
diff --git a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
index 5c2d2038..a5bc1bca 100644
--- a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
+++ b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
@@ -50,28 +50,28 @@ std::vector<std::pair<double, double> > read_persistence_intervals_in_one_dimens
final_barcode.reserve(barcode_initial.size());
if (dbg) {
- std::cerr << "Here are the intervals that we read from the file : \n";
+ std::clog << "Here are the intervals that we read from the file : \n";
for (size_t i = 0; i != barcode_initial.size(); ++i) {
- std::cout << barcode_initial[i].first << " " << barcode_initial[i].second << std::endl;
+ std::clog << barcode_initial[i].first << " " << barcode_initial[i].second << std::endl;
}
getchar();
}
for (size_t i = 0; i != barcode_initial.size(); ++i) {
if (dbg) {
- std::cout << "COnsidering interval : " << barcode_initial[i].first << " " << barcode_initial[i].second
+ std::clog << "Considering interval : " << barcode_initial[i].first << " " << barcode_initial[i].second
<< std::endl;
}
if (barcode_initial[i].first > barcode_initial[i].second) {
// note that in this case barcode_initial[i].second != std::numeric_limits<double>::infinity()
- if (dbg) std::cout << "Swap and enter \n";
+ if (dbg) std::clog << "Swap and enter \n";
// swap them to make sure that birth < death
final_barcode.push_back(std::pair<double, double>(barcode_initial[i].second, barcode_initial[i].first));
continue;
} else {
if (barcode_initial[i].second != std::numeric_limits<double>::infinity()) {
- if (dbg) std::cout << "Simply enters\n";
+ if (dbg) std::clog << "Simply enters\n";
// in this case, due to the previous conditions we know that barcode_initial[i].first <
// barcode_initial[i].second, so we put them as they are
final_barcode.push_back(std::pair<double, double>(barcode_initial[i].first, barcode_initial[i].second));
@@ -91,11 +91,11 @@ std::vector<std::pair<double, double> > read_persistence_intervals_in_one_dimens
}
if (dbg) {
- std::cerr << "Here are the final bars that we are sending further : \n";
+ std::clog << "Here are the final bars that we are sending further : \n";
for (size_t i = 0; i != final_barcode.size(); ++i) {
- std::cout << final_barcode[i].first << " " << final_barcode[i].second << std::endl;
+ std::clog << final_barcode[i].first << " " << final_barcode[i].second << std::endl;
}
- std::cerr << "final_barcode.size() : " << final_barcode.size() << std::endl;
+ std::clog << "final_barcode.size() : " << final_barcode.size() << std::endl;
getchar();
}
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp
index 3d088b58..54b1f77d 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/average_persistence_heat_maps.cpp
@@ -17,12 +17,12 @@ using constant_scaling_function = Gudhi::Persistence_representations::constant_s
using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps<constant_scaling_function>;
int main(int argc, char** argv) {
- std::cout << "This program computes average of persistence heat maps stored in files (the files needs to be "
+ std::clog << "This program computes average of persistence heat maps stored in files (the files needs to be "
<< "created beforehand).\n"
<< "The parameters of this programs are names of files with persistence heat maps.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -46,6 +46,6 @@ int main(int argc, char** argv) {
delete maps[i];
}
- std::cout << "Average can be found in 'average.mps' file\n";
+ std::clog << "Average can be found in 'average.mps' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp
index 48000bb1..757a97fc 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp
@@ -19,14 +19,14 @@ using constant_scaling_function = Gudhi::Persistence_representations::constant_s
using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps<constant_scaling_function>;
int main(int argc, char** argv) {
- std::cout << "This program computes distance of persistence heat maps stored in files (the files needs to be "
+ std::clog << "This program computes distance of persistence heat maps stored in files (the files needs to be "
<< "created beforehand).\n"
<< "The first parameter of a program is an integer p. The program compute L^p distance of the two heat "
<< "maps. For L^infty distance choose p = -1. \n"
<< "The remaining parameters of this program are names of files with persistence heat maps.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -69,14 +69,14 @@ int main(int argc, char** argv) {
out.open("distance.mps");
for (size_t i = 0; i != distance.size(); ++i) {
for (size_t j = 0; j != distance.size(); ++j) {
- std::cout << distance[i][j] << " ";
+ std::clog << distance[i][j] << " ";
out << distance[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'distance.mps' file\n";
+ std::clog << "Distance can be found in 'distance.mps' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp
index 8a96f1b0..e7f18ce1 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp
@@ -18,12 +18,12 @@ using constant_scaling_function = Gudhi::Persistence_representations::constant_s
using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps<constant_scaling_function>;
int main(int argc, char** argv) {
- std::cout << "This program computes scalar product of persistence heat maps stored in a file (the file needs to be "
+ std::clog << "This program computes scalar product of persistence heat maps stored in a file (the file needs to be "
<< "created beforehand). \n"
<< "The parameters of this programs are names of files with persistence heat maps.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -60,14 +60,14 @@ int main(int argc, char** argv) {
out.open("scalar_product.mps");
for (size_t i = 0; i != scalar_product.size(); ++i) {
for (size_t j = 0; j != scalar_product.size(); ++j) {
- std::cout << scalar_product[i][j] << " ";
+ std::clog << scalar_product[i][j] << " ";
out << scalar_product[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'scalar_product.mps' file\n";
+ std::clog << "Distance can be found in 'scalar_product.mps' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp
index f82a39b0..6b38b930 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp
@@ -20,7 +20,7 @@ using Persistence_heat_maps =
Gudhi::Persistence_representations::Persistence_heat_maps<arc_tan_of_persistence_of_point>;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
<< "provided as an input.The Gaussian kernels are weighted by the arc tangential of their persistence.\n"
<< "The first parameter of a program is an integer, a size of a grid.\n"
<< "The second and third parameters are min and max of the grid. If you want those numbers to be computed "
@@ -36,7 +36,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 7) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
@@ -58,7 +58,7 @@ int main(int argc, char** argv) {
std::vector<std::vector<double> > filter = Gudhi::Persistence_representations::create_Gaussian_filter(stdiv, 1);
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating a heat map based on a file : " << filenames[i] << std::endl;
+ std::clog << "Creating a heat map based on a file : " << filenames[i] << std::endl;
Persistence_heat_maps l(filenames[i], filter, false, size_of_grid, min_, max_, dimension);
std::stringstream ss;
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp
index 5a657b13..fece2e36 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp
@@ -19,7 +19,7 @@ using distance_from_diagonal_scaling = Gudhi::Persistence_representations::dista
using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps<distance_from_diagonal_scaling>;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
<< "provided as an input.The Gaussian kernels are weighted by the distance of a center from the "
<< "diagonal.\n"
<< "The first parameter of a program is an integer, a size of a grid.\n"
@@ -36,7 +36,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 7) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
@@ -58,7 +58,7 @@ int main(int argc, char** argv) {
std::vector<std::vector<double> > filter = Gudhi::Persistence_representations::create_Gaussian_filter(stdiv, 1);
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating a heat map based on a file : " << filenames[i] << std::endl;
+ std::clog << "Creating a heat map based on a file : " << filenames[i] << std::endl;
Persistence_heat_maps l(filenames[i], filter, false, size_of_grid, min_, max_, dimension);
std::stringstream ss;
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp
index 8d67a54d..86e6fc19 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp
@@ -21,7 +21,7 @@ using Persistence_heat_maps =
Gudhi::Persistence_representations::Persistence_heat_maps<squared_distance_from_diagonal_scaling>;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
<< "provided as an input.The Gaussian kernels are weighted by the square of distance of a center from the "
<< "diagonal.\n"
<< "The first parameter of a program is an integer, a size of a grid.\n"
@@ -38,7 +38,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 7) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
@@ -60,7 +60,7 @@ int main(int argc, char** argv) {
std::vector<std::vector<double> > filter = Gudhi::Persistence_representations::create_Gaussian_filter(stdiv, 1);
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating a heat map based on a file : " << filenames[i] << std::endl;
+ std::clog << "Creating a heat map based on a file : " << filenames[i] << std::endl;
Persistence_heat_maps l(filenames[i], filter, false, size_of_grid, min_, max_, dimension);
std::stringstream ss;
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp
index 29170c32..ca27f8e3 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_persistence_heat_maps.cpp
@@ -19,7 +19,7 @@ using constant_scaling_function = Gudhi::Persistence_representations::constant_s
using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps<constant_scaling_function>;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates persistence heat map files (*.mps) of persistence diagrams files (*.pers) "
<< "provided as an input.\n"
<< "The first parameter of a program is an integer, a size of a grid.\n"
<< "The second and third parameters are min and max of the grid. If you want those numbers to be computed "
@@ -35,7 +35,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 7) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
size_t size_of_grid = (size_t)atoi(argv[1]);
@@ -55,7 +55,7 @@ int main(int argc, char** argv) {
std::vector<std::vector<double> > filter = Gudhi::Persistence_representations::create_Gaussian_filter(stdiv, 1);
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating a heat map based on file : " << filenames[i] << std::endl;
+ std::clog << "Creating a heat map based on file : " << filenames[i] << std::endl;
Persistence_heat_maps l(filenames[i], filter, false, size_of_grid, min_, max_, dimension);
std::stringstream ss;
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp
index 995771b9..d2ebcc7e 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/create_pssk.cpp
@@ -18,7 +18,7 @@
using PSSK = Gudhi::Persistence_representations::PSSK;
int main(int argc, char** argv) {
- std::cout << "This program creates PSSK files (*.pssk) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates PSSK files (*.pssk) of persistence diagrams files (*.pers) "
<< "provided as an input.\n"
<< "The first parameter of a program is an integer, a size of a grid.\n"
<< "The second and third parameters are min and max of the grid. If you want those numbers to be computed "
@@ -34,7 +34,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 7) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
@@ -56,7 +56,7 @@ int main(int argc, char** argv) {
std::vector<std::vector<double> > filter = Gudhi::Persistence_representations::create_Gaussian_filter(stdiv, 1);
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating a PSSK based on a file : " << filenames[i] << std::endl;
+ std::clog << "Creating a PSSK based on a file : " << filenames[i] << std::endl;
PSSK l(filenames[i], filter, size_of_grid, min_, max_, dimension);
std::stringstream ss;
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp b/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp
index cf6e07cb..87cc97d1 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/plot_persistence_heat_map.cpp
@@ -17,10 +17,10 @@ using constant_scaling_function = Gudhi::Persistence_representations::constant_s
using Persistence_heat_maps = Gudhi::Persistence_representations::Persistence_heat_maps<constant_scaling_function>;
int main(int argc, char** argv) {
- std::cout << "This program creates a gnuplot script from a persistence heat maps stored in a file (the file needs "
+ std::clog << "This program creates a gnuplot script from a persistence heat maps stored in a file (the file needs "
<< "to be created beforehand). Please call the code with the name of a single heat maps file \n";
if (argc != 2) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
Persistence_heat_maps l;
diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp
index 519cc47d..72325cad 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp
+++ b/src/Persistence_representations/utilities/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp
@@ -18,7 +18,7 @@
using Persistence_intervals = Gudhi::Persistence_representations::Persistence_intervals;
int main(int argc, char** argv) {
- std::cout << "This program computes the range of birth and death times of persistence pairs in diagrams provided as "
+ std::clog << "This program computes the range of birth and death times of persistence pairs in diagrams provided as "
<< "an input.\n"
<< "The first parameter is the dimension of persistence to be used to create persistence intervals. "
<< "If your file contains the information about dimension of persistence pairs, please provide here the "
@@ -27,7 +27,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters of the program are the names of files with persistence diagrams.\n";
if (argc < 3) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
@@ -45,12 +45,12 @@ int main(int argc, char** argv) {
double max_ = -std::numeric_limits<double>::max();
for (size_t file_no = 0; file_no != filenames.size(); ++file_no) {
- std::cout << "Creating diagram based on a file : " << filenames[file_no] << std::endl;
+ std::clog << "Creating diagram based on a file : " << filenames[file_no] << std::endl;
Persistence_intervals p(filenames[file_no], dimension);
std::pair<double, double> min_max_ = p.get_x_range();
if (min_max_.first < min_) min_ = min_max_.first;
if (min_max_.second > max_) max_ = min_max_.second;
}
- std::cout << "Birth-death range : min: " << min_ << ", max: " << max_ << std::endl;
+ std::clog << "Birth-death range : min: " << min_ << ", max: " << max_ << std::endl;
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp
index 6155727a..465bf72e 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp
+++ b/src/Persistence_representations/utilities/persistence_intervals/compute_bottleneck_distance.cpp
@@ -18,7 +18,7 @@
using Persistence_intervals_with_distances = Gudhi::Persistence_representations::Persistence_intervals_with_distances;
int main(int argc, char** argv) {
- std::cout << "This program computes the bottleneck distance of persistence pairs in diagrams provided as "
+ std::clog << "This program computes the bottleneck distance of persistence pairs in diagrams provided as "
<< "an input.\n"
<< "The first parameter is the dimension of persistence to be used to create persistence intervals. "
<< "If your file contains the information about dimension of persistence pairs, please provide here the "
@@ -27,7 +27,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters of the program are the names of files with persistence diagrams.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -70,14 +70,14 @@ int main(int argc, char** argv) {
out.open("distance.itv");
for (size_t i = 0; i != distance.size(); ++i) {
for (size_t j = 0; j != distance.size(); ++j) {
- std::cout << distance[i][j] << " ";
+ std::clog << distance[i][j] << " ";
out << distance[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'distance.itv' file\n";
+ std::clog << "Distance can be found in 'distance.itv' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp b/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp
index dd6e1a5b..ea1fe717 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp
+++ b/src/Persistence_representations/utilities/persistence_intervals/compute_number_of_dominant_intervals.cpp
@@ -18,10 +18,10 @@
using Persistence_intervals = Gudhi::Persistence_representations::Persistence_intervals;
int main(int argc, char** argv) {
- std::cout << "This program compute the dominant intervals. A number of intervals to be displayed is a parameter of "
+ std::clog << "This program compute the dominant intervals. A number of intervals to be displayed is a parameter of "
"this program. \n";
if (argc != 4) {
- std::cout << "To run this program, please provide the name of a file with persistence diagram, dimension of "
+ std::clog << "To run this program, please provide the name of a file with persistence diagram, dimension of "
"intervals that should be taken into account (if your file contains only persistence pairs in a "
"single dimension, set it up to -1) and number of dominant intervals you would like to get \n";
return 1;
@@ -33,9 +33,9 @@ int main(int argc, char** argv) {
}
Persistence_intervals p(argv[1], dimension);
std::vector<std::pair<double, double> > dominant_intervals = p.dominant_intervals(atoi(argv[3]));
- std::cout << "Here are the dominant intervals : " << std::endl;
+ std::clog << "Here are the dominant intervals : " << std::endl;
for (size_t i = 0; i != dominant_intervals.size(); ++i) {
- std::cout << " " << dominant_intervals[i].first << "," << dominant_intervals[i].second << " " << std::endl;
+ std::clog << " " << dominant_intervals[i].first << "," << dominant_intervals[i].second << " " << std::endl;
}
return 0;
diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp
index 13d2133f..e5eec3f5 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp
+++ b/src/Persistence_representations/utilities/persistence_intervals/plot_histogram_of_intervals_lengths.cpp
@@ -18,10 +18,10 @@
using Persistence_intervals = Gudhi::Persistence_representations::Persistence_intervals;
int main(int argc, char** argv) {
- std::cout << "This program computes a histogram of barcode's length. A number of bins in the histogram is a "
+ std::clog << "This program computes a histogram of barcode's length. A number of bins in the histogram is a "
<< "parameter of this program. \n";
if ((argc != 3) && (argc != 4)) {
- std::cout << "To run this program, please provide the name of a file with persistence diagram and number of "
+ std::clog << "To run this program, please provide the name of a file with persistence diagram and number of "
<< "dominant intervals you would like to get. Set a negative number dominant intervals value "
<< "If your file contains only birth-death pairs.\n"
<< "The third parameter is the dimension of the persistence that is to be used. If your "
@@ -59,7 +59,7 @@ int main(int argc, char** argv) {
out << std::endl;
out.close();
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp
index 451be77f..27c69e07 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp
+++ b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_Betti_numbers.cpp
@@ -19,7 +19,7 @@ using Persistence_intervals = Gudhi::Persistence_representations::Persistence_in
int main(int argc, char** argv) {
if ((argc != 3) && (argc != 2)) {
- std::cout << "This program creates a gnuplot script of Betti numbers from a single persistence diagram file"
+ std::clog << "This program creates a gnuplot script of Betti numbers from a single persistence diagram file"
<< "(*.pers).\n"
<< "To run this program, please provide the name of a file with persistence diagram.\n"
<< "The second optional parameter of a program is the dimension of the persistence that is to be used. "
@@ -68,7 +68,7 @@ int main(int argc, char** argv) {
out << std::endl;
out.close();
- std::cout << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
+ std::clog << "To visualize, install gnuplot and type the command: gnuplot -persist -e \"load \'"
<< gnuplot_script.str().c_str() << "\'\"" << std::endl;
return 0;
diff --git a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp
index 09a56869..199a3266 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp
+++ b/src/Persistence_representations/utilities/persistence_intervals/plot_persistence_intervals.cpp
@@ -19,7 +19,7 @@ using Persistence_intervals = Gudhi::Persistence_representations::Persistence_in
int main(int argc, char** argv) {
if ((argc != 3) && (argc != 2)) {
- std::cout << "This program creates a gnuplot script from a single persistence diagram file (*.pers).\n"
+ std::clog << "This program creates a gnuplot script from a single persistence diagram file (*.pers).\n"
<< "To run this program, please provide the name of a file with persistence diagram.\n"
<< "The second optional parameter of a program is the dimension of the persistence that is to be used. "
<< "If your file contains only birth-death pairs, you can skip this parameter.\n";
diff --git a/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp
index 04a0ada4..612e9700 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes/average_landscapes.cpp
@@ -16,13 +16,13 @@
using Persistence_landscape = Gudhi::Persistence_representations::Persistence_landscape;
int main(int argc, char** argv) {
- std::cout << "This program computes average of persistence landscapes stored in files (the files needs to be "
+ std::clog << "This program computes average of persistence landscapes stored in files (the files needs to be "
<< "created beforehand).\n"
<< "The parameters of this programs are names of files with persistence landscapes.\n";
std::vector<const char*> filenames;
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -46,6 +46,6 @@ int main(int argc, char** argv) {
delete lands[i];
}
- std::cout << "Average can be found in 'average.land' file\n";
+ std::clog << "Average can be found in 'average.land' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp
index 1093c1aa..2246a37d 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes/compute_distance_of_landscapes.cpp
@@ -18,14 +18,14 @@
using Persistence_landscape = Gudhi::Persistence_representations::Persistence_landscape;
int main(int argc, char** argv) {
- std::cout << "This program computes distance of persistence landscapes stored in files (the files needs to be "
+ std::clog << "This program computes distance of persistence landscapes stored in files (the files needs to be "
<< "created beforehand).\n"
<< "The first parameter of a program is an integer p. The program compute L^p distance of the two heat "
<< "maps. For L^infty distance choose p = -1. \n"
<< "The remaining parameters of this program are names of files with persistence landscapes.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -68,14 +68,14 @@ int main(int argc, char** argv) {
out.open("distance.land");
for (size_t i = 0; i != distance.size(); ++i) {
for (size_t j = 0; j != distance.size(); ++j) {
- std::cout << distance[i][j] << " ";
+ std::clog << distance[i][j] << " ";
out << distance[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'distance.land' file\n";
+ std::clog << "Distance can be found in 'distance.land' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp
index 16b76497..44f50543 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes/compute_scalar_product_of_landscapes.cpp
@@ -17,12 +17,12 @@
using Persistence_landscape = Gudhi::Persistence_representations::Persistence_landscape;
int main(int argc, char** argv) {
- std::cout << "This program computes scalar product of persistence landscapes stored in a file (the file needs to be "
+ std::clog << "This program computes scalar product of persistence landscapes stored in a file (the file needs to be "
<< "created beforehand). \n"
<< "The parameters of this programs are names of files with persistence landscapes.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -59,14 +59,14 @@ int main(int argc, char** argv) {
out.open("scalar_product.land");
for (size_t i = 0; i != scalar_product.size(); ++i) {
for (size_t j = 0; j != scalar_product.size(); ++j) {
- std::cout << scalar_product[i][j] << " ";
+ std::clog << scalar_product[i][j] << " ";
out << scalar_product[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'scalar_product.land' file\n";
+ std::clog << "Distance can be found in 'scalar_product.land' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp
index 4d772086..fab5c75f 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes/create_landscapes.cpp
@@ -18,7 +18,7 @@
using Persistence_landscape = Gudhi::Persistence_representations::Persistence_landscape;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence landscapes files (*.land) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates persistence landscapes files (*.land) of persistence diagrams files (*.pers) "
<< "provided as an input.\n"
<< "The first parameter of this program is a dimension of persistence that will be used in creation of "
<< "the persistence heat maps."
@@ -29,7 +29,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 3) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
std::vector<const char*> filenames;
@@ -43,7 +43,7 @@ int main(int argc, char** argv) {
}
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating a landscape based on file : " << filenames[i] << std::endl;
+ std::clog << "Creating a landscape based on file : " << filenames[i] << std::endl;
Persistence_landscape l(filenames[i], dimension);
std::stringstream ss;
ss << filenames[i] << ".land";
diff --git a/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp b/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp
index 1fe03640..da9b9bba 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes/plot_landscapes.cpp
@@ -16,10 +16,10 @@
using Persistence_landscape = Gudhi::Persistence_representations::Persistence_landscape;
int main(int argc, char** argv) {
- std::cout << "This program creates a gnuplot script from a persistence landscape stored in a file (the file needs "
+ std::clog << "This program creates a gnuplot script from a persistence landscape stored in a file (the file needs "
<< "to be created beforehand). Please call the code with the name of a single landscape file.\n";
if (argc != 2) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp
index f92cde72..39f7a67f 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp
@@ -16,12 +16,12 @@
using Persistence_landscape_on_grid = Gudhi::Persistence_representations::Persistence_landscape_on_grid;
int main(int argc, char** argv) {
- std::cout << "This program computes average of persistence landscapes on grid stored in files (the files needs to "
+ std::clog << "This program computes average of persistence landscapes on grid stored in files (the files needs to "
<< "be created beforehand).\n"
<< "The parameters of this programs are names of files with persistence landscapes on grid.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -46,6 +46,6 @@ int main(int argc, char** argv) {
delete lands[i];
}
- std::cout << "Average can be found in 'average.g_land' file\n";
+ std::clog << "Average can be found in 'average.g_land' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp
index baec6aeb..01fd09d8 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp
@@ -18,14 +18,14 @@
using Persistence_landscape_on_grid = Gudhi::Persistence_representations::Persistence_landscape_on_grid;
int main(int argc, char** argv) {
- std::cout << "This program computes distance of persistence landscapes on grid stored in files (the files needs to "
+ std::clog << "This program computes distance of persistence landscapes on grid stored in files (the files needs to "
<< "be created beforehand).\n"
<< "The first parameter of a program is an integer p. The program compute L^p distance of the two heat "
<< "maps. For L^infty distance choose p = -1. \n"
<< "The remaining parameters of this program are names of files with persistence landscapes on grid.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -68,14 +68,14 @@ int main(int argc, char** argv) {
out.open("distance.g_land");
for (size_t i = 0; i != distance.size(); ++i) {
for (size_t j = 0; j != distance.size(); ++j) {
- std::cout << distance[i][j] << " ";
+ std::clog << distance[i][j] << " ";
out << distance[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'distance.g_land' file\n";
+ std::clog << "Distance can be found in 'distance.g_land' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp
index e94dacdb..71c2f419 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp
@@ -17,13 +17,13 @@
using Persistence_landscape_on_grid = Gudhi::Persistence_representations::Persistence_landscape_on_grid;
int main(int argc, char** argv) {
- std::cout
+ std::clog
<< "This program computes scalar product of persistence landscapes on grid stored in a file (the file needs to "
<< "be created beforehand). \n"
<< "The parameters of this programs are names of files with persistence landscapes on grid.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -60,14 +60,14 @@ int main(int argc, char** argv) {
out.open("scalar_product.g_land");
for (size_t i = 0; i != scalar_product.size(); ++i) {
for (size_t j = 0; j != scalar_product.size(); ++j) {
- std::cout << scalar_product[i][j] << " ";
+ std::clog << scalar_product[i][j] << " ";
out << scalar_product[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'scalar_product.g_land' file\n";
+ std::clog << "Distance can be found in 'scalar_product.g_land' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp
index d510c3df..788313c4 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp
@@ -18,7 +18,7 @@
using Persistence_landscape_on_grid = Gudhi::Persistence_representations::Persistence_landscape_on_grid;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence landscapes on grid files (*.g_land) of persistence diagrams files "
+ std::clog << "This program creates persistence landscapes on grid files (*.g_land) of persistence diagrams files "
<< "(*.pers) provided as an input.\n"
<< "The first parameter of a program is an integer, a size of a grid.\n"
<< "The second and third parameters are min and max of the grid. If you want those numbers to be computed "
@@ -32,7 +32,7 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 6) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
@@ -51,7 +51,7 @@ int main(int argc, char** argv) {
}
for (size_t i = 0; i != filenames.size(); ++i) {
- std::cout << "Creating persistence landscape on a grid based on a file : " << filenames[i] << std::endl;
+ std::clog << "Creating persistence landscape on a grid based on a file : " << filenames[i] << std::endl;
Persistence_landscape_on_grid l;
if ((min_ != -1) || (max_ != -1)) {
l = Persistence_landscape_on_grid(filenames[i], min_, max_, size_of_grid, dimension);
diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp
index 4e20f37f..ec6112b5 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp
+++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp
@@ -16,11 +16,11 @@
using Persistence_landscape_on_grid = Gudhi::Persistence_representations::Persistence_landscape_on_grid;
int main(int argc, char** argv) {
- std::cout << "This program creates a gnuplot script from a persistence landscape on grid stored in a file (the file "
+ std::clog << "This program creates a gnuplot script from a persistence landscape on grid stored in a file (the file "
<< "needs to be created beforehand). Please call the code with the name of a single landscape on grid file"
<< ".\n";
if (argc != 2) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
diff --git a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp
index 89e42f0f..4eb32eb3 100644
--- a/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp
+++ b/src/Persistence_representations/utilities/persistence_vectors/average_persistence_vectors.cpp
@@ -17,12 +17,12 @@ using Euclidean_distance = Gudhi::Euclidean_distance;
using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram<Euclidean_distance>;
int main(int argc, char** argv) {
- std::cout << "This program computes average of persistence vectors stored in files (the files needs to "
+ std::clog << "This program computes average of persistence vectors stored in files (the files needs to "
<< "be created beforehand).\n"
<< "The parameters of this programs are names of files with persistence vectors.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -47,7 +47,7 @@ int main(int argc, char** argv) {
delete lands[i];
}
- std::cout << "Done \n";
+ std::clog << "Done \n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp
index 541dd25f..236981a3 100644
--- a/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp
+++ b/src/Persistence_representations/utilities/persistence_vectors/compute_distance_of_persistence_vectors.cpp
@@ -19,14 +19,14 @@ using Euclidean_distance = Gudhi::Euclidean_distance;
using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram<Euclidean_distance>;
int main(int argc, char** argv) {
- std::cout << "This program compute distance of persistence vectors stored in a file (the file needs to be created "
+ std::clog << "This program compute distance of persistence vectors stored in a file (the file needs to be created "
"beforehand). \n";
- std::cout << "The first parameter of a program is an integer p. The program compute l^p distance of the vectors. For "
+ std::clog << "The first parameter of a program is an integer p. The program compute l^p distance of the vectors. For "
"l^infty distance choose p = -1. \n";
- std::cout << "The remaining parameters of this programs are names of files with persistence vectors.\n";
+ std::clog << "The remaining parameters of this programs are names of files with persistence vectors.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -69,14 +69,14 @@ int main(int argc, char** argv) {
out.open("distance.vect");
for (size_t i = 0; i != distance.size(); ++i) {
for (size_t j = 0; j != distance.size(); ++j) {
- std::cout << distance[i][j] << " ";
+ std::clog << distance[i][j] << " ";
out << distance[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'distance.vect' file\n";
+ std::clog << "Distance can be found in 'distance.vect' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp
index bbc50c98..c6ea0e1c 100644
--- a/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp
+++ b/src/Persistence_representations/utilities/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp
@@ -19,12 +19,12 @@ using Euclidean_distance = Gudhi::Euclidean_distance;
using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram<Euclidean_distance>;
int main(int argc, char** argv) {
- std::cout << "This program computes scalar product of persistence vectors stored in a file (the file needs to "
+ std::clog << "This program computes scalar product of persistence vectors stored in a file (the file needs to "
<< "be created beforehand). \n"
<< "The parameters of this programs are names of files with persistence vectors.\n";
if (argc < 3) {
- std::cout << "Wrong number of parameters, the program will now terminate \n";
+ std::clog << "Wrong number of parameters, the program will now terminate \n";
return 1;
}
@@ -61,14 +61,14 @@ int main(int argc, char** argv) {
out.open("scalar_product.vect");
for (size_t i = 0; i != scalar_product.size(); ++i) {
for (size_t j = 0; j != scalar_product.size(); ++j) {
- std::cout << scalar_product[i][j] << " ";
+ std::clog << scalar_product[i][j] << " ";
out << scalar_product[i][j] << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
out << std::endl;
}
out.close();
- std::cout << "Distance can be found in 'scalar_product.vect' file\n";
+ std::clog << "Distance can be found in 'scalar_product.vect' file\n";
return 0;
}
diff --git a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp
index f974c3d3..608e04e5 100644
--- a/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp
+++ b/src/Persistence_representations/utilities/persistence_vectors/create_persistence_vectors.cpp
@@ -19,7 +19,7 @@ using Euclidean_distance = Gudhi::Euclidean_distance;
using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram<Euclidean_distance>;
int main(int argc, char** argv) {
- std::cout << "This program creates persistence vectors files (*.vect) of persistence diagrams files (*.pers) "
+ std::clog << "This program creates persistence vectors files (*.vect) of persistence diagrams files (*.pers) "
<< "provided as an input.\n"
<< "The first parameter of this program is a dimension of persistence that will be used in creation of "
<< "the persistence heat maps."
@@ -30,11 +30,11 @@ int main(int argc, char** argv) {
<< "The remaining parameters are the names of files with persistence diagrams. \n";
if (argc < 3) {
- std::cout << "Wrong parameter list, the program will now terminate \n";
+ std::clog << "Wrong parameter list, the program will now terminate \n";
return 1;
}
- std::cout << "The remaining parameters are the names of files with persistence diagrams. \n";
+ std::clog << "The remaining parameters are the names of files with persistence diagrams. \n";
int dim = atoi(argv[1]);
unsigned dimension = std::numeric_limits<unsigned>::max();
if (dim >= 0) {
diff --git a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp
index de08fcfe..2decb134 100644
--- a/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp
+++ b/src/Persistence_representations/utilities/persistence_vectors/plot_persistence_vectors.cpp
@@ -17,10 +17,10 @@ using Euclidean_distance = Gudhi::Euclidean_distance;
using Vector_distances_in_diagram = Gudhi::Persistence_representations::Vector_distances_in_diagram<Euclidean_distance>;
int main(int argc, char** argv) {
- std::cout << "This program create a Gnuplot script to plot persistence vector. Please call this program with the "
+ std::clog << "This program create a Gnuplot script to plot persistence vector. Please call this program with the "
"name of file with persistence vector. \n";
if (argc != 2) {
- std::cout << "Wrong number of parameters, the program will now terminate. \n";
+ std::clog << "Wrong number of parameters, the program will now terminate. \n";
return 1;
}
Vector_distances_in_diagram l;
diff --git a/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp
index 45757002..030b072a 100644
--- a/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp
+++ b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp
@@ -74,7 +74,7 @@ int main(int argc, char * argv[]) {
Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), threshold, Gudhi::Euclidean_distance());
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << "Compute Rips graph in " << elapsed_sec << " ms.\n";
+ std::clog << "Compute Rips graph in " << elapsed_sec << " ms.\n";
// Construct the Rips complex in a Simplex Tree
Simplex_tree st;
@@ -86,16 +86,16 @@ int main(int argc, char * argv[]) {
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << "Compute Rips complex in " << elapsed_sec << " ms.\n";
- std::cout << " - dimension = " << st.dimension() << std::endl;
- std::cout << " - number of simplices = " << st.num_simplices() << std::endl;
+ std::clog << "Compute Rips complex in " << elapsed_sec << " ms.\n";
+ std::clog << " - dimension = " << st.dimension() << std::endl;
+ std::clog << " - number of simplices = " << st.num_simplices() << std::endl;
// Sort the simplices in the order of the filtration
start = std::chrono::system_clock::now();
st.initialize_filtration();
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << "Order the simplices of the filtration in " << elapsed_sec << " ms.\n";
+ std::clog << "Order the simplices of the filtration in " << elapsed_sec << " ms.\n";
// Copy the keys inside the simplices
start = std::chrono::system_clock::now();
@@ -106,22 +106,22 @@ int main(int argc, char * argv[]) {
}
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << "Copied the keys inside the simplices in " << elapsed_sec << " ms.\n";
+ std::clog << "Copied the keys inside the simplices in " << elapsed_sec << " ms.\n";
// Convert the simplex tree into a hasse diagram
start = std::chrono::system_clock::now();
Gudhi::Hasse_complex<> hcpx(st);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << "Convert the simplex tree into a Hasse diagram in " << elapsed_sec << " ms.\n";
+ std::clog << "Convert the simplex tree into a Hasse diagram in " << elapsed_sec << " ms.\n";
- std::cout << "Timings when using a simplex tree: \n";
+ std::clog << "Timings when using a simplex tree: \n";
timing_persistence(st, p);
timing_persistence(st, q);
timing_persistence(st, p, q);
- std::cout << "Timings when using a Hasse complex: \n";
+ std::clog << "Timings when using a Hasse complex: \n";
timing_persistence(hcpx, p);
timing_persistence(hcpx, q);
timing_persistence(hcpx, p, q);
@@ -130,7 +130,7 @@ int main(int argc, char * argv[]) {
}
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << "Running the complex destructors in " << elapsed_sec << " ms.\n";
+ std::clog << "Running the complex destructors in " << elapsed_sec << " ms.\n";
return 0;
}
@@ -145,13 +145,13 @@ timing_persistence(FilteredComplex & cpx
Gudhi::persistent_cohomology::Persistent_cohomology< FilteredComplex, Field_Zp > pcoh(cpx);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Initialize pcoh in " << elapsed_sec << " ms.\n";
+ std::clog << " Initialize pcoh in " << elapsed_sec << " ms.\n";
// initializes the coefficient field for homology
start = std::chrono::system_clock::now();
pcoh.init_coefficients(p);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Initialize the coefficient field in " << elapsed_sec << " ms.\n";
+ std::clog << " Initialize the coefficient field in " << elapsed_sec << " ms.\n";
start = std::chrono::system_clock::now();
@@ -159,12 +159,12 @@ timing_persistence(FilteredComplex & cpx
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Compute persistent homology in Z/" << p << "Z in " << elapsed_sec << " ms.\n";
+ std::clog << " Compute persistent homology in Z/" << p << "Z in " << elapsed_sec << " ms.\n";
start = std::chrono::system_clock::now();
}
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Run the persistence destructors in " << elapsed_sec << " ms.\n";
+ std::clog << " Run the persistence destructors in " << elapsed_sec << " ms.\n";
}
template< typename FilteredComplex>
@@ -179,13 +179,13 @@ timing_persistence(FilteredComplex & cpx
Gudhi::persistent_cohomology::Persistent_cohomology< FilteredComplex, Multi_field > pcoh(cpx);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Initialize pcoh in " << elapsed_sec << " ms.\n";
+ std::clog << " Initialize pcoh in " << elapsed_sec << " ms.\n";
// initializes the coefficient field for homology
start = std::chrono::system_clock::now();
pcoh.init_coefficients(p, q);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Initialize the coefficient field in " << elapsed_sec << " ms.\n";
+ std::clog << " Initialize the coefficient field in " << elapsed_sec << " ms.\n";
// compute persistent homology, disgarding persistent features of life shorter than min_persistence
start = std::chrono::system_clock::now();
@@ -194,11 +194,11 @@ timing_persistence(FilteredComplex & cpx
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Compute multi-field persistent homology in all coefficient fields Z/pZ "
+ std::clog << " Compute multi-field persistent homology in all coefficient fields Z/pZ "
<< "with p in [" << p << ";" << q << "] in " << elapsed_sec << " ms.\n";
start = std::chrono::system_clock::now();
}
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
- std::cout << " Run the persistence destructors in " << elapsed_sec << " ms.\n";
+ std::clog << " Run the persistence destructors in " << elapsed_sec << " ms.\n";
}
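The benchmark above repeats one timing pattern: capture two std::chrono::system_clock time points around the step of interest and report the elapsed milliseconds, now on std::clog. A self-contained sketch of that pattern, with a placeholder workload standing in for the Rips and persistence steps:

    #include <chrono>
    #include <iostream>

    int main() {
      auto start = std::chrono::system_clock::now();
      long long sum = 0;
      for (int i = 0; i < 1000000; ++i) sum += i;  // placeholder for the timed step
      auto end = std::chrono::system_clock::now();
      auto elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
      std::clog << "Computed the placeholder sum " << sum << " in " << elapsed_sec << " ms.\n";
      return 0;
    }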
diff --git a/src/Persistent_cohomology/example/custom_persistence_sort.cpp b/src/Persistent_cohomology/example/custom_persistence_sort.cpp
index be74cf50..410cd987 100644
--- a/src/Persistent_cohomology/example/custom_persistence_sort.cpp
+++ b/src/Persistent_cohomology/example/custom_persistence_sort.cpp
@@ -70,26 +70,23 @@ struct cmp_intervals_by_dim_then_length {
int main(int argc, char **argv) {
std::vector<Point> points = random_points();
- std::cout << "Points size=" << points.size() << std::endl;
+ std::clog << "Points size=" << points.size() << std::endl;
// Alpha complex persistence computation from generated points
Alpha_complex alpha_complex_from_points(points);
- std::cout << "alpha_complex_from_points" << std::endl;
+ std::clog << "alpha_complex_from_points" << std::endl;
Simplex_tree simplex;
- std::cout << "simplex" << std::endl;
+ std::clog << "simplex" << std::endl;
if (alpha_complex_from_points.create_complex(simplex, 0.6)) {
- std::cout << "simplex" << std::endl;
+ std::clog << "simplex" << std::endl;
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Simplicial complex is of dimension " << simplex.dimension() <<
+ std::clog << "Simplicial complex is of dimension " << simplex.dimension() <<
" - " << simplex.num_simplices() << " simplices - " <<
simplex.num_vertices() << " vertices." << std::endl;
- // Sort the simplices in the order of the filtration
- simplex.initialize_filtration();
-
- std::cout << "Simplex_tree dim: " << simplex.dimension() << std::endl;
+ std::clog << "Simplex_tree dim: " << simplex.dimension() << std::endl;
Persistent_cohomology pcoh(simplex);
@@ -102,23 +99,23 @@ int main(int argc, char **argv) {
auto persistent_pairs = pcoh.get_persistent_pairs();
std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp);
for (auto pair : persistent_pairs) {
- std::cout << simplex.dimension(get<0>(pair)) << " "
+ std::clog << simplex.dimension(get<0>(pair)) << " "
<< simplex.filtration(get<0>(pair)) << " "
<< simplex.filtration(get<1>(pair)) << std::endl;
}
// Persistent Betti numbers
- std::cout << "The persistent Betti numbers in interval [0.40, 0.41] are : ";
+ std::clog << "The persistent Betti numbers in interval [0.40, 0.41] are : ";
for (int dim = 0; dim < simplex.dimension(); dim++)
- std::cout << "b" << dim << " = " << pcoh.persistent_betti_number(dim, 0.40, 0.41) << " ; ";
- std::cout << std::endl;
+ std::clog << "b" << dim << " = " << pcoh.persistent_betti_number(dim, 0.40, 0.41) << " ; ";
+ std::clog << std::endl;
// Betti numbers
std::vector<int> betti_numbers = pcoh.betti_numbers();
- std::cout << "The Betti numbers are : ";
+ std::clog << "The Betti numbers are : ";
for (std::size_t i = 0; i < betti_numbers.size(); i++)
- std::cout << "b" << i << " = " << betti_numbers[i] << " ; ";
- std::cout << std::endl;
+ std::clog << "b" << i << " = " << betti_numbers[i] << " ; ";
+ std::clog << std::endl;
}
return 0;
}
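Besides the stream substitution, the hunks above also drop the explicit simplex.initialize_filtration() call before the persistence computation. A minimal sketch of the resulting usage, assuming (as these removals suggest) that the simplex tree orders its filtration on demand; the simplices, filtration values and field characteristic below are purely illustrative:

    #include <gudhi/Simplex_tree.h>
    #include <gudhi/Persistent_cohomology.h>
    #include <vector>

    using Simplex_tree = Gudhi::Simplex_tree<>;
    using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
    using Persistent_cohomology =
        Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;

    int main() {
      Simplex_tree st;
      std::vector<int> triangle = {0, 1, 2};
      std::vector<int> edge = {0, 3};
      st.insert_simplex_and_subfaces(triangle, 0.3);  // illustrative simplices and filtration values
      st.insert_simplex_and_subfaces(edge, 0.2);
      // No st.initialize_filtration() here: the filtration order is assumed to be
      // computed on demand, as the removals in the hunks above suggest.
      Persistent_cohomology pcoh(st);
      pcoh.init_coefficients(11);               // homology over Z/11Z
      pcoh.compute_persistent_cohomology(0.0);  // min_persistence = 0
      pcoh.output_diagram();                    // print the persistence diagram
      return 0;
    }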
diff --git a/src/Persistent_cohomology/example/persistence_from_file.cpp b/src/Persistent_cohomology/example/persistence_from_file.cpp
index d169cc63..38c44514 100644
--- a/src/Persistent_cohomology/example/persistence_from_file.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_file.cpp
@@ -37,9 +37,9 @@ int main(int argc, char * argv[]) {
program_options(argc, argv, simplex_tree_file, output_file, p, min_persistence);
- std::cout << "Simplex_tree from file=" << simplex_tree_file.c_str() << " - output_file=" << output_file.c_str()
+ std::clog << "Simplex_tree from file=" << simplex_tree_file.c_str() << " - output_file=" << output_file.c_str()
<< std::endl;
- std::cout << " - p=" << p << " - min_persistence=" << min_persistence << std::endl;
+ std::clog << " - p=" << p << " - min_persistence=" << min_persistence << std::endl;
// Read the list of simplices from a file.
Simplex_tree<> simplex_tree;
@@ -47,21 +47,18 @@ int main(int argc, char * argv[]) {
std::ifstream simplex_tree_stream(simplex_tree_file);
simplex_tree_stream >> simplex_tree;
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << simplex_tree.dimension() << std::endl;
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices" << std::endl;
+ std::clog << " - dimension " << simplex_tree.dimension() << std::endl;
/*
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for( auto f_simplex : simplex_tree.filtration_simplex_range() )
- { std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ { std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for( auto vertex : simplex_tree.simplex_vertex_range(f_simplex) )
- { std::cout << vertex << " "; }
- std::cout << std::endl;
+ { std::clog << vertex << " "; }
+ std::clog << std::endl;
}*/
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
-
// Compute the persistence diagram of the complex
Persistent_cohomology< Simplex_tree<>, Field_Zp > pcoh(simplex_tree);
// initializes the coefficient field for homology
@@ -96,7 +93,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&output_file)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
+ "Name of file in which the persistence diagram is written. Default print in std::clog")
("field-charac,p", po::value<int>(&p)->default_value(11),
"Characteristic p of the coefficient field Z/pZ for computing homology.")
("min-persistence,m", po::value<Filtration_value>(&min_persistence),
@@ -114,17 +111,17 @@ void program_options(int argc, char * argv[]
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
index 3c91662f..bffaabdd 100644
--- a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
@@ -51,62 +51,62 @@ int main(int argc, char * const argv[]) {
}
// TEST OF INSERTION
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF INSERTION" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF INSERTION" << std::endl;
Simplex_tree st;
// ++ FIRST
- std::cout << " - INSERT (0,1,2)" << std::endl;
+ std::clog << " - INSERT (0,1,2)" << std::endl;
typeVectorVertex SimplexVector = {0, 1, 2};
st.insert_simplex_and_subfaces(SimplexVector, 0.3);
// ++ SECOND
- std::cout << " - INSERT 3" << std::endl;
+ std::clog << " - INSERT 3" << std::endl;
SimplexVector = {3};
st.insert_simplex_and_subfaces(SimplexVector, 0.1);
// ++ THIRD
- std::cout << " - INSERT (0,3)" << std::endl;
+ std::clog << " - INSERT (0,3)" << std::endl;
SimplexVector = {0, 3};
st.insert_simplex_and_subfaces(SimplexVector, 0.2);
// ++ FOURTH
- std::cout << " - INSERT (0,1) (already inserted)" << std::endl;
+ std::clog << " - INSERT (0,1) (already inserted)" << std::endl;
SimplexVector = {0, 1};
st.insert_simplex_and_subfaces(SimplexVector, 0.2);
// ++ FIFTH
- std::cout << " - INSERT (3,4,5)" << std::endl;
+ std::clog << " - INSERT (3,4,5)" << std::endl;
SimplexVector = {3, 4, 5};
st.insert_simplex_and_subfaces(SimplexVector, 0.3);
// ++ SIXTH
- std::cout << " - INSERT (0,1,6,7)" << std::endl;
+ std::clog << " - INSERT (0,1,6,7)" << std::endl;
SimplexVector = {0, 1, 6, 7};
st.insert_simplex_and_subfaces(SimplexVector, 0.4);
// ++ SEVENTH
- std::cout << " - INSERT (4,5,8,9)" << std::endl;
+ std::clog << " - INSERT (4,5,8,9)" << std::endl;
SimplexVector = {4, 5, 8, 9};
st.insert_simplex_and_subfaces(SimplexVector, 0.4);
// ++ EIGHTH
- std::cout << " - INSERT (9,10,11)" << std::endl;
+ std::clog << " - INSERT (9,10,11)" << std::endl;
SimplexVector = {9, 10, 11};
st.insert_simplex_and_subfaces(SimplexVector, 0.3);
// ++ NINETH
- std::cout << " - INSERT (2,10,12)" << std::endl;
+ std::clog << " - INSERT (2,10,12)" << std::endl;
SimplexVector = {2, 10, 12};
st.insert_simplex_and_subfaces(SimplexVector, 0.3);
// ++ TENTH
- std::cout << " - INSERT (11,6)" << std::endl;
+ std::clog << " - INSERT (11,6)" << std::endl;
SimplexVector = {6, 11};
st.insert_simplex_and_subfaces(SimplexVector, 0.2);
// ++ ELEVENTH
- std::cout << " - INSERT (13,14,15)" << std::endl;
+ std::clog << " - INSERT (13,14,15)" << std::endl;
SimplexVector = {13, 14, 15};
st.insert_simplex_and_subfaces(SimplexVector, 0.25);
@@ -131,24 +131,24 @@ int main(int argc, char * const argv[]) {
/* An edge [10,12,2] */
- std::cout << "The complex contains " << st.num_simplices() << " simplices - " << st.num_vertices() << " vertices "
+ std::clog << "The complex contains " << st.num_simplices() << " simplices - " << st.num_vertices() << " vertices "
<< std::endl;
- std::cout << " - dimension " << st.dimension() << std::endl;
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:"
+ std::clog << " - dimension " << st.dimension() << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:"
<< std::endl;
- std::cout << "**************************************************************" << std::endl;
- std::cout << "strict graph G { " << std::endl;
+ std::clog << "**************************************************************" << std::endl;
+ std::clog << "strict graph G { " << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << static_cast<int>(vertex) << " -- ";
+ std::clog << static_cast<int>(vertex) << " -- ";
}
- std::cout << ";" << std::endl;
+ std::clog << ";" << std::endl;
}
- std::cout << "}" << std::endl;
- std::cout << "**************************************************************" << std::endl;
+ std::clog << "}" << std::endl;
+ std::clog << "**************************************************************" << std::endl;
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(st);
diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp
index 84333e46..236b67de 100644
--- a/src/Persistent_cohomology/example/plain_homology.cpp
+++ b/src/Persistent_cohomology/example/plain_homology.cpp
@@ -59,9 +59,6 @@ int main() {
st.insert_simplex_and_subfaces(edge35);
st.insert_simplex(vertex4);
- // Sort the simplices in the order of the filtration
- st.initialize_filtration();
-
// Class for homology computation
// By default, since the complex has dimension 1, only 0-dimensional homology would be computed.
// Here we also want persistent homology to be computed for the maximal dimension in the complex (persistence_dim_max = true)
@@ -83,9 +80,9 @@ int main() {
pcoh.output_diagram();
// Print the Betti numbers are b0=2 and b1=2.
- std::cout << std::endl;
- std::cout << "The Betti numbers are : ";
+ std::clog << std::endl;
+ std::clog << "The Betti numbers are : ";
for (int i = 0; i < 3; i++)
- std::cout << "b" << i << " = " << pcoh.betti_number(i) << " ; ";
- std::cout << std::endl;
+ std::clog << "b" << i << " = " << pcoh.betti_number(i) << " ; ";
+ std::clog << std::endl;
}
diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
index 9eb5ccfc..2edf5bc4 100644
--- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
+++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
@@ -56,11 +56,8 @@ int main(int argc, char * argv[]) {
Simplex_tree simplex_tree;
rips_complex_from_file.create_complex(simplex_tree, dim_max);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(simplex_tree);
@@ -99,7 +96,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
+ "Name of file in which the persistence diagram is written. Default print in std::clog")
("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(0),
"Maximal length of an edge for the Rips complex construction.")
("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
@@ -123,20 +120,20 @@ void program_options(int argc, char * argv[]
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with various coefficient fields \n";
- std::cout << "of a Rips complex defined on a set of input points. The coefficient \n";
- std::cout << "fields are all the Z/rZ for a prime number r contained in the \n";
- std::cout << "specified range [p,q]\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p1*...*pr dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p1*...*pr is the product of prime numbers pi such that the homology \n";
- std::cout << "feature exists in homology with Z/piZ coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with various coefficient fields \n";
+ std::clog << "of a Rips complex defined on a set of input points. The coefficient \n";
+ std::clog << "fields are all the Z/rZ for a prime number r contained in the \n";
+ std::clog << "specified range [p,q]\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p1*...*pr dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p1*...*pr is the product of prime numbers pi such that the homology \n";
+ std::clog << "feature exists in homology with Z/piZ coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
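
Purely as an illustration of the multi-field convention described in the help text above (this is not actual output of the program): a line such as

    2*3 1 0.25 0.5

would denote a 1-dimensional feature born at filtration value 0.25 and dying at 0.5 that exists in homology with both Z/2Z and Z/3Z coefficients.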
diff --git a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
index 02db05ec..a503d983 100644
--- a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
@@ -73,11 +73,8 @@ int main(int argc, char * argv[]) {
// expand the graph until dimension dim_max
st.expansion(dim_max);
- std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << st.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- st.initialize_filtration();
+ std::clog << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << st.dimension() << " \n";
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(st);
@@ -115,7 +112,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
+ "Name of file in which the persistence diagram is written. Default print in std::clog")
("max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")
@@ -138,17 +135,17 @@ void program_options(int argc, char * argv[]
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
index 37fa5e93..db456f70 100644
--- a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
@@ -64,8 +64,8 @@ int main(int argc, char * argv[]) {
Simplex_tree& st = *new Simplex_tree;
rips_complex_from_file.create_complex(st, dim_max);
- std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << st.dimension() << " \n";
+ std::clog << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << st.dimension() << " \n";
#ifdef GUDHI_USE_TBB
// Unnecessary, but clarifies which operations are parallel.
@@ -122,7 +122,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
+ "Name of file in which the persistence diagram is written. Default print in std::clog")
("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(0),
"Maximal length of an edge for the Rips complex construction.")
("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
@@ -144,17 +144,17 @@ void program_options(int argc, char * argv[]
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
index 0f1876d0..d34ee07d 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
@@ -288,10 +288,7 @@ class Persistent_cohomology {
// with multiplicity. We used to sum the coefficients directly in
// annotations_in_boundary by using a map, we now do it later.
typedef std::pair<Column *, int> annotation_t;
-#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- thread_local
-#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- std::vector<annotation_t> annotations_in_boundary;
+ thread_local std::vector<annotation_t> annotations_in_boundary;
annotations_in_boundary.clear();
int sign = 1 - 2 * (dim_sigma % 2); // \in {-1,1} provides the sign in the
// alternate sum in the boundary.
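
The hunk above drops the GUDHI_CAN_USE_CXX11_THREAD_LOCAL guard and declares the scratch vector unconditionally thread_local. A minimal standalone sketch of that reuse pattern follows; the helper function is hypothetical and not GUDHI code.

#include <cstddef>
#include <vector>

// Hypothetical helper illustrating a function-local thread_local scratch
// buffer: each thread keeps its own vector, which is cleared and reused on
// every call instead of being reallocated.
std::size_t count_nonempty(const std::vector<std::vector<int>>& boundaries) {
  thread_local std::vector<std::size_t> sizes;
  sizes.clear();
  for (const auto& b : boundaries)
    sizes.push_back(b.size());
  std::size_t nonempty = 0;
  for (std::size_t s : sizes)
    if (s > 0) ++nonempty;
  return nonempty;
}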
@@ -564,35 +561,22 @@ class Persistent_cohomology {
void output_diagram(std::ostream& ostream = std::cout) {
cmp_intervals_by_length cmp(cpx_);
std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp);
- bool has_infinity = std::numeric_limits<Filtration_value>::has_infinity;
for (auto pair : persistent_pairs_) {
- // Special case on windows, inf is "1.#INF" (cf. unitary tests and R package TDA)
- if (has_infinity && cpx_->filtration(get<1>(pair)) == std::numeric_limits<Filtration_value>::infinity()) {
- ostream << get<2>(pair) << " " << cpx_->dimension(get<0>(pair)) << " "
- << cpx_->filtration(get<0>(pair)) << " inf " << std::endl;
- } else {
- ostream << get<2>(pair) << " " << cpx_->dimension(get<0>(pair)) << " "
- << cpx_->filtration(get<0>(pair)) << " "
- << cpx_->filtration(get<1>(pair)) << " " << std::endl;
- }
+ ostream << get<2>(pair) << " " << cpx_->dimension(get<0>(pair)) << " "
+ << cpx_->filtration(get<0>(pair)) << " "
+ << cpx_->filtration(get<1>(pair)) << " " << std::endl;
}
}
void write_output_diagram(std::string diagram_name) {
std::ofstream diagram_out(diagram_name.c_str());
+ diagram_out.exceptions(diagram_out.failbit);
cmp_intervals_by_length cmp(cpx_);
std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp);
- bool has_infinity = std::numeric_limits<Filtration_value>::has_infinity;
for (auto pair : persistent_pairs_) {
- // Special case on windows, inf is "1.#INF"
- if (has_infinity && cpx_->filtration(get<1>(pair)) == std::numeric_limits<Filtration_value>::infinity()) {
- diagram_out << cpx_->dimension(get<0>(pair)) << " "
- << cpx_->filtration(get<0>(pair)) << " inf" << std::endl;
- } else {
- diagram_out << cpx_->dimension(get<0>(pair)) << " "
- << cpx_->filtration(get<0>(pair)) << " "
- << cpx_->filtration(get<1>(pair)) << std::endl;
- }
+ diagram_out << cpx_->dimension(get<0>(pair)) << " "
+ << cpx_->filtration(get<0>(pair)) << " "
+ << cpx_->filtration(get<1>(pair)) << std::endl;
}
}
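
Two things change in the output routines above: the Windows-specific "1.#INF" special case is removed, so an infinite death value is now written with the stream's default formatting, and write_output_diagram() enables failbit exceptions on its std::ofstream, so a failed write surfaces as an exception instead of being silently dropped. A minimal standalone sketch of that second idiom (hypothetical file name and record, not GUDHI code):

#include <fstream>
#include <iostream>

int main() {
  std::ofstream diagram_out("diagram.txt");       // hypothetical output file
  diagram_out.exceptions(diagram_out.failbit);    // stream failures now throw
  try {
    diagram_out << 1 << " " << 0.0 << " " << 2.5 << std::endl;
  } catch (const std::ios_base::failure& e) {
    std::clog << "could not write the diagram: " << e.what() << std::endl;
    return 1;
  }
  return 0;
}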
diff --git a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
index b9f11607..7a2feeff 100644
--- a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
+++ b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
@@ -82,7 +82,7 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
// 2 1 0 inf
// means that in Z/2Z-homology, the Betti numbers are b0=2 and b1=1.
- std::cout << "BETTI NUMBERS" << std::endl;
+ std::clog << "BETTI NUMBERS" << std::endl;
BOOST_CHECK(pcoh.betti_number(0) == 2);
BOOST_CHECK(pcoh.betti_number(1) == 1);
@@ -94,7 +94,7 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
BOOST_CHECK(bns[1] == 1);
BOOST_CHECK(bns[2] == 0);
- std::cout << "GET PERSISTENT PAIRS" << std::endl;
+ std::clog << "GET PERSISTENT PAIRS" << std::endl;
// Custom sort and output persistence
cmp_intervals_by_dim_then_length<Mini_simplex_tree> cmp(&st);
@@ -118,12 +118,12 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
BOOST_CHECK(st.filtration(get<0>(persistent_pairs[2])) == 0);
BOOST_CHECK(get<1>(persistent_pairs[2]) == st.null_simplex());
- std::cout << "INTERVALS IN DIMENSION" << std::endl;
+ std::clog << "INTERVALS IN DIMENSION" << std::endl;
auto intervals_in_dimension_0 = pcoh.intervals_in_dimension(0);
- std::cout << "intervals_in_dimension_0.size() = " << intervals_in_dimension_0.size() << std::endl;
+ std::clog << "intervals_in_dimension_0.size() = " << intervals_in_dimension_0.size() << std::endl;
for (std::size_t i = 0; i < intervals_in_dimension_0.size(); i++)
- std::cout << "intervals_in_dimension_0[" << i << "] = [" << intervals_in_dimension_0[i].first << "," <<
+ std::clog << "intervals_in_dimension_0[" << i << "] = [" << intervals_in_dimension_0[i].first << "," <<
intervals_in_dimension_0[i].second << "]" << std::endl;
BOOST_CHECK(intervals_in_dimension_0.size() == 2);
BOOST_CHECK(intervals_in_dimension_0[0].first == 0);
@@ -133,16 +133,16 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
auto intervals_in_dimension_1 = pcoh.intervals_in_dimension(1);
- std::cout << "intervals_in_dimension_1.size() = " << intervals_in_dimension_1.size() << std::endl;
+ std::clog << "intervals_in_dimension_1.size() = " << intervals_in_dimension_1.size() << std::endl;
for (std::size_t i = 0; i < intervals_in_dimension_1.size(); i++)
- std::cout << "intervals_in_dimension_1[" << i << "] = [" << intervals_in_dimension_1[i].first << "," <<
+ std::clog << "intervals_in_dimension_1[" << i << "] = [" << intervals_in_dimension_1[i].first << "," <<
intervals_in_dimension_1[i].second << "]" << std::endl;
BOOST_CHECK(intervals_in_dimension_1.size() == 1);
BOOST_CHECK(intervals_in_dimension_1[0].first == 0);
BOOST_CHECK(intervals_in_dimension_1[0].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
auto intervals_in_dimension_2 = pcoh.intervals_in_dimension(2);
- std::cout << "intervals_in_dimension_2.size() = " << intervals_in_dimension_2.size() << std::endl;
+ std::clog << "intervals_in_dimension_2.size() = " << intervals_in_dimension_2.size() << std::endl;
BOOST_CHECK(intervals_in_dimension_2.size() == 0);
}
@@ -259,12 +259,12 @@ BOOST_AUTO_TEST_CASE( betti_numbers )
BOOST_CHECK(st.filtration(get<0>(persistent_pairs[2])) == 1);
BOOST_CHECK(get<1>(persistent_pairs[2]) == st.null_simplex());
- std::cout << "INTERVALS IN DIMENSION" << std::endl;
+ std::clog << "INTERVALS IN DIMENSION" << std::endl;
auto intervals_in_dimension_0 = pcoh.intervals_in_dimension(0);
- std::cout << "intervals_in_dimension_0.size() = " << intervals_in_dimension_0.size() << std::endl;
+ std::clog << "intervals_in_dimension_0.size() = " << intervals_in_dimension_0.size() << std::endl;
for (std::size_t i = 0; i < intervals_in_dimension_0.size(); i++)
- std::cout << "intervals_in_dimension_0[" << i << "] = [" << intervals_in_dimension_0[i].first << "," <<
+ std::clog << "intervals_in_dimension_0[" << i << "] = [" << intervals_in_dimension_0[i].first << "," <<
intervals_in_dimension_0[i].second << "]" << std::endl;
BOOST_CHECK(intervals_in_dimension_0.size() == 2);
BOOST_CHECK(intervals_in_dimension_0[0].first == 2);
@@ -273,19 +273,19 @@ BOOST_AUTO_TEST_CASE( betti_numbers )
BOOST_CHECK(intervals_in_dimension_0[1].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
auto intervals_in_dimension_1 = pcoh.intervals_in_dimension(1);
- std::cout << "intervals_in_dimension_1.size() = " << intervals_in_dimension_1.size() << std::endl;
+ std::clog << "intervals_in_dimension_1.size() = " << intervals_in_dimension_1.size() << std::endl;
for (std::size_t i = 0; i < intervals_in_dimension_1.size(); i++)
- std::cout << "intervals_in_dimension_1[" << i << "] = [" << intervals_in_dimension_1[i].first << "," <<
+ std::clog << "intervals_in_dimension_1[" << i << "] = [" << intervals_in_dimension_1[i].first << "," <<
intervals_in_dimension_1[i].second << "]" << std::endl;
BOOST_CHECK(intervals_in_dimension_1.size() == 1);
BOOST_CHECK(intervals_in_dimension_1[0].first == 4);
BOOST_CHECK(intervals_in_dimension_1[0].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
auto intervals_in_dimension_2 = pcoh.intervals_in_dimension(2);
- std::cout << "intervals_in_dimension_2.size() = " << intervals_in_dimension_2.size() << std::endl;
+ std::clog << "intervals_in_dimension_2.size() = " << intervals_in_dimension_2.size() << std::endl;
BOOST_CHECK(intervals_in_dimension_2.size() == 0);
- std::cout << "EMPTY COMPLEX" << std::endl;
+ std::clog << "EMPTY COMPLEX" << std::endl;
Simplex_tree empty;
empty.initialize_filtration();
St_persistence pcoh_empty(empty, false);
diff --git a/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp b/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp
index a1c106d5..fe3f8517 100644
--- a/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp
+++ b/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp
@@ -30,7 +30,7 @@ std::string test_rips_persistence(int coefficient, int min_persistence) {
simplex_tree_stream.close();
// Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << " - dimension= " << st.dimension()
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << " - dimension= " << st.dimension()
<< std::endl;
// Check
@@ -76,11 +76,11 @@ void test_rips_persistence_in_dimension(int dimension) {
value8.insert(0,std::to_string(dimension));
value9.insert(0,std::to_string(dimension));
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=0" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=0" << std::endl;
std::string str_rips_persistence = test_rips_persistence(dimension, 0);
- std::cout << str_rips_persistence << std::endl;
+ std::clog << str_rips_persistence << std::endl;
BOOST_CHECK(str_rips_persistence.find(value0) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value1) != std::string::npos); // Check found
@@ -92,10 +92,10 @@ void test_rips_persistence_in_dimension(int dimension) {
BOOST_CHECK(str_rips_persistence.find(value7) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value8) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value9) != std::string::npos); // Check found
- std::cout << "str_rips_persistence=" << str_rips_persistence << std::endl;
+ std::clog << "str_rips_persistence=" << str_rips_persistence << std::endl;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=1" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=1" << std::endl;
str_rips_persistence = test_rips_persistence(dimension, 1);
@@ -109,10 +109,10 @@ void test_rips_persistence_in_dimension(int dimension) {
BOOST_CHECK(str_rips_persistence.find(value7) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value8) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value9) != std::string::npos); // Check found
- std::cout << "str_rips_persistence=" << str_rips_persistence << std::endl;
+ std::clog << "str_rips_persistence=" << str_rips_persistence << std::endl;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=2" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=2" << std::endl;
str_rips_persistence = test_rips_persistence(dimension, 2);
@@ -126,10 +126,10 @@ void test_rips_persistence_in_dimension(int dimension) {
BOOST_CHECK(str_rips_persistence.find(value7) == std::string::npos); // Check not found
BOOST_CHECK(str_rips_persistence.find(value8) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value9) != std::string::npos); // Check found
- std::cout << "str_rips_persistence=" << str_rips_persistence << std::endl;
+ std::clog << "str_rips_persistence=" << str_rips_persistence << std::endl;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=Inf" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_SINGLE_FIELD DIM=" << dimension << " MIN_PERS=Inf" << std::endl;
str_rips_persistence = test_rips_persistence(dimension, (std::numeric_limits<int>::max)());
@@ -143,7 +143,7 @@ void test_rips_persistence_in_dimension(int dimension) {
BOOST_CHECK(str_rips_persistence.find(value7) == std::string::npos); // Check not found
BOOST_CHECK(str_rips_persistence.find(value8) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value9) != std::string::npos); // Check found
- std::cout << "str_rips_persistence=" << str_rips_persistence << std::endl;
+ std::clog << "str_rips_persistence=" << str_rips_persistence << std::endl;
}
BOOST_AUTO_TEST_CASE( rips_persistent_cohomology_single_field_dim_1 )
diff --git a/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp b/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
index 9e767943..3602aa09 100644
--- a/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
+++ b/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
@@ -30,7 +30,7 @@ std::string test_rips_persistence(int min_coefficient, int max_coefficient, doub
simplex_tree_stream.close();
// Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << " - dimension= " << st.dimension()
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << " - dimension= " << st.dimension()
<< std::endl;
// Check
@@ -68,11 +68,11 @@ void test_rips_persistence_in_dimension(int min_dimension, int max_dimension) {
std::string value6(" 2 0.3 inf");
std::string value7(" 2 0.4 inf");
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_MULTI_FIELD MIN_DIM=" << min_dimension << " MAX_DIM=" << max_dimension << " MIN_PERS=0" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_MULTI_FIELD MIN_DIM=" << min_dimension << " MAX_DIM=" << max_dimension << " MIN_PERS=0" << std::endl;
std::string str_rips_persistence = test_rips_persistence(min_dimension, max_dimension, 0.0);
- std::cout << "str_rips_persistence=" << str_rips_persistence << std::endl;
+ std::clog << "str_rips_persistence=" << str_rips_persistence << std::endl;
BOOST_CHECK(str_rips_persistence.find(value0) != std::string::npos); // Check found
BOOST_CHECK(str_rips_persistence.find(value1) != std::string::npos); // Check found
diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp
index 05bacb9f..3d2ba54f 100644
--- a/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp
+++ b/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp
@@ -63,18 +63,18 @@ int main() {
// have a reverse filtration (i.e. filtration of boundary of each simplex S
// is greater or equal to the filtration of S).
// ----------------------------------------------------------------------------
- std::cout << "Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
+ std::clog << "Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
<< stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> "
+ std::clog << ") -> "
<< "[" << stree.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << std::endl;
}
return 0;
diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp
index bbc3c755..25f93b03 100644
--- a/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp
+++ b/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp
@@ -39,19 +39,19 @@ int main() {
// ----------------------------------------------------------------------------
// Display information about the one skeleton Rips complex
// ----------------------------------------------------------------------------
- std::cout << "Rips complex is of dimension " << stree.dimension() <<
+ std::clog << "Rips complex is of dimension " << stree.dimension() <<
" - " << stree.num_simplices() << " simplices - " <<
stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
+ std::clog << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
return 0;
diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp
index a1db8910..d9df245b 100644
--- a/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp
+++ b/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp
@@ -34,19 +34,19 @@ int main() {
// ----------------------------------------------------------------------------
// Display information about the one skeleton Rips complex
// ----------------------------------------------------------------------------
- std::cout << "Rips complex is of dimension " << stree.dimension() <<
+ std::clog << "Rips complex is of dimension " << stree.dimension() <<
" - " << stree.num_simplices() << " simplices - " <<
stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
+ std::clog << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
return 0;
}
diff --git a/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp b/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
index b7040453..c0c57e7b 100644
--- a/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
+++ b/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
@@ -42,7 +42,7 @@ int main(int argc, char **argv) {
ouput_file_stream.open(std::string(argv[4]));
streambuffer = ouput_file_stream.rdbuf();
} else {
- streambuffer = std::cout.rdbuf();
+ streambuffer = std::clog.rdbuf();
}
Simplex_tree stree;
diff --git a/src/Rips_complex/example/example_rips_complex_from_off_file.cpp b/src/Rips_complex/example/example_rips_complex_from_off_file.cpp
index 36b468a7..9aa7a657 100644
--- a/src/Rips_complex/example/example_rips_complex_from_off_file.cpp
+++ b/src/Rips_complex/example/example_rips_complex_from_off_file.cpp
@@ -41,7 +41,7 @@ int main(int argc, char **argv) {
ouput_file_stream.open(std::string(argv[4]));
streambuffer = ouput_file_stream.rdbuf();
} else {
- streambuffer = std::cout.rdbuf();
+ streambuffer = std::clog.rdbuf();
}
Simplex_tree stree;
diff --git a/src/Rips_complex/example/example_sparse_rips.cpp b/src/Rips_complex/example/example_sparse_rips.cpp
index 1c95b48c..4bd31103 100644
--- a/src/Rips_complex/example/example_sparse_rips.cpp
+++ b/src/Rips_complex/example/example_sparse_rips.cpp
@@ -25,6 +25,6 @@ int main() {
// ----------------------------------------------------------------------------
// Display information about the complex
// ----------------------------------------------------------------------------
- std::cout << "Sparse Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ std::clog << "Sparse Rips complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
<< " simplices - " << stree.num_vertices() << " vertices." << std::endl;
}
diff --git a/src/Rips_complex/test/test_rips_complex.cpp b/src/Rips_complex/test/test_rips_complex.cpp
index 1225f8df..19dcd283 100644
--- a/src/Rips_complex/test/test_rips_complex.cpp
+++ b/src/Rips_complex/test/test_rips_complex.cpp
@@ -43,7 +43,7 @@ BOOST_AUTO_TEST_CASE(RIPS_DOC_OFF_file) {
// ----------------------------------------------------------------------------
std::string off_file_name("alphacomplexdoc.off");
double rips_threshold = 12.0;
- std::cout << "========== OFF FILE NAME = " << off_file_name << " - Rips threshold=" <<
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - Rips threshold=" <<
rips_threshold << "==========" << std::endl;
Gudhi::Points_off_reader<Point> off_reader(off_file_name);
@@ -52,14 +52,14 @@ BOOST_AUTO_TEST_CASE(RIPS_DOC_OFF_file) {
const int DIMENSION_1 = 1;
Simplex_tree st;
rips_complex_from_file.create_complex(st, DIMENSION_1);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
BOOST_CHECK(st.dimension() == DIMENSION_1);
const int NUMBER_OF_VERTICES = 7;
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ std::clog << "st.num_simplices()=" << st.num_simplices() << std::endl;
BOOST_CHECK(st.num_simplices() == 18);
// Check filtration values of vertices is 0.0
@@ -71,12 +71,12 @@ BOOST_AUTO_TEST_CASE(RIPS_DOC_OFF_file) {
for (auto f_simplex : st.skeleton_simplex_range(DIMENSION_1)) {
if (DIMENSION_1 == st.dimension(f_simplex)) {
std::vector<Point> vp;
- std::cout << "vertex = (";
+ std::clog << "vertex = (";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << ",";
+ std::clog << vertex << ",";
vp.push_back(off_reader.get_point_cloud().at(vertex));
}
- std::cout << ") - distance =" << Gudhi::Euclidean_distance()(vp.at(0), vp.at(1)) <<
+ std::clog << ") - distance =" << Gudhi::Euclidean_distance()(vp.at(0), vp.at(1)) <<
" - filtration =" << st.filtration(f_simplex) << std::endl;
BOOST_CHECK(vp.size() == 2);
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), Gudhi::Euclidean_distance()(vp.at(0), vp.at(1)));
@@ -86,46 +86,46 @@ BOOST_AUTO_TEST_CASE(RIPS_DOC_OFF_file) {
const int DIMENSION_2 = 2;
Simplex_tree st2;
rips_complex_from_file.create_complex(st2, DIMENSION_2);
- std::cout << "st2.dimension()=" << st2.dimension() << std::endl;
+ std::clog << "st2.dimension()=" << st2.dimension() << std::endl;
BOOST_CHECK(st2.dimension() == DIMENSION_2);
- std::cout << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
+ std::clog << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
BOOST_CHECK(st2.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
+ std::clog << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
BOOST_CHECK(st2.num_simplices() == 23);
Simplex_tree::Filtration_value f01 = st2.filtration(st2.find({0, 1}));
Simplex_tree::Filtration_value f02 = st2.filtration(st2.find({0, 2}));
Simplex_tree::Filtration_value f12 = st2.filtration(st2.find({1, 2}));
Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
- std::cout << "f012= " << f012 << " | f01= " << f01 << " - f02= " << f02 << " - f12= " << f12 << std::endl;
+ std::clog << "f012= " << f012 << " | f01= " << f01 << " - f02= " << f02 << " - f12= " << f12 << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::max(f01, std::max(f02,f12)));
Simplex_tree::Filtration_value f45 = st2.filtration(st2.find({4, 5}));
Simplex_tree::Filtration_value f56 = st2.filtration(st2.find({5, 6}));
Simplex_tree::Filtration_value f46 = st2.filtration(st2.find({4, 6}));
Simplex_tree::Filtration_value f456 = st2.filtration(st2.find({4, 5, 6}));
- std::cout << "f456= " << f456 << " | f45= " << f45 << " - f56= " << f56 << " - f46= " << f46 << std::endl;
+ std::clog << "f456= " << f456 << " | f45= " << f45 << " - f56= " << f56 << " - f46= " << f46 << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f456, std::max(f45, std::max(f56,f46)));
const int DIMENSION_3 = 3;
Simplex_tree st3;
rips_complex_from_file.create_complex(st3, DIMENSION_3);
- std::cout << "st3.dimension()=" << st3.dimension() << std::endl;
+ std::clog << "st3.dimension()=" << st3.dimension() << std::endl;
BOOST_CHECK(st3.dimension() == DIMENSION_3);
- std::cout << "st3.num_vertices()=" << st3.num_vertices() << std::endl;
+ std::clog << "st3.num_vertices()=" << st3.num_vertices() << std::endl;
BOOST_CHECK(st3.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st3.num_simplices()=" << st3.num_simplices() << std::endl;
+ std::clog << "st3.num_simplices()=" << st3.num_simplices() << std::endl;
BOOST_CHECK(st3.num_simplices() == 24);
Simplex_tree::Filtration_value f123 = st3.filtration(st3.find({1, 2, 3}));
Simplex_tree::Filtration_value f013 = st3.filtration(st3.find({0, 1, 3}));
Simplex_tree::Filtration_value f023 = st3.filtration(st3.find({0, 2, 3}));
Simplex_tree::Filtration_value f0123 = st3.filtration(st3.find({0, 1, 2, 3}));
- std::cout << "f0123= " << f0123 << " | f012= " << f012 << " - f123= " << f123 << " - f013= " << f013 <<
+ std::clog << "f0123= " << f0123 << " | f012= " << f012 << " - f123= " << f123 << " - f013= " << f013 <<
" - f023= " << f023 << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f0123, std::max(f012, std::max(f123, std::max(f013, f023))));
@@ -176,34 +176,34 @@ BOOST_AUTO_TEST_CASE(Rips_complex_from_points) {
// ----------------------------------------------------------------------------
Rips_complex rips_complex_from_points(points, 2.0, Custom_square_euclidean_distance());
- std::cout << "========== Rips_complex_from_points ==========" << std::endl;
+ std::clog << "========== Rips_complex_from_points ==========" << std::endl;
Simplex_tree st;
const int DIMENSION = 3;
rips_complex_from_points.create_complex(st, DIMENSION);
// Another way to check num_simplices
- std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
for (auto f_simplex : st.filtration_simplex_range()) {
num_simplices++;
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << st.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
BOOST_CHECK(num_simplices == 15);
- std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ std::clog << "st.num_simplices()=" << st.num_simplices() << std::endl;
BOOST_CHECK(st.num_simplices() == 15);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
BOOST_CHECK(st.dimension() == DIMENSION);
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == 4);
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
+ std::clog << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
switch (st.dimension(f_simplex)) {
case 0:
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.0);
@@ -241,34 +241,34 @@ BOOST_AUTO_TEST_CASE(Sparse_rips_complex_from_points) {
// .001 is small enough that we get a deterministic result matching the exact Rips
Sparse_rips_complex sparse_rips(points, Custom_square_euclidean_distance(), .001);
- std::cout << "========== Sparse_rips_complex_from_points ==========" << std::endl;
+ std::clog << "========== Sparse_rips_complex_from_points ==========" << std::endl;
Simplex_tree st;
const int DIMENSION = 3;
sparse_rips.create_complex(st, DIMENSION);
// Another way to check num_simplices
- std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
for (auto f_simplex : st.filtration_simplex_range()) {
num_simplices++;
- std::cout << " ( ";
+ std::clog << " ( ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << ") -> " << "[" << st.filtration(f_simplex) << "] ";
- std::cout << std::endl;
+ std::clog << ") -> " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << std::endl;
}
BOOST_CHECK(num_simplices == 15);
- std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ std::clog << "st.num_simplices()=" << st.num_simplices() << std::endl;
BOOST_CHECK(st.num_simplices() == 15);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
BOOST_CHECK(st.dimension() == DIMENSION);
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == 4);
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
+ std::clog << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
switch (st.dimension(f_simplex)) {
case 0:
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.0);
@@ -293,7 +293,7 @@ BOOST_AUTO_TEST_CASE(Rips_doc_csv_file) {
// ----------------------------------------------------------------------------
std::string csv_file_name("full_square_distance_matrix.csv");
double rips_threshold = 12.0;
- std::cout << "========== CSV FILE NAME = " << csv_file_name << " - Rips threshold=" <<
+ std::clog << "========== CSV FILE NAME = " << csv_file_name << " - Rips threshold=" <<
rips_threshold << "==========" << std::endl;
Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
@@ -302,14 +302,14 @@ BOOST_AUTO_TEST_CASE(Rips_doc_csv_file) {
const int DIMENSION_1 = 1;
Simplex_tree st;
rips_complex_from_file.create_complex(st, DIMENSION_1);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
BOOST_CHECK(st.dimension() == DIMENSION_1);
const int NUMBER_OF_VERTICES = 7;
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ std::clog << "st.num_simplices()=" << st.num_simplices() << std::endl;
BOOST_CHECK(st.num_simplices() == 18);
// Check filtration values of vertices is 0.0
@@ -321,12 +321,12 @@ BOOST_AUTO_TEST_CASE(Rips_doc_csv_file) {
for (auto f_simplex : st.skeleton_simplex_range(DIMENSION_1)) {
if (DIMENSION_1 == st.dimension(f_simplex)) {
std::vector<Simplex_tree::Vertex_handle> vvh;
- std::cout << "vertex = (";
+ std::clog << "vertex = (";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << ",";
+ std::clog << vertex << ",";
vvh.push_back(vertex);
}
- std::cout << ") - filtration =" << st.filtration(f_simplex) << std::endl;
+ std::clog << ") - filtration =" << st.filtration(f_simplex) << std::endl;
BOOST_CHECK(vvh.size() == 2);
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), distances[vvh.at(0)][vvh.at(1)]);
}
@@ -335,46 +335,46 @@ BOOST_AUTO_TEST_CASE(Rips_doc_csv_file) {
const int DIMENSION_2 = 2;
Simplex_tree st2;
rips_complex_from_file.create_complex(st2, DIMENSION_2);
- std::cout << "st2.dimension()=" << st2.dimension() << std::endl;
+ std::clog << "st2.dimension()=" << st2.dimension() << std::endl;
BOOST_CHECK(st2.dimension() == DIMENSION_2);
- std::cout << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
+ std::clog << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
BOOST_CHECK(st2.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
+ std::clog << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
BOOST_CHECK(st2.num_simplices() == 23);
Simplex_tree::Filtration_value f01 = st2.filtration(st2.find({0, 1}));
Simplex_tree::Filtration_value f02 = st2.filtration(st2.find({0, 2}));
Simplex_tree::Filtration_value f12 = st2.filtration(st2.find({1, 2}));
Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
- std::cout << "f012= " << f012 << " | f01= " << f01 << " - f02= " << f02 << " - f12= " << f12 << std::endl;
+ std::clog << "f012= " << f012 << " | f01= " << f01 << " - f02= " << f02 << " - f12= " << f12 << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::max(f01, std::max(f02,f12)));
Simplex_tree::Filtration_value f45 = st2.filtration(st2.find({4, 5}));
Simplex_tree::Filtration_value f56 = st2.filtration(st2.find({5, 6}));
Simplex_tree::Filtration_value f46 = st2.filtration(st2.find({4, 6}));
Simplex_tree::Filtration_value f456 = st2.filtration(st2.find({4, 5, 6}));
- std::cout << "f456= " << f456 << " | f45= " << f45 << " - f56= " << f56 << " - f46= " << f46 << std::endl;
+ std::clog << "f456= " << f456 << " | f45= " << f45 << " - f56= " << f56 << " - f46= " << f46 << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f456, std::max(f45, std::max(f56,f46)));
const int DIMENSION_3 = 3;
Simplex_tree st3;
rips_complex_from_file.create_complex(st3, DIMENSION_3);
- std::cout << "st3.dimension()=" << st3.dimension() << std::endl;
+ std::clog << "st3.dimension()=" << st3.dimension() << std::endl;
BOOST_CHECK(st3.dimension() == DIMENSION_3);
- std::cout << "st3.num_vertices()=" << st3.num_vertices() << std::endl;
+ std::clog << "st3.num_vertices()=" << st3.num_vertices() << std::endl;
BOOST_CHECK(st3.num_vertices() == NUMBER_OF_VERTICES);
- std::cout << "st3.num_simplices()=" << st3.num_simplices() << std::endl;
+ std::clog << "st3.num_simplices()=" << st3.num_simplices() << std::endl;
BOOST_CHECK(st3.num_simplices() == 24);
Simplex_tree::Filtration_value f123 = st3.filtration(st3.find({1, 2, 3}));
Simplex_tree::Filtration_value f013 = st3.filtration(st3.find({0, 1, 3}));
Simplex_tree::Filtration_value f023 = st3.filtration(st3.find({0, 2, 3}));
Simplex_tree::Filtration_value f0123 = st3.filtration(st3.find({0, 1, 2, 3}));
- std::cout << "f0123= " << f0123 << " | f012= " << f012 << " - f123= " << f123 << " - f013= " << f013 <<
+ std::clog << "f0123= " << f0123 << " | f012= " << f012 << " - f123= " << f123 << " - f013= " << f013 <<
" - f023= " << f023 << std::endl;
GUDHI_TEST_FLOAT_EQUALITY_CHECK(f0123, std::max(f012, std::max(f123, std::max(f013, f023))));
@@ -389,7 +389,7 @@ BOOST_AUTO_TEST_CASE(Rips_create_complex_throw) {
// ----------------------------------------------------------------------------
std::string off_file_name("alphacomplexdoc.off");
double rips_threshold = 12.0;
- std::cout << "========== OFF FILE NAME = " << off_file_name << " - Rips threshold=" <<
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - Rips threshold=" <<
rips_threshold << "==========" << std::endl;
Gudhi::Points_off_reader<Point> off_reader(off_file_name);
@@ -398,7 +398,7 @@ BOOST_AUTO_TEST_CASE(Rips_create_complex_throw) {
Simplex_tree stree;
std::vector<int> simplex = {0, 1, 2};
stree.insert_simplex_and_subfaces(simplex);
- std::cout << "Check exception throw in debug mode" << std::endl;
+ std::clog << "Check exception throw in debug mode" << std::endl;
  // throws an exception because stree is not empty
BOOST_CHECK_THROW (rips_complex_from_file.create_complex(stree, 1), std::invalid_argument);
}
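
The checks such as GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::max(f01, std::max(f02, f12))) in the test above rely on the defining property of the Rips filtration: a simplex appears at the largest filtration value of its edges. A minimal sketch illustrating this on three hypothetical points (not part of the test suite):

#include <gudhi/Rips_complex.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/distance_functions.h>
#include <algorithm>
#include <iostream>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Filtration_value = Simplex_tree::Filtration_value;
  using Point = std::vector<double>;
  using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;

  // Three hypothetical points in the plane.
  std::vector<Point> points = {{0., 0.}, {1., 0.}, {0., 2.}};
  Rips_complex rips(points, 10., Gudhi::Euclidean_distance());

  Simplex_tree st;
  rips.create_complex(st, 2);  // expand up to triangles

  Filtration_value f01 = st.filtration(st.find({0, 1}));
  Filtration_value f02 = st.filtration(st.find({0, 2}));
  Filtration_value f12 = st.filtration(st.find({1, 2}));
  Filtration_value f012 = st.filtration(st.find({0, 1, 2}));
  // f012 equals the largest edge value, here the distance between points 1 and 2.
  std::clog << "f012 = " << f012
            << " | max(f01, f02, f12) = " << std::max({f01, f02, f12}) << std::endl;
  return 0;
}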
diff --git a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
index 585de4a0..b473738e 100644
--- a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
@@ -68,11 +68,8 @@ int main(int argc, char* argv[]) {
Simplex_tree simplex_tree;
rips_complex_from_file.create_complex(simplex_tree, dim_max);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(simplex_tree);
@@ -121,7 +118,7 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"min-edge-corelation,c", po::value<Filtration_value>(&correlation_min)->default_value(0),
"Minimal corelation of an edge for the Rips complex construction.")(
"cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
@@ -143,17 +140,17 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a corelation matrix.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a corelation matrix.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
index ad429e11..6306755d 100644
--- a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
@@ -47,11 +47,8 @@ int main(int argc, char* argv[]) {
Simplex_tree simplex_tree;
rips_complex_from_file.create_complex(simplex_tree, dim_max);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(simplex_tree);
@@ -82,7 +79,7 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
@@ -105,17 +102,17 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a set of distance matrix.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a set of distance matrix.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Rips_complex/utilities/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index daa7e1db..9d7490b3 100644
--- a/src/Rips_complex/utilities/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
@@ -49,11 +49,8 @@ int main(int argc, char* argv[]) {
Simplex_tree simplex_tree;
rips_complex_from_file.create_complex(simplex_tree, dim_max);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(simplex_tree);
@@ -84,7 +81,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
@@ -107,17 +104,17 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Rips complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Rips complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Rips_complex/utilities/sparse_rips_persistence.cpp b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
index cefd8a67..ac935b41 100644
--- a/src/Rips_complex/utilities/sparse_rips_persistence.cpp
+++ b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
@@ -51,11 +51,8 @@ int main(int argc, char* argv[]) {
Simplex_tree simplex_tree;
sparse_rips.create_complex(simplex_tree, dim_max);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(simplex_tree);
@@ -87,7 +84,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
@@ -112,17 +109,17 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a sparse 1/(1-epsilon)-approximation of the Rips complex \ndefined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a sparse 1/(1-epsilon)-approximation of the Rips complex \ndefined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
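
The explicit `initialize_filtration()` call dropped above is no longer required: with the `Simplex_tree` changes further down in this patch, the filtration cache is (re)built on demand by `filtration_simplex_range()`. A minimal sketch of the resulting workflow, assuming the usual GUDHI `Sparse_rips_complex` / `Persistent_cohomology` API; the point cloud and the epsilon / dim_max parameters are purely illustrative:

    #include <gudhi/Simplex_tree.h>
    #include <gudhi/Sparse_rips_complex.h>
    #include <gudhi/Persistent_cohomology.h>
    #include <gudhi/distance_functions.h>
    #include <iostream>
    #include <vector>

    int main() {
      using Simplex_tree = Gudhi::Simplex_tree<>;
      using Filtration_value = Simplex_tree::Filtration_value;
      using Sparse_rips = Gudhi::rips_complex::Sparse_rips_complex<Filtration_value>;
      using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
      using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;

      // Illustrative input only: four points in the plane, epsilon = 0.5, dim_max = 2.
      std::vector<std::vector<double>> points = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};
      Sparse_rips sparse_rips(points, Gudhi::Euclidean_distance(), 0.5);

      Simplex_tree simplex_tree;
      sparse_rips.create_complex(simplex_tree, 2);
      std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices\n";
      // No explicit simplex_tree.initialize_filtration() here: the filtration
      // cache is built lazily when the filtration range is first traversed.

      Persistent_cohomology pcoh(simplex_tree);
      pcoh.init_coefficients(2);               // Z/2Z coefficients
      pcoh.compute_persistent_cohomology(0.);  // min_persistence = 0
      pcoh.output_diagram(std::cout);          // one bar per line: "p dim b d"
      return 0;
    }
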
diff --git a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
index d716fb1f..0e7e382b 100644
--- a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
+++ b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
@@ -55,18 +55,18 @@ class Cech_blocker {
bool operator()(Simplex_handle sh) {
std::vector<Point> points;
#if DEBUG_TRACES
- std::cout << "Cech_blocker on [";
+ std::clog << "Cech_blocker on [";
#endif // DEBUG_TRACES
for (auto vertex : simplex_tree_.simplex_vertex_range(sh)) {
points.push_back(point_cloud_[vertex]);
#if DEBUG_TRACES
- std::cout << vertex << ", ";
+ std::clog << vertex << ", ";
#endif // DEBUG_TRACES
}
Min_sphere ms(points.begin(), points.end());
Filtration_value radius = ms.radius();
#if DEBUG_TRACES
- std::cout << "] - radius = " << radius << " - returns " << (radius > threshold_) << std::endl;
+ std::clog << "] - radius = " << radius << " - returns " << (radius > threshold_) << std::endl;
#endif // DEBUG_TRACES
simplex_tree_.assign_filtration(sh, radius);
return (radius > threshold_);
@@ -106,24 +106,24 @@ int main(int argc, char* argv[]) {
// expand the graph until dimension dim_max
st.expansion_with_blockers(dim_max, Cech_blocker(st, threshold, off_reader.get_point_cloud()));
- std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << st.dimension() << " \n";
+ std::clog << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << st.dimension() << " \n";
// Sort the simplices in the order of the filtration
st.initialize_filtration();
#if DEBUG_TRACES
- std::cout << "********************************************************************\n";
+ std::clog << "********************************************************************\n";
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
- std::cout << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " "
+ std::clog << " "
<< "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << static_cast<int>(vertex) << " ";
+ std::clog << static_cast<int>(vertex) << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
#endif // DEBUG_TRACES
return 0;
@@ -154,11 +154,11 @@ void program_options(int argc, char* argv[], std::string& off_file_points, Filtr
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Construct a Cech complex defined on a set of input points.\n \n";
+ std::clog << std::endl;
+ std::clog << "Construct a Cech complex defined on a set of input points.\n \n";
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
index e455c426..8ee7ab74 100644
--- a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
+++ b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
@@ -63,7 +63,7 @@ Vertex_list from(const Cell_handle& ch) {
Vertex_list the_list;
for (auto i = 0; i < 4; i++) {
#ifdef DEBUG_TRACES
- std::cout << "from cell[" << i << "]=" << ch->vertex(i)->point() << std::endl;
+ std::clog << "from cell[" << i << "]=" << ch->vertex(i)->point() << std::endl;
#endif // DEBUG_TRACES
the_list.push_back(ch->vertex(i));
}
@@ -75,7 +75,7 @@ Vertex_list from(const Facet& fct) {
for (auto i = 0; i < 4; i++) {
if (fct.second != i) {
#ifdef DEBUG_TRACES
- std::cout << "from facet=[" << i << "]" << fct.first->vertex(i)->point() << std::endl;
+ std::clog << "from facet=[" << i << "]" << fct.first->vertex(i)->point() << std::endl;
#endif // DEBUG_TRACES
the_list.push_back(fct.first->vertex(i));
}
@@ -88,7 +88,7 @@ Vertex_list from(const Edge& edg) {
for (auto i = 0; i < 4; i++) {
if ((edg.second == i) || (edg.third == i)) {
#ifdef DEBUG_TRACES
- std::cout << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl;
+ std::clog << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl;
#endif // DEBUG_TRACES
the_list.push_back(edg.first->vertex(i));
}
@@ -99,7 +99,7 @@ Vertex_list from(const Edge& edg) {
Vertex_list from(const Alpha_shape_3::Vertex_handle& vh) {
Vertex_list the_list;
#ifdef DEBUG_TRACES
- std::cout << "from vertex=" << vh->point() << std::endl;
+ std::clog << "from vertex=" << vh->point() << std::endl;
#endif // DEBUG_TRACES
the_list.push_back(vh);
return the_list;
@@ -128,7 +128,7 @@ int main(int argc, char * const argv[]) {
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL);
#ifdef DEBUG_TRACES
- std::cout << "Alpha shape computed in GENERAL mode" << std::endl;
+ std::clog << "Alpha shape computed in GENERAL mode" << std::endl;
#endif // DEBUG_TRACES
// filtration with alpha values from alpha shape
@@ -140,7 +140,7 @@ int main(int argc, char * const argv[]) {
as.filtration_with_alpha_values(disp);
#ifdef DEBUG_TRACES
- std::cout << "filtration_with_alpha_values returns : " << the_objects.size() << " objects" << std::endl;
+ std::clog << "filtration_with_alpha_values returns : " << the_objects.size() << " objects" << std::endl;
#endif // DEBUG_TRACES
Alpha_shape_3::size_type count_vertices = 0;
@@ -177,7 +177,7 @@ int main(int argc, char * const argv[]) {
// alpha shape not found
Simplex_tree_vertex vertex = map_cgal_simplex_tree.size();
#ifdef DEBUG_TRACES
- std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] not found - insert_simplex " << vertex << "\n";
+ std::clog << "vertex [" << the_alpha_shape_vertex->point() << "] not found - insert_simplex " << vertex << "\n";
#endif // DEBUG_TRACES
the_simplex_tree.push_back(vertex);
map_cgal_simplex_tree.insert(Alpha_shape_simplex_tree_pair(the_alpha_shape_vertex, vertex));
@@ -185,14 +185,14 @@ int main(int argc, char * const argv[]) {
// alpha shape found
Simplex_tree_vertex vertex = the_map_iterator->second;
#ifdef DEBUG_TRACES
- std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] found in " << vertex << std::endl;
+ std::clog << "vertex [" << the_alpha_shape_vertex->point() << "] found in " << vertex << std::endl;
#endif // DEBUG_TRACES
the_simplex_tree.push_back(vertex);
}
}
// Construction of the simplex_tree
#ifdef DEBUG_TRACES
- std::cout << "filtration = " << *the_alpha_value_iterator << std::endl;
+ std::clog << "filtration = " << *the_alpha_value_iterator << std::endl;
#endif // DEBUG_TRACES
simplex_tree.insert_simplex(the_simplex_tree, std::sqrt(*the_alpha_value_iterator));
if (the_alpha_value_iterator != the_alpha_values.end())
@@ -201,61 +201,61 @@ int main(int argc, char * const argv[]) {
std::cerr << "This shall not happen" << std::endl;
}
#ifdef DEBUG_TRACES
- std::cout << "vertices \t\t" << count_vertices << std::endl;
- std::cout << "edges \t\t" << count_edges << std::endl;
- std::cout << "facets \t\t" << count_facets << std::endl;
- std::cout << "cells \t\t" << count_cells << std::endl;
+ std::clog << "vertices \t\t" << count_vertices << std::endl;
+ std::clog << "edges \t\t" << count_edges << std::endl;
+ std::clog << "facets \t\t" << count_facets << std::endl;
+ std::clog << "cells \t\t" << count_cells << std::endl;
- std::cout << "Information of the Simplex Tree:\n";
- std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
- std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
+ std::clog << "Information of the Simplex Tree:\n";
+ std::clog << " Number of vertices = " << simplex_tree.num_vertices() << " ";
+ std::clog << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
- std::cout << "Iterator on vertices: \n";
+ std::clog << "Iterator on vertices: \n";
for (auto vertex : simplex_tree.complex_vertex_range()) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
#endif // DEBUG_TRACES
- std::cout << simplex_tree << std::endl;
+ std::clog << simplex_tree << std::endl;
#ifdef DEBUG_TRACES
- std::cout << std::endl << std::endl << "Iterator on simplices:\n";
+ std::clog << std::endl << std::endl << "Iterator on simplices:\n";
for (auto simplex : simplex_tree.complex_simplex_range()) {
- std::cout << " ";
+ std::clog << " ";
for (auto vertex : simplex_tree.simplex_vertex_range(simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, and their boundary simplices:\n";
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, and their boundary simplices:\n";
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
for (auto b_simplex : simplex_tree.boundary_simplex_range(f_simplex)) {
- std::cout << " " << "[" << simplex_tree.filtration(b_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(b_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(b_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
#endif // DEBUG_TRACES
diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
index 494f8b1d..df52bf43 100644
--- a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
+++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
@@ -34,31 +34,31 @@ int main(int argc, char* const argv[]) {
stree.expansion_with_blockers(3, [&](Simplex_handle sh) {
bool result = false;
- std::cout << "Blocker on [";
+ std::clog << "Blocker on [";
// User can loop on the vertices from the given simplex_handle i.e.
for (auto vertex : stree.simplex_vertex_range(sh)) {
// We block the expansion, if the vertex '6' is in the given list of vertices
if (vertex == 6) result = true;
- std::cout << vertex << ", ";
+ std::clog << vertex << ", ";
}
- std::cout << "] ( " << stree.filtration(sh);
+ std::clog << "] ( " << stree.filtration(sh);
    // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries)
stree.assign_filtration(sh, stree.filtration(sh) + 1.);
- std::cout << " + 1. ) = " << result << std::endl;
+ std::clog << " + 1. ) = " << result << std::endl;
return result;
});
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << stree.num_simplices() << " simplices";
- std::cout << " - dimension " << stree.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << stree.num_simplices() << " simplices";
+ std::clog << " - dimension " << stree.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : stree.filtration_simplex_range()) {
- std::cout << " "
+ std::clog << " "
<< "[" << stree.filtration(f_simplex) << "] ";
- for (auto vertex : stree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
return 0;
diff --git a/src/Simplex_tree/example/mini_simplex_tree.cpp b/src/Simplex_tree/example/mini_simplex_tree.cpp
index bbc582c7..4043bffd 100644
--- a/src/Simplex_tree/example/mini_simplex_tree.cpp
+++ b/src/Simplex_tree/example/mini_simplex_tree.cpp
@@ -48,7 +48,7 @@ int main() {
for (ST::Simplex_handle t : st.cofaces_simplex_range(e, 1)) {
// Only coface is 012
for (ST::Vertex_handle v : st.simplex_vertex_range(t)) // v in { 0, 1, 2 }
- std::cout << v;
- std::cout << '\n';
+ std::clog << v;
+ std::clog << '\n';
}
}
diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp
index 47ea7e36..e8bec596 100644
--- a/src/Simplex_tree/example/simple_simplex_tree.cpp
+++ b/src/Simplex_tree/example/simple_simplex_tree.cpp
@@ -28,8 +28,8 @@ int main(int argc, char* const argv[]) {
const Filtration_value FOURTH_FILTRATION_VALUE = 0.4;
// TEST OF INSERTION
- std::cout << "********************************************************************" << std::endl;
- std::cout << "EXAMPLE OF SIMPLE INSERTION" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "EXAMPLE OF SIMPLE INSERTION" << std::endl;
// Construct the Simplex Tree
Simplex_tree simplexTree;
@@ -41,149 +41,139 @@ int main(int argc, char* const argv[]) {
/* 2 0 3 */
// ++ FIRST
- std::cout << " * INSERT 0" << std::endl;
+ std::clog << " * INSERT 0" << std::endl;
typeVectorVertex firstSimplexVector = {0};
typePairSimplexBool returnValue =
simplexTree.insert_simplex(firstSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + 0 INSERTED" << std::endl;
+ std::clog << " + 0 INSERTED" << std::endl;
} else {
- std::cout << " - 0 NOT INSERTED" << std::endl;
+ std::clog << " - 0 NOT INSERTED" << std::endl;
}
// ++ SECOND
- std::cout << " * INSERT 1" << std::endl;
+ std::clog << " * INSERT 1" << std::endl;
typeVectorVertex secondSimplexVector = {1};
returnValue = simplexTree.insert_simplex(secondSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + 1 INSERTED" << std::endl;
+ std::clog << " + 1 INSERTED" << std::endl;
} else {
- std::cout << " - 1 NOT INSERTED" << std::endl;
+ std::clog << " - 1 NOT INSERTED" << std::endl;
}
// ++ THIRD
- std::cout << " * INSERT (0,1)" << std::endl;
+ std::clog << " * INSERT (0,1)" << std::endl;
typeVectorVertex thirdSimplexVector = {0, 1};
returnValue = simplexTree.insert_simplex(thirdSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + (0,1) INSERTED" << std::endl;
+ std::clog << " + (0,1) INSERTED" << std::endl;
} else {
- std::cout << " - (0,1) NOT INSERTED" << std::endl;
+ std::clog << " - (0,1) NOT INSERTED" << std::endl;
}
// ++ FOURTH
- std::cout << " * INSERT 2" << std::endl;
+ std::clog << " * INSERT 2" << std::endl;
typeVectorVertex fourthSimplexVector = {2};
returnValue = simplexTree.insert_simplex(fourthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + 2 INSERTED" << std::endl;
+ std::clog << " + 2 INSERTED" << std::endl;
} else {
- std::cout << " - 2 NOT INSERTED" << std::endl;
+ std::clog << " - 2 NOT INSERTED" << std::endl;
}
// ++ FIFTH
- std::cout << " * INSERT (2,0)" << std::endl;
+ std::clog << " * INSERT (2,0)" << std::endl;
typeVectorVertex fifthSimplexVector = {2, 0};
returnValue = simplexTree.insert_simplex(fifthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + (2,0) INSERTED" << std::endl;
+ std::clog << " + (2,0) INSERTED" << std::endl;
} else {
- std::cout << " - (2,0) NOT INSERTED" << std::endl;
+ std::clog << " - (2,0) NOT INSERTED" << std::endl;
}
// ++ SIXTH
- std::cout << " * INSERT (2,1)" << std::endl;
+ std::clog << " * INSERT (2,1)" << std::endl;
typeVectorVertex sixthSimplexVector = {2, 1};
returnValue = simplexTree.insert_simplex(sixthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + (2,1) INSERTED" << std::endl;
+ std::clog << " + (2,1) INSERTED" << std::endl;
} else {
- std::cout << " - (2,1) NOT INSERTED" << std::endl;
+ std::clog << " - (2,1) NOT INSERTED" << std::endl;
}
// ++ SEVENTH
- std::cout << " * INSERT (2,1,0)" << std::endl;
+ std::clog << " * INSERT (2,1,0)" << std::endl;
typeVectorVertex seventhSimplexVector = {2, 1, 0};
returnValue = simplexTree.insert_simplex(seventhSimplexVector, Filtration_value(THIRD_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + (2,1,0) INSERTED" << std::endl;
+ std::clog << " + (2,1,0) INSERTED" << std::endl;
} else {
- std::cout << " - (2,1,0) NOT INSERTED" << std::endl;
+ std::clog << " - (2,1,0) NOT INSERTED" << std::endl;
}
// ++ EIGHTH
- std::cout << " * INSERT 3" << std::endl;
+ std::clog << " * INSERT 3" << std::endl;
typeVectorVertex eighthSimplexVector = {3};
returnValue = simplexTree.insert_simplex(eighthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + 3 INSERTED" << std::endl;
+ std::clog << " + 3 INSERTED" << std::endl;
} else {
- std::cout << " - 3 NOT INSERTED" << std::endl;
+ std::clog << " - 3 NOT INSERTED" << std::endl;
}
// ++ NINETH
- std::cout << " * INSERT (3,0)" << std::endl;
+ std::clog << " * INSERT (3,0)" << std::endl;
typeVectorVertex ninethSimplexVector = {3, 0};
returnValue = simplexTree.insert_simplex(ninethSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + (3,0) INSERTED" << std::endl;
+ std::clog << " + (3,0) INSERTED" << std::endl;
} else {
- std::cout << " - (3,0) NOT INSERTED" << std::endl;
+ std::clog << " - (3,0) NOT INSERTED" << std::endl;
}
// ++ TENTH
- std::cout << " * INSERT 0 (already inserted)" << std::endl;
+ std::clog << " * INSERT 0 (already inserted)" << std::endl;
typeVectorVertex tenthSimplexVector = {0};
// With a different filtration value
returnValue = simplexTree.insert_simplex(tenthSimplexVector, Filtration_value(FOURTH_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + 0 INSERTED" << std::endl;
+ std::clog << " + 0 INSERTED" << std::endl;
} else {
- std::cout << " - 0 NOT INSERTED" << std::endl;
+ std::clog << " - 0 NOT INSERTED" << std::endl;
}
// ++ ELEVENTH
- std::cout << " * INSERT (2,1,0) (already inserted)" << std::endl;
+ std::clog << " * INSERT (2,1,0) (already inserted)" << std::endl;
typeVectorVertex eleventhSimplexVector = {2, 1, 0};
returnValue = simplexTree.insert_simplex(eleventhSimplexVector, Filtration_value(FOURTH_FILTRATION_VALUE));
if (returnValue.second == true) {
- std::cout << " + (2,1,0) INSERTED" << std::endl;
+ std::clog << " + (2,1,0) INSERTED" << std::endl;
} else {
- std::cout << " - (2,1,0) NOT INSERTED" << std::endl;
+ std::clog << " - (2,1,0) NOT INSERTED" << std::endl;
}
// ++ GENERAL VARIABLE SET
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << simplexTree.num_simplices() << " simplices\n";
- std::cout << " - dimension " << simplexTree.dimension() << "\n";
- std::cout << "* Iterator on simplices, with [filtration value]:\n";
- for (Simplex_tree::Simplex_handle f_simplex : simplexTree.complex_simplex_range()) {
- std::cout << " "
- << "[" << simplexTree.filtration(f_simplex) << "] ";
- for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
- }
-
- std::cout << "********************************************************************\n";
- // Can not be done in the middle of 2 inserts
- std::cout << "* Iterator on simplices sorted by filtration values, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << simplexTree.num_simplices() << " simplices\n";
+ std::clog << " - dimension " << simplexTree.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplexTree.filtration_simplex_range()) {
- std::cout << " "
+ std::clog << " "
<< "[" << simplexTree.filtration(f_simplex) << "] ";
- for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
// [0.1] 0
// [0.1] 1
@@ -199,66 +189,66 @@ int main(int argc, char* const argv[]) {
// Find in the simplex_tree
// ------------------------------------------------------------------------------------------------------------------
Simplex_tree::Simplex_handle simplexFound = simplexTree.find(secondSimplexVector);
- std::cout << "**************IS THE SIMPLEX {1} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
typeVectorVertex unknownSimplexVector = {15};
simplexFound = simplexTree.find(unknownSimplexVector);
- std::cout << "**************IS THE SIMPLEX {15} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {15} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
simplexFound = simplexTree.find(fifthSimplexVector);
- std::cout << "**************IS THE SIMPLEX {2,0} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {2,0} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
typeVectorVertex otherSimplexVector = {1, 15};
simplexFound = simplexTree.find(otherSimplexVector);
- std::cout << "**************IS THE SIMPLEX {15,1} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {15,1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
typeVectorVertex invSimplexVector = {1, 2, 0};
simplexFound = simplexTree.find(invSimplexVector);
- std::cout << "**************IS THE SIMPLEX {1,2,0} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {1,2,0} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
simplexFound = simplexTree.find({0, 1});
- std::cout << "**************IS THE SIMPLEX {0,1} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {0,1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
- std::cout << "**************COFACES OF {0,1} IN CODIMENSION 1 ARE\n";
+ std::clog << "**************COFACES OF {0,1} IN CODIMENSION 1 ARE\n";
for (auto& simplex : simplexTree.cofaces_simplex_range(simplexTree.find({0, 1}), 1)) {
- for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
- std::cout << "**************STARS OF {0,1} ARE\n";
+ std::clog << "**************STARS OF {0,1} ARE\n";
for (auto& simplex : simplexTree.star_simplex_range(simplexTree.find({0, 1}))) {
- for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
- std::cout << "**************BOUNDARIES OF {0,1,2} ARE\n";
+ std::clog << "**************BOUNDARIES OF {0,1,2} ARE\n";
for (auto& simplex : simplexTree.boundary_simplex_range(simplexTree.find({0, 1, 2}))) {
- for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
return 0;
diff --git a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp
index f6dfa53c..6278efa7 100644
--- a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp
+++ b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp
@@ -42,67 +42,67 @@ int main(int argc, char * const argv[]) {
// insert the graph in the simplex tree as 1-skeleton
st.insert_graph(g);
end = clock();
- std::cout << "Insert the 1-skeleton in the simplex tree in "
+ std::clog << "Insert the 1-skeleton in the simplex tree in "
<< static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
start = clock();
// expand the 1-skeleton until dimension max_dim
st.expansion(max_dim);
end = clock();
- std::cout << "max_dim = " << max_dim << "\n";
- std::cout << "Expand the simplex tree in "
+ std::clog << "max_dim = " << max_dim << "\n";
+ std::clog << "Expand the simplex tree in "
<< static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
- std::cout << "Information of the Simplex Tree: " << std::endl;
- std::cout << " Number of vertices = " << st.num_vertices() << " ";
- std::cout << " Number of simplices = " << st.num_simplices() << std::endl;
- std::cout << std::endl << std::endl;
+ std::clog << "Information of the Simplex Tree: " << std::endl;
+ std::clog << " Number of vertices = " << st.num_vertices() << " ";
+ std::clog << " Number of simplices = " << st.num_simplices() << std::endl;
+ std::clog << std::endl << std::endl;
- std::cout << "Iterator on vertices: ";
+ std::clog << "Iterator on vertices: ";
for (auto vertex : st.complex_vertex_range()) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
- std::cout << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
- std::cout << "Iterator on simplices: " << std::endl;
+ std::clog << "Iterator on simplices: " << std::endl;
for (auto simplex : st.complex_simplex_range()) {
- std::cout << " ";
+ std::clog << " ";
for (auto vertex : st.simplex_vertex_range(simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
- std::cout << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
- std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
- std::cout << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
- std::cout << "Iterator on Simplices in the filtration, and their boundary simplices:" << std::endl;
+ std::clog << "Iterator on Simplices in the filtration, and their boundary simplices:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
for (auto b_simplex : st.boundary_simplex_range(f_simplex)) {
- std::cout << " " << "[" << st.filtration(b_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(b_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(b_simplex)) {
- std::cout << vertex << " ";
+ std::clog << vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
return 0;
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index b7fb9002..889dbd00 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -42,6 +42,20 @@
namespace Gudhi {
+/**
+ * \class Extended_simplex_type Simplex_tree.h gudhi/Simplex_tree.h
+ * \brief Extended simplex type data structure for representing the type of simplices in an extended filtration.
+ *
+ * \details The extended simplex type can be either UP (which means
+ * that the simplex was present originally, and is thus part of the ascending extended filtration), DOWN (which means
+ * that the simplex is the cone of an original simplex, and is thus part of the descending extended filtration) or
+ * EXTRA (which means the simplex is the cone point).
+ *
+ * Details may be found in \cite Cohen-Steiner2009 and section 2.2 in \cite Carriere16.
+ *
+ */
+enum class Extended_simplex_type {UP, DOWN, EXTRA};
+
struct Simplex_tree_options_full_featured;
/**
@@ -87,6 +101,8 @@ class Simplex_tree {
/* \brief Set of nodes sharing a same parent in the simplex tree. */
typedef Simplex_tree_siblings<Simplex_tree, Dictionary> Siblings;
+
+
struct Key_simplex_base_real {
Key_simplex_base_real() : key_(-1) {}
void assign_key(Simplex_key k) { key_ = k; }
@@ -100,6 +116,12 @@ class Simplex_tree {
void assign_key(Simplex_key);
Simplex_key key() const;
};
+ struct Extended_filtration_data {
+ Filtration_value minval;
+ Filtration_value maxval;
+ Extended_filtration_data(){}
+ Extended_filtration_data(Filtration_value vmin, Filtration_value vmax): minval(vmin), maxval(vmax) {}
+ };
typedef typename std::conditional<Options::store_key, Key_simplex_base_real, Key_simplex_base_dummy>::type
Key_simplex_base;
@@ -120,7 +142,10 @@ class Simplex_tree {
public:
/** \brief Handle type to a simplex contained in the simplicial complex represented
- * by the simplex tree. */
+ * by the simplex tree.
+ *
+ * They are essentially pointers into internal vectors, and any insertion or removal
+ * of a simplex may invalidate any other Simplex_handle in the complex. */
typedef typename Dictionary::iterator Simplex_handle;
private:
@@ -233,11 +258,9 @@ class Simplex_tree {
*
* The filtration must be valid. If the filtration has not been initialized yet, the
* method initializes it (i.e. order the simplices). If the complex has changed since the last time the filtration
- * was initialized, please call `initialize_filtration()` to recompute it. */
+ * was initialized, please call `clear_filtration()` or `initialize_filtration()` to recompute it. */
Filtration_simplex_range const& filtration_simplex_range(Indexing_tag = Indexing_tag()) {
- if (filtration_vect_.empty()) {
- initialize_filtration();
- }
+ maybe_initialize_filtration();
return filtration_vect_;
}
@@ -287,7 +310,7 @@ class Simplex_tree {
/** \brief User-defined copy constructor reproduces the whole tree structure. */
Simplex_tree(const Simplex_tree& complex_source) {
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree copy constructor" << std::endl;
+ std::clog << "Simplex_tree copy constructor" << std::endl;
#endif // DEBUG_TRACES
copy_from(complex_source);
}
@@ -297,7 +320,7 @@ class Simplex_tree {
*/
Simplex_tree(Simplex_tree && complex_source) {
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree move constructor" << std::endl;
+ std::clog << "Simplex_tree move constructor" << std::endl;
#endif // DEBUG_TRACES
move_from(complex_source);
@@ -314,7 +337,7 @@ class Simplex_tree {
/** \brief User-defined copy assignment reproduces the whole tree structure. */
Simplex_tree& operator= (const Simplex_tree& complex_source) {
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree copy assignment" << std::endl;
+ std::clog << "Simplex_tree copy assignment" << std::endl;
#endif // DEBUG_TRACES
// Self-assignment detection
if (&complex_source != this) {
@@ -331,7 +354,7 @@ class Simplex_tree {
*/
Simplex_tree& operator=(Simplex_tree&& complex_source) {
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree move assignment" << std::endl;
+ std::clog << "Simplex_tree move assignment" << std::endl;
#endif // DEBUG_TRACES
// Self-assignment detection
if (&complex_source != this) {
@@ -463,7 +486,7 @@ class Simplex_tree {
public:
/** \brief Returns the key associated to a simplex.
*
- * The filtration must be initialized.
+ * If no key has been assigned, returns `null_key()`.
* \pre SimplexTreeOptions::store_key
*/
static Simplex_key key(Simplex_handle sh) {
@@ -473,7 +496,6 @@ class Simplex_tree {
/** \brief Returns the simplex that has index idx in the filtration.
*
* The filtration must be initialized.
- * \pre SimplexTreeOptions::store_key
*/
Simplex_handle simplex(Simplex_key idx) const {
return filtration_vect_[idx];
@@ -509,8 +531,7 @@ class Simplex_tree {
return Dictionary_it(nullptr);
}
- /** \brief Returns a key different for all keys associated to the
- * simplices of the simplicial complex. */
+ /** \brief Returns a fixed number not in the interval [0, `num_simplices()`). */
static Simplex_key null_key() {
return -1;
}
@@ -765,12 +786,7 @@ class Simplex_tree {
if (first == last)
return { null_simplex(), true }; // FIXME: false would make more sense to me.
- // Copy before sorting
- // Thread local is not available on XCode version < V.8 - It will slow down computation
-#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- thread_local
-#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- std::vector<Vertex_handle> copy;
+ thread_local std::vector<Vertex_handle> copy;
copy.clear();
copy.insert(copy.end(), first, last);
std::sort(copy.begin(), copy.end());
@@ -860,15 +876,13 @@ class Simplex_tree {
}
public:
- /** \brief Initializes the filtrations, i.e. sort the
- * simplices according to their order in the filtration and initializes all Simplex_keys.
+ /** \brief Initializes the filtration cache, i.e. sorts the
+ * simplices according to their order in the filtration.
*
- * After calling this method, filtration_simplex_range() becomes valid, and each simplex is
- * assigned a Simplex_key corresponding to its order in the filtration (from 0 to m-1 for a
- * simplicial complex with m simplices).
+ * It always recomputes the cache, even if one already exists.
*
- * Will be automatically called when calling filtration_simplex_range()
- * if the filtration has never been initialized yet. */
+ * Any insertion, deletion or change of filtration value invalidates this cache,
+ * which can be cleared with clear_filtration(). */
void initialize_filtration() {
filtration_vect_.clear();
filtration_vect_.reserve(num_simplices());
@@ -890,6 +904,21 @@ class Simplex_tree {
std::stable_sort(filtration_vect_.begin(), filtration_vect_.end(), is_before_in_filtration(this));
#endif
}
+ /** \brief Initializes the filtration cache if it isn't initialized yet.
+ *
+ * Automatically called by filtration_simplex_range(). */
+ void maybe_initialize_filtration() {
+ if (filtration_vect_.empty()) {
+ initialize_filtration();
+ }
+ }
+ /** \brief Clears the filtration cache produced by initialize_filtration().
+ *
+ * Useful when initialize_filtration() has already been called and we perform an operation
+ * (say an insertion) that invalidates the cache. */
+ void clear_filtration() {
+ filtration_vect_.clear();
+ }
private:
/** Recursive search of cofaces
@@ -1111,6 +1140,7 @@ class Simplex_tree {
* 1 when calling the method. */
void expansion(int max_dim) {
if (max_dim <= 1) return;
+ clear_filtration(); // Drop the cache.
dimension_ = max_dim;
for (Dictionary_it root_it = root_.members_.begin();
root_it != root_.members_.end(); ++root_it) {
@@ -1133,10 +1163,7 @@ class Simplex_tree {
Dictionary_it next = siblings->members().begin();
++next;
-#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- thread_local
-#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
- std::vector<std::pair<Vertex_handle, Node> > inter;
+ thread_local std::vector<std::pair<Vertex_handle, Node> > inter;
for (Dictionary_it s_h = siblings->members().begin();
s_h != siblings->members().end(); ++s_h, ++next) {
Simplex_handle root_sh = find_vertex(s_h->first);
@@ -1324,9 +1351,6 @@ class Simplex_tree {
/** \brief This function ensures that each simplex has a higher filtration value than its faces by increasing the
* filtration values.
* @return True if any filtration value was modified, false if the filtration was already non-decreasing.
- * \post Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()`
- * function is not launching `initialize_filtration()` but returns the filtration modification information. If the
- * complex has changed , please call `initialize_filtration()` to recompute it.
*
* If a simplex has a `NaN` filtration value, it is considered lower than any other defined filtration value.
*/
@@ -1338,6 +1362,8 @@ class Simplex_tree {
modified |= rec_make_filtration_non_decreasing(simplex.second.children());
}
}
+ if(modified)
+ clear_filtration(); // Drop the cache.
return modified;
}
@@ -1377,16 +1403,16 @@ class Simplex_tree {
public:
/** \brief Prune above filtration value given as parameter.
* @param[in] filtration Maximum threshold value.
- * @return The filtration modification information.
- * \post Some simplex tree functions require the filtration to be valid. `prune_above_filtration()`
- * function is not launching `initialize_filtration()` but returns the filtration modification information. If the
- * complex has changed , please call `initialize_filtration()` to recompute it.
+ * @return True if any simplex was removed, false if all simplices already had a value below the threshold.
* \post Note that the dimension of the simplicial complex may be lower after calling `prune_above_filtration()`
* than it was before. However, `upper_bound_dimension()` will return the old value, which remains a valid upper
* bound. If you care, you can call `dimension()` to recompute the exact dimension.
*/
bool prune_above_filtration(Filtration_value filtration) {
- return rec_prune_above_filtration(root(), filtration);
+ bool modified = rec_prune_above_filtration(root(), filtration);
+ if(modified)
+ clear_filtration(); // Drop the cache.
+ return modified;
}
private:
@@ -1432,9 +1458,9 @@ class Simplex_tree {
for (Simplex_handle sh : complex_simplex_range()) {
#ifdef DEBUG_TRACES
for (auto vertex : simplex_vertex_range(sh)) {
- std::cout << " " << vertex;
+ std::clog << " " << vertex;
}
- std::cout << std::endl;
+ std::clog << std::endl;
#endif // DEBUG_TRACES
int sh_dimension = dimension(sh);
@@ -1453,7 +1479,6 @@ class Simplex_tree {
* @param[in] sh Simplex handle on the maximal simplex to remove.
* \pre Please check the simplex has no coface before removing it.
* \exception std::invalid_argument In debug mode, if sh has children.
- * \post Be aware that removing is shifting data in a flat_map (initialize_filtration to be done).
* \post Note that the dimension of the simplicial complex may be lower after calling `remove_maximal_simplex()`
* than it was before. However, `upper_bound_dimension()` will return the old value, which remains a valid upper
* bound. If you care, you can call `dimension()` to recompute the exact dimension.
@@ -1479,6 +1504,109 @@ class Simplex_tree {
}
}
+ /** \brief Retrieve the original filtration value for a given simplex in the Simplex_tree. Since the
+ * computation of extended persistence requires modifying the filtration values, this function can be used
+ * to recover the original values. Moreover, computing extended persistence requires adding new simplices
+ * in the Simplex_tree. Hence, this function also outputs the type of each simplex. It can be either UP (which means
+ * that the simplex was present originally, and is thus part of the ascending extended filtration), DOWN (which means
+ * that the simplex is the cone of an original simplex, and is thus part of the descending extended filtration) or
+ * EXTRA (which means the simplex is the cone point). See the definition of Extended_simplex_type.
+ * Note that if the simplex type is DOWN, the original filtration value returned is that of the
+ * corresponding (non-coned) original simplex.
+ * \pre This function should be called only if `extend_filtration()` has been called first!
+ * \post The output filtration value should match the original filtration value, but may differ slightly
+ * due to the internal transformation (scaling to [-2,-1]) that is performed on the original
+ * filtration values during the computation of extended persistence.
+ * @param[in] f Filtration value of the simplex in the extended (i.e., modified) filtration.
+ * @param[in] efd Structure containing the minimum and maximum values of the original filtration. This is the output of `extend_filtration()`.
+ * @return A pair containing the original filtration value of the simplex as well as the simplex type.
+ */
+ std::pair<Filtration_value, Extended_simplex_type> decode_extended_filtration(Filtration_value f, const Extended_filtration_data& efd){
+ std::pair<Filtration_value, Extended_simplex_type> p;
+ Filtration_value minval = efd.minval;
+ Filtration_value maxval = efd.maxval;
+ if (f >= -2 && f <= -1){
+ p.first = minval + (maxval-minval)*(f + 2); p.second = Extended_simplex_type::UP;
+ }
+ else if (f >= 1 && f <= 2){
+ p.first = minval - (maxval-minval)*(f - 2); p.second = Extended_simplex_type::DOWN;
+ }
+ else{
+ p.first = std::numeric_limits<Filtration_value>::quiet_NaN(); p.second = Extended_simplex_type::EXTRA;
+ }
+ return p;
+ };
+
+ /** \brief Extend filtration for computing extended persistence.
+ * This function only uses the filtration values at the 0-dimensional simplices,
+ * and computes the extended persistence diagram induced by the lower-star filtration
+ * computed with these values.
+ * \post Note that after calling this function, the filtration
+ * values are actually modified. The function `decode_extended_filtration()`
+ * retrieves the original values and outputs the extended simplex type.
+ * \pre Note that this code creates an extra vertex internally, so you should make sure that
+ * the Simplex_tree does not already contain a vertex with the largest possible Vertex_handle.
+ * @return A data structure containing the maximum and minimum values of the original filtration.
+ * It is meant to be provided as input to `decode_extended_filtration()` in order to retrieve
+ * the original filtration values for each simplex.
+ */
+ Extended_filtration_data extend_filtration() {
+ clear_filtration(); // Drop the cache.
+
+ // Compute maximum and minimum of filtration values
+ Vertex_handle maxvert = std::numeric_limits<Vertex_handle>::min();
+ Filtration_value minval = std::numeric_limits<Filtration_value>::infinity();
+ Filtration_value maxval = -std::numeric_limits<Filtration_value>::infinity();
+ for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh){
+ Filtration_value f = this->filtration(sh);
+ minval = std::min(minval, f);
+ maxval = std::max(maxval, f);
+ maxvert = std::max(sh->first, maxvert);
+ }
+
+ GUDHI_CHECK(maxvert < std::numeric_limits<Vertex_handle>::max(), std::invalid_argument("Simplex_tree contains a vertex with the largest Vertex_handle"));
+ maxvert += 1;
+
+ Simplex_tree st_copy = *this;
+
+ // Add point for coning the simplicial complex
+ this->insert_simplex({maxvert}, -3);
+
+ // For each simplex
+ std::vector<Vertex_handle> vr;
+ for (auto sh_copy : st_copy.complex_simplex_range()){
+
+ // Locate simplex
+ vr.clear();
+ for (auto vh : st_copy.simplex_vertex_range(sh_copy)){
+ vr.push_back(vh);
+ }
+ auto sh = this->find(vr);
+
+ // Create cone on simplex
+ vr.push_back(maxvert);
+ if (this->dimension(sh) == 0){
+ Filtration_value v = this->filtration(sh);
+ Filtration_value scaled_v = (v-minval)/(maxval-minval);
+ // Assign ascending value between -2 and -1 to vertex
+ this->assign_filtration(sh, -2 + scaled_v);
+ // Assign descending value between 1 and 2 to cone on vertex
+ this->insert_simplex(vr, 2 - scaled_v);
+ }
+ else{
+ // Assign value -3 to simplex and cone on simplex
+ this->assign_filtration(sh, -3);
+ this->insert_simplex(vr, -3);
+ }
+ }
+
+ // Automatically assign good values for simplices
+ this->make_filtration_non_decreasing();
+
+ // Return the filtration data
+ Extended_filtration_data efd(minval, maxval);
+ return efd;
+ }
+
/** \brief Returns a vertex of `sh` that has the same filtration value as `sh` if it exists, and `null_vertex()` otherwise.
*
* For a lower-star filtration built with `make_filtration_non_decreasing()`, this is a way to invert the process and find out which vertex had its filtration value propagated to `sh`.
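
Taken together, `extend_filtration()` and `decode_extended_filtration()` are meant to bracket a persistence computation: extend the filtration, compute persistence on the modified complex, then map each filtration value back to its original value and simplex type. A minimal sketch of the encode/decode round trip, assuming an illustrative lower-star filtration on a small path graph (the persistence computation itself is elided):

    #include <gudhi/Simplex_tree.h>
    #include <iostream>

    int main() {
      using Simplex_tree = Gudhi::Simplex_tree<>;
      Simplex_tree st;

      // Illustrative lower-star filtration: each edge takes the max of its vertices.
      st.insert_simplex({0}, 1.);
      st.insert_simplex({1}, 2.);
      st.insert_simplex({2}, 3.);
      st.insert_simplex({0, 1}, 2.);
      st.insert_simplex({1, 2}, 3.);

      // Rescale the vertex values into [-2,-1], cone the complex (descending copy
      // in [1,2], cone point at -3) and keep the original min/max for decoding.
      auto efd = st.extend_filtration();

      // ... compute persistence on `st` here ...

      // Map each extended filtration value back to the original value and type
      // (UP, DOWN or EXTRA for the cone point).
      for (auto sh : st.filtration_simplex_range()) {
        auto decoded = st.decode_extended_filtration(st.filtration(sh), efd);
        std::clog << st.filtration(sh) << " -> " << decoded.first
                  << " (type " << static_cast<int>(decoded.second) << ")\n";
      }
      return 0;
    }
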
diff --git a/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp
index c0615b12..229ae46f 100644
--- a/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_ctor_and_move_unit_test.cpp
@@ -30,16 +30,16 @@ void print_simplex_filtration(Simplex_tree& st, const std::string& msg) {
// Required before browsing through filtration values
st.initialize_filtration();
- std::cout << "********************************************************************\n";
- std::cout << "* " << msg << "\n";
- std::cout << "* The complex contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* " << msg << "\n";
+ std::clog << "* The complex contains " << st.num_simplices() << " simplices";
+ std::clog << " - dimension " << st.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " "
+ std::clog << " "
<< "[" << st.filtration(f_simplex) << "] ";
- for (auto vertex : st.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
}
@@ -70,8 +70,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_copy_constructor, Simplex_tree, list_of_te
print_simplex_filtration(st, "Default Simplex_tree is initialized");
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF COPY CONSTRUCTOR" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF COPY CONSTRUCTOR" << std::endl;
Simplex_tree st1(st);
Simplex_tree st2(st);
@@ -82,8 +82,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_copy_constructor, Simplex_tree, list_of_te
BOOST_CHECK(st == st2);
BOOST_CHECK(st1 == st);
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF COPY ASSIGNMENT" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF COPY ASSIGNMENT" << std::endl;
Simplex_tree st3;
// To check there is no memory leak
st3.insert_simplex_and_subfaces({9, 10, 11}, 200.0);
@@ -103,8 +103,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_copy_constructor, Simplex_tree, list_of_te
BOOST_CHECK(st3 == st);
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF MOVE CONSTRUCTOR" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF MOVE CONSTRUCTOR" << std::endl;
Simplex_tree st5(std::move(st1));
print_simplex_filtration(st5, "First move constructor from the default Simplex_tree");
print_simplex_filtration(st1, "First moved Simplex_tree shall be empty");
@@ -122,8 +122,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_copy_constructor, Simplex_tree, list_of_te
BOOST_CHECK(empty_st == st2);
BOOST_CHECK(st1 == empty_st);
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF MOVE ASSIGNMENT" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF MOVE ASSIGNMENT" << std::endl;
Simplex_tree st7;
// To check there is no memory leak
diff --git a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp
index fab25eb8..881a06ae 100644
--- a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp
@@ -55,34 +55,34 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_with_blockers_3, typeST, li
simplex_tree.expansion_with_blockers(3, [&](Simplex_handle sh){
bool result = false;
- std::cout << "Blocker on [";
+ std::clog << "Blocker on [";
// User can loop on the vertices from the given simplex_handle i.e.
for (auto vertex : simplex_tree.simplex_vertex_range(sh)) {
// We block the expansion, if the vertex '6' is in the given list of vertices
if (vertex == 6)
result = true;
- std::cout << vertex << ", ";
+ std::clog << vertex << ", ";
}
- std::cout << "] ( " << simplex_tree.filtration(sh);
+ std::clog << "] ( " << simplex_tree.filtration(sh);
      // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries)
simplex_tree.assign_filtration(sh, simplex_tree.filtration(sh) + 1.);
- std::cout << " + 1. ) = " << result << std::endl;
+ std::clog << " + 1. ) = " << result << std::endl;
return result;
});
- std::cout << "********************************************************************\n";
- std::cout << "simplex_tree_expansion_with_blockers_3\n";
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
- std::cout << " - dimension " << simplex_tree.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "simplex_tree_expansion_with_blockers_3\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::clog << " - dimension " << simplex_tree.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
- std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
BOOST_CHECK(simplex_tree.num_simplices() == 23);
@@ -117,34 +117,34 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_with_blockers_2, typeST, li
simplex_tree.expansion_with_blockers(2, [&](Simplex_handle sh){
bool result = false;
- std::cout << "Blocker on [";
+ std::clog << "Blocker on [";
// User can loop on the vertices from the given simplex_handle i.e.
for (auto vertex : simplex_tree.simplex_vertex_range(sh)) {
// We block the expansion, if the vertex '6' is in the given list of vertices
if (vertex == 6)
result = true;
- std::cout << vertex << ", ";
+ std::clog << vertex << ", ";
}
- std::cout << "] ( " << simplex_tree.filtration(sh);
+ std::clog << "] ( " << simplex_tree.filtration(sh);
      // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries)
simplex_tree.assign_filtration(sh, simplex_tree.filtration(sh) + 1.);
- std::cout << " + 1. ) = " << result << std::endl;
+ std::clog << " + 1. ) = " << result << std::endl;
return result;
});
- std::cout << "********************************************************************\n";
- std::cout << "simplex_tree_expansion_with_blockers_2\n";
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
- std::cout << " - dimension " << simplex_tree.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "simplex_tree_expansion_with_blockers_2\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::clog << " - dimension " << simplex_tree.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
- std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
BOOST_CHECK(simplex_tree.num_simplices() == 22);
@@ -176,17 +176,17 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion, typeST, list_of_tested_var
simplex_tree.insert_simplex({6}, 10.);
simplex_tree.expansion(3);
- std::cout << "********************************************************************\n";
- std::cout << "simplex_tree_expansion_3\n";
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
- std::cout << " - dimension " << simplex_tree.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "simplex_tree_expansion_3\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::clog << " - dimension " << simplex_tree.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
- std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
BOOST_CHECK(simplex_tree.num_simplices() == 24);
@@ -220,17 +220,17 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_2, typeST, list_of_tested_v
simplex_tree.expansion(2);
- std::cout << "********************************************************************\n";
- std::cout << "simplex_tree_expansion_2\n";
- std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
- std::cout << " - dimension " << simplex_tree.dimension() << "\n";
- std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "simplex_tree_expansion_2\n";
+ std::clog << "********************************************************************\n";
+ std::clog << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::clog << " - dimension " << simplex_tree.dimension() << "\n";
+ std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
- std::cout << "(" << vertex << ")";
- std::cout << std::endl;
+ std::clog << "(" << vertex << ")";
+ std::clog << std::endl;
}
BOOST_CHECK(simplex_tree.num_simplices() == 23);
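The hunks above only swap std::cout for std::clog (the buffered stream tied to standard error), presumably to keep test chatter off standard output, but they also illustrate how Simplex_tree::expansion_with_blockers is meant to be used: the callback can veto a candidate simplex by returning true and may reassign its filtration value before deciding. The following is a minimal standalone sketch of that pattern, not the library's test itself; the header name is the usual GUDHI include, and the vertex numbering and filtration values are illustrative only.

#include <gudhi/Simplex_tree.h>
#include <iostream>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  Simplex_tree st;
  // A small 1-skeleton: a triangle {0,1,2} plus two edges towards vertex 6.
  st.insert_simplex_and_subfaces({0, 1}, 0.);
  st.insert_simplex_and_subfaces({0, 2}, 0.);
  st.insert_simplex_and_subfaces({1, 2}, 0.);
  st.insert_simplex_and_subfaces({1, 6}, 0.);
  st.insert_simplex_and_subfaces({2, 6}, 0.);
  // Expand to dimension 2, blocking any candidate simplex that contains vertex 6
  // and bumping the filtration value of the simplices that are kept.
  st.expansion_with_blockers(2, [&](Simplex_tree::Simplex_handle sh) {
    bool block = false;
    for (auto v : st.simplex_vertex_range(sh))
      if (v == 6) block = true;
    if (!block)
      st.assign_filtration(sh, st.filtration(sh) + 1.);
    return block;  // returning true means: do not insert this simplex
  });
  std::clog << st.num_simplices() << " simplices, dimension " << st.dimension() << "\n";
  return 0;
}

With this skeleton the triangle {0,1,2} is inserted (and its value bumped), while {1,2,6} is rejected by the blocker.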
diff --git a/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp
index 28c29489..20007488 100644
--- a/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp
@@ -34,8 +34,8 @@ typedef boost::mpl::list<Simplex_tree<>,
> list_of_tested_variants;
BOOST_AUTO_TEST_CASE_TEMPLATE(iostream_operator, Stree_type, list_of_tested_variants) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "SIMPLEX TREE IOSTREAM OPERATOR" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "SIMPLEX TREE IOSTREAM OPERATOR" << std::endl;
Stree_type st;
@@ -46,15 +46,15 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(iostream_operator, Stree_type, list_of_tested_vari
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The ORIGINAL complex contains " << st.num_simplices() << " simplices - dimension = "
+ std::clog << "The ORIGINAL complex contains " << st.num_simplices() << " simplices - dimension = "
<< st.dimension() << std::endl;
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
// st:
@@ -75,15 +75,15 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(iostream_operator, Stree_type, list_of_tested_vari
simplex_tree_istream >> read_st;
// Display the Simplex_tree
- std::cout << "The READ complex contains " << read_st.num_simplices() << " simplices - dimension = "
+ std::clog << "The READ complex contains " << read_st.num_simplices() << " simplices - dimension = "
<< read_st.dimension() << std::endl;
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : read_st.filtration_simplex_range()) {
- std::cout << " " << "[" << read_st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << read_st.filtration(f_simplex) << "] ";
for (auto vertex : read_st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
BOOST_CHECK(st == read_st);
@@ -91,8 +91,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(iostream_operator, Stree_type, list_of_tested_vari
BOOST_AUTO_TEST_CASE(mini_iostream_operator) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "MINI SIMPLEX TREE IOSTREAM OPERATOR" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "MINI SIMPLEX TREE IOSTREAM OPERATOR" << std::endl;
Simplex_tree<MyOptions> st;
@@ -103,14 +103,14 @@ BOOST_AUTO_TEST_CASE(mini_iostream_operator) {
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The ORIGINAL complex contains " << st.num_simplices() << " simplices - dimension = "
+ std::clog << "The ORIGINAL complex contains " << st.num_simplices() << " simplices - dimension = "
<< st.dimension() << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
// st:
@@ -131,15 +131,15 @@ BOOST_AUTO_TEST_CASE(mini_iostream_operator) {
simplex_tree_istream >> read_st;
// Display the Simplex_tree
- std::cout << "The READ complex contains " << read_st.num_simplices() << " simplices - dimension = "
+ std::clog << "The READ complex contains " << read_st.num_simplices() << " simplices - dimension = "
<< read_st.dimension() << std::endl;
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : read_st.filtration_simplex_range()) {
- std::cout << " " << "[" << read_st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << read_st.filtration(f_simplex) << "] ";
for (auto vertex : read_st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
BOOST_CHECK(st == read_st);
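The iostream test round-trips a Simplex_tree through operator<< and operator>> and checks equality of the two complexes. A condensed sketch of the same round-trip through a std::stringstream (the inserted simplices and filtration values are placeholders) could look like this.

#include <gudhi/Simplex_tree.h>
#include <sstream>
#include <cassert>

int main() {
  Gudhi::Simplex_tree<> st;
  st.insert_simplex_and_subfaces({0, 1, 2}, 0.3);
  st.insert_simplex_and_subfaces({0, 3}, 0.2);

  // Serialize the tree, then rebuild a second tree from the same stream.
  std::stringstream ss;
  ss << st;
  Gudhi::Simplex_tree<> read_st;
  ss >> read_st;

  assert(st == read_st);  // the original and the re-read complex compare equal
  return 0;
}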
diff --git a/src/Simplex_tree/test/simplex_tree_make_filtration_non_decreasing_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_make_filtration_non_decreasing_unit_test.cpp
index 4697ec05..e0e7cadf 100644
--- a/src/Simplex_tree/test/simplex_tree_make_filtration_non_decreasing_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_make_filtration_non_decreasing_unit_test.cpp
@@ -41,7 +41,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing, typeST, list_of_te
/* o */
/* 5 */
- std::cout << "Check default insertion ensures the filtration values are non decreasing" << std::endl;
+ std::clog << "Check default insertion ensures the filtration values are non decreasing" << std::endl;
BOOST_CHECK(!st.make_filtration_non_decreasing());
// Because of non decreasing property of simplex tree, { 0 } , { 1 } and { 0, 1 } are going to be set from value 2.0
@@ -57,7 +57,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing, typeST, list_of_te
// o
// 5
- std::cout << "Check default second insertion ensures the filtration values are non decreasing" << std::endl;
+ std::clog << "Check default second insertion ensures the filtration values are non decreasing" << std::endl;
BOOST_CHECK(!st.make_filtration_non_decreasing());
// Copy original simplex tree
@@ -71,7 +71,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing, typeST, list_of_te
st.assign_filtration(st.find({3,4}), 1.1);
st.assign_filtration(st.find({4,5}), 1.99);
- std::cout << "Check the simplex_tree is rolled back in case of decreasing filtration values" << std::endl;
+ std::clog << "Check the simplex_tree is rolled back in case of decreasing filtration values" << std::endl;
BOOST_CHECK(st.make_filtration_non_decreasing());
BOOST_CHECK(st == st_copy);
@@ -87,7 +87,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing, typeST, list_of_te
// By modifying just the simplex {2}
// {0,1,2}, {1,2} and {0,2} will be modified
- std::cout << "Check the simplex_tree is repaired in case of decreasing filtration values" << std::endl;
+ std::clog << "Check the simplex_tree is repaired in case of decreasing filtration values" << std::endl;
BOOST_CHECK(st.make_filtration_non_decreasing());
BOOST_CHECK(st == st_other);
@@ -100,7 +100,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing, typeST, list_of_te
// Other copy simplex tree
typeST st_other_copy = st;
- std::cout << "Check the simplex_tree is not modified in case of non-decreasing filtration values" << std::endl;
+ std::clog << "Check the simplex_tree is not modified in case of non-decreasing filtration values" << std::endl;
BOOST_CHECK(!st.make_filtration_non_decreasing());
BOOST_CHECK(st == st_other_copy);
@@ -122,11 +122,11 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing_on_nan_values, type
/* o */
/* 5 */
- std::cout << "SPECIFIC CASE:" << std::endl;
- std::cout << "Insertion with NaN values does not ensure the filtration values are non decreasing" << std::endl;
+ std::clog << "SPECIFIC CASE:" << std::endl;
+ std::clog << "Insertion with NaN values does not ensure the filtration values are non decreasing" << std::endl;
st.make_filtration_non_decreasing();
- std::cout << "Check all filtration values are NaN" << std::endl;
+ std::clog << "Check all filtration values are NaN" << std::endl;
for (auto f_simplex : st.complex_simplex_range()) {
BOOST_CHECK(std::isnan(st.filtration(f_simplex)));
}
@@ -138,11 +138,11 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(make_filtration_non_decreasing_on_nan_values, type
st.assign_filtration(st.find({4}), 0.);
st.assign_filtration(st.find({5}), 0.);
- std::cout << "Check make_filtration_non_decreasing is modifying the simplicial complex" << std::endl;
+ std::clog << "Check make_filtration_non_decreasing is modifying the simplicial complex" << std::endl;
BOOST_CHECK(st.make_filtration_non_decreasing());
- std::cout << "Check all filtration values are now defined" << std::endl;
+ std::clog << "Check all filtration values are now defined" << std::endl;
for (auto f_simplex : st.complex_simplex_range()) {
BOOST_CHECK(!std::isnan(st.filtration(f_simplex)));
}
-} \ No newline at end of file
+}
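As the tests in this file show, make_filtration_non_decreasing() returns true when it had to raise some filtration values to restore monotonicity (every simplex at least as high as its faces) and false when the complex was already consistent. A minimal sketch of that contract, with filtration values chosen purely for illustration:

#include <gudhi/Simplex_tree.h>
#include <cassert>

int main() {
  Gudhi::Simplex_tree<> st;
  st.insert_simplex_and_subfaces({0, 1, 2}, 2.0);  // all faces also get 2.0

  // Break monotonicity on purpose: a face ends up above its coface.
  st.assign_filtration(st.find({0, 1}), 3.0);

  bool modified = st.make_filtration_non_decreasing();
  assert(modified);                                  // something was repaired
  assert(st.filtration(st.find({0, 1, 2})) == 3.0);  // the coface was lifted to 3.0

  assert(!st.make_filtration_non_decreasing());      // now already non-decreasing
  return 0;
}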
diff --git a/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp
index 97347992..36b8b3c6 100644
--- a/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp
@@ -32,8 +32,8 @@ using Mini_stree = Simplex_tree<MyOptions>;
using Stree = Simplex_tree<>;
BOOST_AUTO_TEST_CASE(remove_maximal_simplex) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "REMOVE MAXIMAL SIMPLEX" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "REMOVE MAXIMAL SIMPLEX" << std::endl;
Mini_stree st;
@@ -66,21 +66,21 @@ BOOST_AUTO_TEST_CASE(remove_maximal_simplex) {
// 5
#ifdef GUDHI_DEBUG
- std::cout << "Check exception throw in debug mode" << std::endl;
+ std::clog << "Check exception throw in debug mode" << std::endl;
// throws an exception because sh has children
BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({0, 1, 6})), std::invalid_argument);
BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({3})), std::invalid_argument);
BOOST_CHECK(st == st_complete);
#endif
- std::cout << "st.remove_maximal_simplex({0, 2})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 2})" << std::endl;
st.remove_maximal_simplex(st.find({0, 2}));
- std::cout << "st.remove_maximal_simplex({0, 1, 2})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 1, 2})" << std::endl;
st.remove_maximal_simplex(st.find({0, 1, 2}));
- std::cout << "st.remove_maximal_simplex({1, 2})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 2})" << std::endl;
st.remove_maximal_simplex(st.find({1, 2}));
- std::cout << "st.remove_maximal_simplex({2})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({2})" << std::endl;
st.remove_maximal_simplex(st.find({2}));
- std::cout << "st.remove_maximal_simplex({3})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({3})" << std::endl;
st.remove_maximal_simplex(st.find({0, 3}));
BOOST_CHECK(st == st_pruned);
@@ -102,39 +102,39 @@ BOOST_AUTO_TEST_CASE(remove_maximal_simplex) {
// 5
// Remove all simplices containing 7 to test both remove_maximal_simplex cases (when _members is empty or not)
- std::cout << "st.remove_maximal_simplex({0, 1, 6, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 1, 6, 7})" << std::endl;
st.remove_maximal_simplex(st.find({0, 1, 6, 7}));
- std::cout << "st.remove_maximal_simplex({0, 1, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 1, 7})" << std::endl;
st.remove_maximal_simplex(st.find({0, 1, 7}));
- std::cout << "st.remove_maximal_simplex({0, 6, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 6, 7})" << std::endl;
st.remove_maximal_simplex(st.find({0, 6, 7}));
- std::cout << "st.remove_maximal_simplex({0, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 7})" << std::endl;
st.remove_maximal_simplex(st.find({0, 7}));
- std::cout << "st.remove_maximal_simplex({1, 6, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 6, 7})" << std::endl;
st.remove_maximal_simplex(st.find({1, 6, 7}));
- std::cout << "st.remove_maximal_simplex({1, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 7})" << std::endl;
st.remove_maximal_simplex(st.find({1, 7}));
- std::cout << "st.remove_maximal_simplex({6, 7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({6, 7})" << std::endl;
st.remove_maximal_simplex(st.find({6, 7}));
- std::cout << "st.remove_maximal_simplex({7})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({7})" << std::endl;
st.remove_maximal_simplex(st.find({7}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
// Check dimension calls lower_upper_bound_dimension to recompute dimension
BOOST_CHECK(st.dimension() == 2);
BOOST_CHECK(st.upper_bound_dimension() == 2);
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension()
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension()
<< " | st_wo_seven.upper_bound_dimension()=" << st_wo_seven.upper_bound_dimension() << std::endl;
- std::cout << "st.dimension()=" << st.dimension() << " | st_wo_seven.dimension()=" << st_wo_seven.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << " | st_wo_seven.dimension()=" << st_wo_seven.dimension() << std::endl;
BOOST_CHECK(st == st_wo_seven);
}
BOOST_AUTO_TEST_CASE(auto_dimension_set) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "DIMENSION ON REMOVE MAXIMAL SIMPLEX" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "DIMENSION ON REMOVE MAXIMAL SIMPLEX" << std::endl;
Mini_stree st;
@@ -148,80 +148,80 @@ BOOST_AUTO_TEST_CASE(auto_dimension_set) {
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({6, 7, 8, 10})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({6, 7, 8, 10})" << std::endl;
st.remove_maximal_simplex(st.find({6, 7, 8, 10}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({6, 7, 8, 9})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({6, 7, 8, 9})" << std::endl;
st.remove_maximal_simplex(st.find({6, 7, 8, 9}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({1, 2, 3, 4})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 2, 3, 4})" << std::endl;
st.remove_maximal_simplex(st.find({1, 2, 3, 4}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({1, 2, 3, 5})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 2, 3, 5})" << std::endl;
st.remove_maximal_simplex(st.find({1, 2, 3, 5}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 2);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
- std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 5})" << std::endl;
+ std::clog << "st.insert_simplex_and_subfaces({1, 2, 3, 5})" << std::endl;
st.insert_simplex_and_subfaces({1, 2, 3, 5});
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 4})" << std::endl;
+ std::clog << "st.insert_simplex_and_subfaces({1, 2, 3, 4})" << std::endl;
st.insert_simplex_and_subfaces({1, 2, 3, 4});
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({1, 2, 3, 5})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 2, 3, 5})" << std::endl;
st.remove_maximal_simplex(st.find({1, 2, 3, 5}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({1, 2, 3, 4})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({1, 2, 3, 4})" << std::endl;
st.remove_maximal_simplex(st.find({1, 2, 3, 4}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 2);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
- std::cout << "st.insert_simplex_and_subfaces({0, 1, 3, 4})" << std::endl;
+ std::clog << "st.insert_simplex_and_subfaces({0, 1, 3, 4})" << std::endl;
st.insert_simplex_and_subfaces({0, 1, 3, 4});
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.remove_maximal_simplex({0, 1, 3, 4})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 1, 3, 4})" << std::endl;
st.remove_maximal_simplex(st.find({0, 1, 3, 4}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 2);
- std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ std::clog << "st.dimension()=" << st.dimension() << std::endl;
- std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 5})" << std::endl;
+ std::clog << "st.insert_simplex_and_subfaces({1, 2, 3, 5})" << std::endl;
st.insert_simplex_and_subfaces({1, 2, 3, 5});
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 4})" << std::endl;
+ std::clog << "st.insert_simplex_and_subfaces({1, 2, 3, 4})" << std::endl;
st.insert_simplex_and_subfaces({1, 2, 3, 4});
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
@@ -229,7 +229,7 @@ BOOST_AUTO_TEST_CASE(auto_dimension_set) {
// Check you can override the dimension
// This is a limit test case - shall not happen
st.set_dimension(1);
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 1);
// check dimension() and lower_upper_bound_dimension() are not giving the right answer because the dimension is too low
BOOST_CHECK(st.dimension() == 1);
@@ -238,7 +238,7 @@ BOOST_AUTO_TEST_CASE(auto_dimension_set) {
// Check you can override the dimension
// This is a limit test case - shall not happen
st.set_dimension(6);
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 6);
// check dimension() does not launch lower_upper_bound_dimension()
BOOST_CHECK(st.dimension() == 6);
@@ -246,27 +246,27 @@ BOOST_AUTO_TEST_CASE(auto_dimension_set) {
// Reset with the correct value
st.set_dimension(3);
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == 3);
- std::cout << "st.insert_simplex_and_subfaces({0, 1, 2, 3, 4, 5, 6})" << std::endl;
+ std::clog << "st.insert_simplex_and_subfaces({0, 1, 2, 3, 4, 5, 6})" << std::endl;
st.insert_simplex_and_subfaces({0, 1, 2, 3, 4, 5, 6});
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 6);
BOOST_CHECK(st.dimension() == 6);
- std::cout << "st.remove_maximal_simplex({0, 1, 2, 3, 4, 5, 6})" << std::endl;
+ std::clog << "st.remove_maximal_simplex({0, 1, 2, 3, 4, 5, 6})" << std::endl;
st.remove_maximal_simplex(st.find({0, 1, 2, 3, 4, 5, 6}));
- std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 6);
BOOST_CHECK(st.dimension() == 5);
}
BOOST_AUTO_TEST_CASE(prune_above_filtration) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "PRUNE ABOVE FILTRATION" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "PRUNE ABOVE FILTRATION" << std::endl;
Stree st;
@@ -321,15 +321,15 @@ BOOST_AUTO_TEST_CASE(prune_above_filtration) {
BOOST_CHECK(!simplex_is_changed);
// Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
- std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << "The complex contains " << st.num_simplices() << " simplices";
+ std::clog << " - dimension " << st.dimension() << std::endl;
+ std::clog << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
// Check the pruned cases
@@ -340,15 +340,15 @@ BOOST_AUTO_TEST_CASE(prune_above_filtration) {
BOOST_CHECK(simplex_is_changed);
// Display the Simplex_tree
- std::cout << "The complex pruned at 2.5 contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
+ std::clog << "The complex pruned at 2.5 contains " << st.num_simplices() << " simplices";
+ std::clog << " - dimension " << st.dimension() << std::endl;
simplex_is_changed = st.prune_above_filtration(2.0);
if (simplex_is_changed)
st.initialize_filtration();
- std::cout << "The complex pruned at 2.0 contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
+ std::clog << "The complex pruned at 2.0 contains " << st.num_simplices() << " simplices";
+ std::clog << " - dimension " << st.dimension() << std::endl;
BOOST_CHECK(st == st_pruned);
BOOST_CHECK(!simplex_is_changed);
@@ -360,12 +360,12 @@ BOOST_AUTO_TEST_CASE(prune_above_filtration) {
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices";
- std::cout << " - upper_bound_dimension " << st.upper_bound_dimension() << std::endl;
+ std::clog << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices";
+ std::clog << " - upper_bound_dimension " << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == 3);
BOOST_CHECK(st.dimension() == -1);
- std::cout << "upper_bound_dimension=" << st.upper_bound_dimension() << std::endl;
+ std::clog << "upper_bound_dimension=" << st.upper_bound_dimension() << std::endl;
BOOST_CHECK(st.upper_bound_dimension() == -1);
BOOST_CHECK(st == st_empty);
@@ -380,8 +380,8 @@ BOOST_AUTO_TEST_CASE(prune_above_filtration) {
}
BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "MINI PRUNE ABOVE FILTRATION" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "MINI PRUNE ABOVE FILTRATION" << std::endl;
Mini_stree st;
@@ -402,7 +402,7 @@ BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
BOOST_CHECK(st.num_simplices() == 27);
// Test case to the limit - With these options, there is no filtration, which means filtration is 0
@@ -410,7 +410,7 @@ BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
if (simplex_is_changed)
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The complex pruned at 1.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << "The complex pruned at 1.0 contains " << st.num_simplices() << " simplices" << std::endl;
BOOST_CHECK(!simplex_is_changed);
BOOST_CHECK(st.num_simplices() == 27);
@@ -418,7 +418,7 @@ BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
if (simplex_is_changed)
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices" << std::endl;
BOOST_CHECK(!simplex_is_changed);
BOOST_CHECK(st.num_simplices() == 27);
@@ -427,11 +427,11 @@ BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
if (simplex_is_changed)
st.initialize_filtration();
// Display the Simplex_tree
- std::cout << "The complex pruned at -1.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << "The complex pruned at -1.0 contains " << st.num_simplices() << " simplices" << std::endl;
BOOST_CHECK(simplex_is_changed);
BOOST_CHECK(st.num_simplices() == 0);
// Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
}
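The two test files above exercise remove_maximal_simplex(), which only accepts a simplex without cofaces (in debug mode it throws std::invalid_argument otherwise), and prune_above_filtration(), which returns whether anything was removed; both can leave upper_bound_dimension() stale until dimension() recomputes it. A compact sketch of those two calls on a toy complex, with made-up filtration values:

#include <gudhi/Simplex_tree.h>
#include <cassert>

int main() {
  Gudhi::Simplex_tree<> st;
  st.insert_simplex_and_subfaces({0, 1, 2}, 1.0);
  st.insert_simplex_and_subfaces({0, 3}, 3.0);

  // {0, 1, 2} has no coface, so it is maximal and can be removed.
  st.remove_maximal_simplex(st.find({0, 1, 2}));
  assert(st.dimension() == 1);  // the dimension is recomputed on demand

  // Remove every simplex whose filtration value exceeds 2.0.
  bool changed = st.prune_above_filtration(2.0);
  assert(changed);
  assert(st.find({0, 3}) == st.null_simplex());  // the edge at 3.0 is gone
  return 0;
}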
diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
index 3872c4ca..9b5fa8fe 100644
--- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
@@ -48,22 +48,22 @@ void test_empty_simplex_tree(typeST& tst) {
template<class typeST>
void test_iterators_on_empty_simplex_tree(typeST& tst) {
- std::cout << "Iterator on vertices: " << std::endl;
+ std::clog << "Iterator on vertices: " << std::endl;
for (auto vertex : tst.complex_vertex_range()) {
- std::cout << "vertice:" << vertex << std::endl;
+ std::clog << "vertice:" << vertex << std::endl;
BOOST_CHECK(false); // shall be empty
}
- std::cout << "Iterator on simplices: " << std::endl;
+ std::clog << "Iterator on simplices: " << std::endl;
for (auto simplex : tst.complex_simplex_range()) {
BOOST_CHECK(simplex != simplex); // shall be empty - to remove warning of non-used simplex
}
- std::cout
+ std::clog
<< "Iterator on Simplices in the filtration, with [filtration value]:"
<< std::endl;
for (auto f_simplex : tst.filtration_simplex_range()) {
BOOST_CHECK(false); // shall be empty
- std::cout << "test_iterators_on_empty_simplex_tree - filtration="
+ std::clog << "test_iterators_on_empty_simplex_tree - filtration="
<< tst.filtration(f_simplex) << std::endl;
}
}
@@ -72,15 +72,15 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_when_empty, typeST, list_of_tested_va
typedef std::pair<typename typeST::Simplex_handle, bool> typePairSimplexBool;
typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF DEFAULT CONSTRUCTOR" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF DEFAULT CONSTRUCTOR" << std::endl;
typeST st;
test_empty_simplex_tree(st);
test_iterators_on_empty_simplex_tree(st);
// TEST OF EMPTY INSERTION
- std::cout << "TEST OF EMPTY INSERTION" << std::endl;
+ std::clog << "TEST OF EMPTY INSERTION" << std::endl;
typeVectorVertex simplexVectorEmpty;
BOOST_CHECK(simplexVectorEmpty.empty() == true);
typePairSimplexBool returnEmptyValue = st.insert_simplex(simplexVectorEmpty, 0.0);
@@ -98,8 +98,8 @@ bool AreAlmostTheSame(float a, float b) {
BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_from_file, typeST, list_of_tested_variants) {
// TEST OF INSERTION
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF SIMPLEX TREE FROM A FILE" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF SIMPLEX TREE FROM A FILE" << std::endl;
typeST st;
std::string inputFile("simplex_tree_for_unit_test.txt");
@@ -107,8 +107,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_from_file, typeST, list_of_tested_var
simplex_tree_stream >> st;
// Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << st.dimension() << std::endl;
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << " - dimension " << st.dimension() << std::endl;
// Check
BOOST_CHECK(st.num_simplices() == 143353);
@@ -134,13 +134,13 @@ template<class typeST, class typeSimplex>
void test_simplex_tree_contains(typeST& simplexTree, typeSimplex& simplex, int pos) {
auto f_simplex = simplexTree.filtration_simplex_range().begin() + pos;
- std::cout << "test_simplex_tree_contains - filtration=" << simplexTree.filtration(*f_simplex) << "||" << simplex.second << std::endl;
+ std::clog << "test_simplex_tree_contains - filtration=" << simplexTree.filtration(*f_simplex) << "||" << simplex.second << std::endl;
BOOST_CHECK(AreAlmostTheSame(simplexTree.filtration(*f_simplex), simplex.second));
int simplexIndex = simplex.first.size() - 1;
std::sort(simplex.first.begin(), simplex.first.end()); // if the simplex wasn't sorted, the next test could fail
for (auto vertex : simplexTree.simplex_vertex_range(*f_simplex)) {
- std::cout << "test_simplex_tree_contains - vertex=" << vertex << "||" << simplex.first.at(simplexIndex) << std::endl;
+ std::clog << "test_simplex_tree_contains - vertex=" << vertex << "||" << simplex.first.at(simplexIndex) << std::endl;
BOOST_CHECK(vertex == simplex.first.at(simplexIndex));
BOOST_CHECK(simplexIndex >= 0);
simplexIndex--;
@@ -163,7 +163,7 @@ void set_and_test_simplex_tree_dim_fil(typeST& simplexTree, int vectorSize, cons
if (vectorSize > dim_max + 1) {
dim_max = vectorSize - 1;
simplexTree.set_dimension(dim_max);
- std::cout << " set_and_test_simplex_tree_dim_fil - dim_max=" << dim_max
+ std::clog << " set_and_test_simplex_tree_dim_fil - dim_max=" << dim_max
<< std::endl;
}
@@ -193,12 +193,12 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
dim_max = -1;
// TEST OF INSERTION
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF INSERTION" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF INSERTION" << std::endl;
typeST st;
// ++ FIRST
- std::cout << " - INSERT 0" << std::endl;
+ std::clog << " - INSERT 0" << std::endl;
typeVectorVertex firstSimplexVector{0};
BOOST_CHECK(firstSimplexVector.size() == 1);
typeSimplex firstSimplex = std::make_pair(firstSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
@@ -209,7 +209,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 1);
// ++ SECOND
- std::cout << " - INSERT 1" << std::endl;
+ std::clog << " - INSERT 1" << std::endl;
typeVectorVertex secondSimplexVector{1};
BOOST_CHECK(secondSimplexVector.size() == 1);
typeSimplex secondSimplex = std::make_pair(secondSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
@@ -220,7 +220,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 2);
// ++ THIRD
- std::cout << " - INSERT (0,1)" << std::endl;
+ std::clog << " - INSERT (0,1)" << std::endl;
typeVectorVertex thirdSimplexVector{0, 1};
BOOST_CHECK(thirdSimplexVector.size() == 2);
typeSimplex thirdSimplex = std::make_pair(thirdSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
@@ -231,7 +231,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 2); // Not incremented !!
// ++ FOURTH
- std::cout << " - INSERT 2" << std::endl;
+ std::clog << " - INSERT 2" << std::endl;
typeVectorVertex fourthSimplexVector{2};
BOOST_CHECK(fourthSimplexVector.size() == 1);
typeSimplex fourthSimplex = std::make_pair(fourthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
@@ -242,7 +242,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 3);
// ++ FIFTH
- std::cout << " - INSERT (2,0)" << std::endl;
+ std::clog << " - INSERT (2,0)" << std::endl;
typeVectorVertex fifthSimplexVector{2, 0};
BOOST_CHECK(fifthSimplexVector.size() == 2);
typeSimplex fifthSimplex = std::make_pair(fifthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
@@ -253,7 +253,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 3); // Not incremented !!
// ++ SIXTH
- std::cout << " - INSERT (2,1)" << std::endl;
+ std::clog << " - INSERT (2,1)" << std::endl;
typeVectorVertex sixthSimplexVector{2, 1};
BOOST_CHECK(sixthSimplexVector.size() == 2);
typeSimplex sixthSimplex = std::make_pair(sixthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
@@ -264,7 +264,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 3); // Not incremented !!
// ++ SEVENTH
- std::cout << " - INSERT (2,1,0)" << std::endl;
+ std::clog << " - INSERT (2,1,0)" << std::endl;
typeVectorVertex seventhSimplexVector{2, 1, 0};
BOOST_CHECK(seventhSimplexVector.size() == 3);
typeSimplex seventhSimplex = std::make_pair(seventhSimplexVector, Filtration_value(THIRD_FILTRATION_VALUE));
@@ -275,7 +275,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 3); // Not incremented !!
// ++ EIGHTH
- std::cout << " - INSERT 3" << std::endl;
+ std::clog << " - INSERT 3" << std::endl;
typeVectorVertex eighthSimplexVector{3};
BOOST_CHECK(eighthSimplexVector.size() == 1);
typeSimplex eighthSimplex = std::make_pair(eighthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
@@ -286,7 +286,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 4);
// ++ NINTH
- std::cout << " - INSERT (3,0)" << std::endl;
+ std::clog << " - INSERT (3,0)" << std::endl;
typeVectorVertex ninethSimplexVector{3, 0};
BOOST_CHECK(ninethSimplexVector.size() == 2);
typeSimplex ninethSimplex = std::make_pair(ninethSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
@@ -297,7 +297,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(st.num_vertices() == (size_t) 4); // Not incremented !!
// ++ TENTH
- std::cout << " - INSERT 0 (already inserted)" << std::endl;
+ std::clog << " - INSERT 0 (already inserted)" << std::endl;
typeVectorVertex tenthSimplexVector{0};
BOOST_CHECK(tenthSimplexVector.size() == 1);
// With a different filtration value
@@ -308,12 +308,12 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
// Simplex_handle = boost::container::flat_map< typeST::Vertex_handle, Node >::iterator
typename typeST::Simplex_handle shReturned = returnValue.first;
BOOST_CHECK(shReturned == typename typeST::Simplex_handle(nullptr));
- std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ std::clog << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == (size_t) 4); // Not incremented !!
BOOST_CHECK(st.dimension() == dim_max);
// ++ ELEVENTH
- std::cout << " - INSERT (2,1,0) (already inserted)" << std::endl;
+ std::clog << " - INSERT (2,1,0) (already inserted)" << std::endl;
typeVectorVertex eleventhSimplexVector{2, 1, 0};
BOOST_CHECK(eleventhSimplexVector.size() == 3);
typeSimplex eleventhSimplex = std::make_pair(eleventhSimplexVector, Filtration_value(FOURTH_FILTRATION_VALUE));
@@ -343,35 +343,35 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
// [0.2] 3 0
// [0.3] 2 1 0
// !! Be careful, simplex are sorted by filtration value on insertion !!
- std::cout << "simplex_tree_insertion - first - 0" << std::endl;
+ std::clog << "simplex_tree_insertion - first - 0" << std::endl;
test_simplex_tree_contains(st, firstSimplex, 0); // (0) -> 0
- std::cout << "simplex_tree_insertion - second - 1" << std::endl;
+ std::clog << "simplex_tree_insertion - second - 1" << std::endl;
test_simplex_tree_contains(st, secondSimplex, 1); // (1) -> 1
- std::cout << "simplex_tree_insertion - third - 4" << std::endl;
+ std::clog << "simplex_tree_insertion - third - 4" << std::endl;
test_simplex_tree_contains(st, thirdSimplex, 4); // (0,1) -> 4
- std::cout << "simplex_tree_insertion - fourth - 2" << std::endl;
+ std::clog << "simplex_tree_insertion - fourth - 2" << std::endl;
test_simplex_tree_contains(st, fourthSimplex, 2); // (2) -> 2
- std::cout << "simplex_tree_insertion - fifth - 5" << std::endl;
+ std::clog << "simplex_tree_insertion - fifth - 5" << std::endl;
test_simplex_tree_contains(st, fifthSimplex, 5); // (2,0) -> 5
- std::cout << "simplex_tree_insertion - sixth - 6" << std::endl;
+ std::clog << "simplex_tree_insertion - sixth - 6" << std::endl;
test_simplex_tree_contains(st, sixthSimplex, 6); //(2,1) -> 6
- std::cout << "simplex_tree_insertion - seventh - 8" << std::endl;
+ std::clog << "simplex_tree_insertion - seventh - 8" << std::endl;
test_simplex_tree_contains(st, seventhSimplex, 8); // (2,1,0) -> 8
- std::cout << "simplex_tree_insertion - eighth - 3" << std::endl;
+ std::clog << "simplex_tree_insertion - eighth - 3" << std::endl;
test_simplex_tree_contains(st, eighthSimplex, 3); // (3) -> 3
- std::cout << "simplex_tree_insertion - nineth - 7" << std::endl;
+ std::clog << "simplex_tree_insertion - nineth - 7" << std::endl;
test_simplex_tree_contains(st, ninethSimplex, 7); // (3,0) -> 7
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << st.dimension() << std::endl;
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << " - dimension " << st.dimension() << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
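As the comment in this test points out, filtration_simplex_range() iterates simplices sorted by filtration value, with faces always appearing before their cofaces, not in insertion order. A small sketch of insert_simplex (which, unlike insert_simplex_and_subfaces, expects the faces to be present already) followed by that iteration; the values are illustrative:

#include <gudhi/Simplex_tree.h>
#include <iostream>

int main() {
  Gudhi::Simplex_tree<> st;
  st.insert_simplex({0}, 0.1);
  st.insert_simplex({1}, 0.1);
  st.insert_simplex({0, 1}, 0.2);  // both endpoints were inserted first

  // Iteration follows filtration values, faces before cofaces.
  for (auto sh : st.filtration_simplex_range()) {
    std::clog << "[" << st.filtration(sh) << "] ";
    for (auto v : st.simplex_vertex_range(sh)) std::clog << v << " ";
    std::clog << "\n";
  }
  return 0;
}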
@@ -380,14 +380,14 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
typedef std::pair<typename typeST::Simplex_handle, bool> typePairSimplexBool;
typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
typedef std::pair<typeVectorVertex, typename typeST::Filtration_value> typeSimplex;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST OF RECURSIVE INSERTION" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST OF RECURSIVE INSERTION" << std::endl;
typeST st;
typePairSimplexBool returnValue;
int position = 0;
// ++ FIRST
- std::cout << " - INSERT (2,1,0)" << std::endl;
+ std::clog << " - INSERT (2,1,0)" << std::endl;
typeVectorVertex SimplexVector1{2, 1, 0};
BOOST_CHECK(SimplexVector1.size() == 3);
returnValue = st.insert_simplex_and_subfaces(SimplexVector1);
@@ -400,13 +400,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
std::sort(SimplexVector1.begin(), SimplexVector1.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
- std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector1[position] << std::endl;
+ std::clog << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector1[position] << std::endl;
BOOST_CHECK(vertex == SimplexVector1[position]);
position++;
}
// ++ SECOND
- std::cout << " - INSERT 3" << std::endl;
+ std::clog << " - INSERT 3" << std::endl;
typeVectorVertex SimplexVector2{3};
BOOST_CHECK(SimplexVector2.size() == 1);
returnValue = st.insert_simplex_and_subfaces(SimplexVector2);
@@ -419,13 +419,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
std::sort(SimplexVector2.begin(), SimplexVector2.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
- std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector2[position] << std::endl;
+ std::clog << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector2[position] << std::endl;
BOOST_CHECK(vertex == SimplexVector2[position]);
position++;
}
// ++ THIRD
- std::cout << " - INSERT (0,3)" << std::endl;
+ std::clog << " - INSERT (0,3)" << std::endl;
typeVectorVertex SimplexVector3{3, 0};
BOOST_CHECK(SimplexVector3.size() == 2);
returnValue = st.insert_simplex_and_subfaces(SimplexVector3);
@@ -438,13 +438,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
std::sort(SimplexVector3.begin(), SimplexVector3.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
- std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector3[position] << std::endl;
+ std::clog << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector3[position] << std::endl;
BOOST_CHECK(vertex == SimplexVector3[position]);
position++;
}
// ++ FOURTH
- std::cout << " - INSERT (1,0) (already inserted)" << std::endl;
+ std::clog << " - INSERT (1,0) (already inserted)" << std::endl;
typeVectorVertex SimplexVector4{1, 0};
BOOST_CHECK(SimplexVector4.size() == 2);
returnValue = st.insert_simplex_and_subfaces(SimplexVector4);
@@ -455,7 +455,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
BOOST_CHECK(false == returnValue.second);
// ++ FIFTH
- std::cout << " - INSERT (3,4,5)" << std::endl;
+ std::clog << " - INSERT (3,4,5)" << std::endl;
typeVectorVertex SimplexVector5{3, 4, 5};
BOOST_CHECK(SimplexVector5.size() == 3);
returnValue = st.insert_simplex_and_subfaces(SimplexVector5);
@@ -468,13 +468,13 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
std::sort(SimplexVector5.begin(), SimplexVector5.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
- std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector5[position] << std::endl;
+ std::clog << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector5[position] << std::endl;
BOOST_CHECK(vertex == SimplexVector5[position]);
position++;
}
// ++ SIXTH
- std::cout << " - INSERT (0,1,6,7)" << std::endl;
+ std::clog << " - INSERT (0,1,6,7)" << std::endl;
typeVectorVertex SimplexVector6{0, 1, 6, 7};
BOOST_CHECK(SimplexVector6.size() == 4);
returnValue = st.insert_simplex_and_subfaces(SimplexVector6);
@@ -487,7 +487,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
std::sort(SimplexVector6.begin(), SimplexVector6.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
- std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector6[position] << std::endl;
+ std::clog << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector6[position] << std::endl;
BOOST_CHECK(vertex == SimplexVector6[position]);
position++;
}
@@ -525,63 +525,63 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// ------------------------------------------------------------------------------------------------------------------
typeVectorVertex simpleSimplexVector{1};
typename typeST::Simplex_handle simplexFound = st.find(simpleSimplexVector);
- std::cout << "**************IS THE SIMPLEX {1} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != st.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
// Check it is found
BOOST_CHECK(simplexFound != st.null_simplex());
typeVectorVertex unknownSimplexVector{15};
simplexFound = st.find(unknownSimplexVector);
- std::cout << "**************IS THE SIMPLEX {15} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {15} IN THE SIMPLEX TREE ?\n";
if (simplexFound != st.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
// Check it is NOT found
BOOST_CHECK(simplexFound == st.null_simplex());
simplexFound = st.find(SimplexVector6);
- std::cout << "**************IS THE SIMPLEX {0,1,6,7} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {0,1,6,7} IN THE SIMPLEX TREE ?\n";
if (simplexFound != st.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
// Check it is found
BOOST_CHECK(simplexFound != st.null_simplex());
typeVectorVertex otherSimplexVector{1, 15};
simplexFound = st.find(otherSimplexVector);
- std::cout << "**************IS THE SIMPLEX {15,1} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {15,1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != st.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
// Check it is NOT found
BOOST_CHECK(simplexFound == st.null_simplex());
typeVectorVertex invSimplexVector{1, 2, 0};
simplexFound = st.find(invSimplexVector);
- std::cout << "**************IS THE SIMPLEX {1,2,0} IN THE SIMPLEX TREE ?\n";
+ std::clog << "**************IS THE SIMPLEX {1,2,0} IN THE SIMPLEX TREE ?\n";
if (simplexFound != st.null_simplex())
- std::cout << "***+ YES IT IS!\n";
+ std::clog << "***+ YES IT IS!\n";
else
- std::cout << "***- NO IT ISN'T\n";
+ std::clog << "***- NO IT ISN'T\n";
// Check it is found
BOOST_CHECK(simplexFound != st.null_simplex());
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << st.dimension() << std::endl;
- std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ std::clog << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ std::clog << " - dimension " << st.dimension() << std::endl;
+ std::clog << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::clog << " " << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
+ std::clog << (int) vertex << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
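This test also documents two useful behaviours: insert_simplex_and_subfaces() inserts every missing face of the given simplex in one call, and find() locates a simplex regardless of the order in which the query lists its vertices, returning null_simplex() when the simplex is absent. A short sketch of that pattern with hypothetical vertices:

#include <gudhi/Simplex_tree.h>
#include <cassert>

int main() {
  Gudhi::Simplex_tree<> st;

  // One call inserts {2,1,0} together with all its edges and vertices.
  st.insert_simplex_and_subfaces({2, 1, 0});
  assert(st.num_simplices() == 7);  // 3 vertices + 3 edges + 1 triangle

  // find() does not care about the vertex order of the query.
  assert(st.find({0, 2, 1}) != st.null_simplex());
  assert(st.find({15}) == st.null_simplex());
  return 0;
}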
@@ -595,17 +595,17 @@ void test_cofaces(typeST& st, const std::vector<Vertex_handle>& expected, int di
for (auto simplex = cofaces.begin(); simplex != cofaces.end(); ++simplex) {
typename typeST::Simplex_vertex_range rg = st.simplex_vertex_range(*simplex);
for (auto vertex = rg.begin(); vertex != rg.end(); ++vertex) {
- std::cout << "(" << *vertex << ")";
+ std::clog << "(" << *vertex << ")";
}
- std::cout << std::endl;
+ std::clog << std::endl;
BOOST_CHECK(std::find(res.begin(), res.end(), *simplex) != res.end());
}
}
BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_variants) {
typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST COFACE ALGORITHM" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST COFACE ALGORITHM" << std::endl;
typeST st;
typeVectorVertex SimplexVector{2, 1, 0};
@@ -631,7 +631,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
std::vector<typename typeST::Vertex_handle> simplex_result;
std::vector<typename typeST::Simplex_handle> result;
- std::cout << "First test - Star of (3):" << std::endl;
+ std::clog << "First test - Star of (3):" << std::endl;
simplex_result = {3};
result.push_back(st.find(simplex_result));
@@ -656,7 +656,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
vertex.push_back(1);
vertex.push_back(7);
- std::cout << "Second test - Star of (1,7): " << std::endl;
+ std::clog << "Second test - Star of (1,7): " << std::endl;
simplex_result = {7, 1};
result.push_back(st.find(simplex_result));
@@ -673,7 +673,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
test_cofaces(st, vertex, 0, result);
result.clear();
- std::cout << "Third test - 2-dimension Cofaces of simplex(1,7) : " << std::endl;
+ std::clog << "Third test - 2-dimension Cofaces of simplex(1,7) : " << std::endl;
simplex_result = {7, 1, 0};
result.push_back(st.find(simplex_result));
@@ -684,15 +684,15 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
test_cofaces(st, vertex, 1, result);
result.clear();
- std::cout << "Cofaces with a codimension too high (codimension + vetices > tree.dimension) :" << std::endl;
+ std::clog << "Cofaces with a codimension too high (codimension + vetices > tree.dimension) :" << std::endl;
test_cofaces(st, vertex, 5, result);
- //std::cout << "Cofaces with an empty codimension" << std::endl;
+ //std::clog << "Cofaces with an empty codimension" << std::endl;
//test_cofaces(st, vertex, -1, result);
- // std::cout << "Cofaces in an empty simplex tree" << std::endl;
+ // std::clog << "Cofaces in an empty simplex tree" << std::endl;
// typeST empty_tree;
// test_cofaces(empty_tree, vertex, 1, result);
- //std::cout << "Cofaces of an empty simplex" << std::endl;
+ //std::clog << "Cofaces of an empty simplex" << std::endl;
//vertex.clear();
// test_cofaces(st, vertex, 1, result);
@@ -700,8 +700,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_variants) {
typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST COPY MOVE CONSTRUCTORS" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST COPY MOVE CONSTRUCTORS" << std::endl;
typeST st;
typeVectorVertex SimplexVector{2, 1, 0};
@@ -725,11 +725,11 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_
/* o */
/* 5 */
- std::cout << "Printing st - address = " << &st << std::endl;
+ std::clog << "Printing st - address = " << &st << std::endl;
// Copy constructor
typeST st_copy = st;
- std::cout << "Printing a copy of st - address = " << &st_copy << std::endl;
+ std::clog << "Printing a copy of st - address = " << &st_copy << std::endl;
// Check the data are the same
BOOST_CHECK(st == st_copy);
@@ -738,7 +738,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_
// Move constructor
typeST st_move = std::move(st);
- std::cout << "Printing a move of st - address = " << &st_move << std::endl;
+ std::clog << "Printing a move of st - address = " << &st_move << std::endl;
// Check the data are the same
BOOST_CHECK(st_move == st_copy);
@@ -753,7 +753,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_
BOOST_CHECK(st.num_simplices() == 0);
BOOST_CHECK(st.num_vertices() == (size_t)0);
- std::cout << "Printing st once again- address = " << &st << std::endl;
+ std::clog << "Printing st once again- address = " << &st << std::endl;
}
template<class typeST>
@@ -768,22 +768,22 @@ void test_simplex_is_vertex(typeST& st, typename typeST::Simplex_handle sh, type
BOOST_AUTO_TEST_CASE(non_contiguous) {
typedef Simplex_tree<> typeST;
typedef typeST::Simplex_handle Simplex_handle;
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST NON-CONTIGUOUS VERTICES" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST NON-CONTIGUOUS VERTICES" << std::endl;
typeST st;
typeST::Vertex_handle e[] = {3,-7};
- std::cout << "Insert" << std::endl;
+ std::clog << "Insert" << std::endl;
st.insert_simplex_and_subfaces(e);
BOOST_CHECK(st.num_vertices() == 2);
BOOST_CHECK(st.num_simplices() == 3);
- std::cout << "Find" << std::endl;
+ std::clog << "Find" << std::endl;
Simplex_handle sh = st.find(e);
BOOST_CHECK(sh != st.null_simplex());
- std::cout << "Endpoints" << std::endl;
+ std::clog << "Endpoints" << std::endl;
auto p = st.endpoints(sh);
test_simplex_is_vertex(st, p.first, 3);
test_simplex_is_vertex(st, p.second, -7);
- std::cout << "Boundary" << std::endl;
+ std::clog << "Boundary" << std::endl;
auto&& b = st.boundary_simplex_range(sh);
auto i = std::begin(b);
test_simplex_is_vertex(st, *i, -7);
@@ -812,8 +812,8 @@ typedef boost::mpl::list<boost::adjacency_list<boost::setS, boost::vecS, boost::
boost::property<edge_filtration_t, double>>> list_of_graph_variants;
BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insert_graph, Graph, list_of_graph_variants) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "INSERT GRAPH" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "INSERT GRAPH" << std::endl;
Graph g(3);
// filtration value 0 everywhere
@@ -840,18 +840,18 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insert_graph, Graph, list_of_graph_va
st2.insert_graph(g);
BOOST_CHECK(st2.num_simplices() == 6);
- std::cout << "st1 is" << std::endl;
- std::cout << st1 << std::endl;
+ std::clog << "st1 is" << std::endl;
+ std::clog << st1 << std::endl;
- std::cout << "st2 is" << std::endl;
- std::cout << st2 << std::endl;
+ std::clog << "st2 is" << std::endl;
+ std::clog << st2 << std::endl;
BOOST_CHECK(st1 == st2);
}
BOOST_AUTO_TEST_CASE_TEMPLATE(insert_duplicated_vertices, typeST, list_of_tested_variants) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "TEST INSERT DUPLICATED VERTICES" << std::endl;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST INSERT DUPLICATED VERTICES" << std::endl;
typeST st;
typename typeST::Simplex_handle sh;
@@ -859,25 +859,25 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(insert_duplicated_vertices, typeST, list_of_tested
std::tie(sh, success) = st.insert_simplex_and_subfaces({1});
BOOST_CHECK(success);
BOOST_CHECK(sh != st.null_simplex());
- std::cout << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
+ std::clog << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
BOOST_CHECK(st.dimension(sh) == 0);
std::tie(sh, success) = st.insert_simplex_and_subfaces({2, 2});
BOOST_CHECK(success);
BOOST_CHECK(sh != st.null_simplex());
- std::cout << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
+ std::clog << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
BOOST_CHECK(st.dimension(sh) == 0);
std::tie(sh, success) = st.insert_simplex_and_subfaces({3, 3, 3});
BOOST_CHECK(success);
BOOST_CHECK(sh != st.null_simplex());
- std::cout << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
+ std::clog << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
BOOST_CHECK(st.dimension(sh) == 0);
std::tie(sh, success) = st.insert_simplex_and_subfaces({4, 4, 4, 4});
BOOST_CHECK(success);
BOOST_CHECK(sh != st.null_simplex());
- std::cout << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
+ std::clog << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
BOOST_CHECK(st.dimension(sh) == 0);
- std::cout << "dimension =" << st.dimension() << " - num_vertices = " << st.num_vertices()
+ std::clog << "dimension =" << st.dimension() << " - num_vertices = " << st.num_vertices()
<< " - num_simplices = " << st.num_simplices() << std::endl;
BOOST_CHECK(st.dimension() == 0);
BOOST_CHECK(st.num_simplices() == st.num_vertices());
@@ -885,10 +885,10 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(insert_duplicated_vertices, typeST, list_of_tested
std::tie(sh, success) = st.insert_simplex_and_subfaces({2, 1, 1, 2});
BOOST_CHECK(success);
BOOST_CHECK(sh != st.null_simplex());
- std::cout << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
+ std::clog << "st.dimension(sh)= " << st.dimension(sh) << std::endl;
BOOST_CHECK(st.dimension(sh) == 1);
- std::cout << "dimension =" << st.dimension() << " - num_vertices = " << st.num_vertices()
+ std::clog << "dimension =" << st.dimension() << " - num_vertices = " << st.num_vertices()
<< " - num_simplices = " << st.num_simplices() << std::endl;
BOOST_CHECK(st.dimension() == 1);
BOOST_CHECK(st.num_simplices() == st.num_vertices() + 1);
@@ -898,7 +898,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(insert_duplicated_vertices, typeST, list_of_tested
BOOST_CHECK(!success);
BOOST_CHECK(sh == st.null_simplex());
- std::cout << "dimension =" << st.dimension() << " - num_vertices = " << st.num_vertices()
+ std::clog << "dimension =" << st.dimension() << " - num_vertices = " << st.num_vertices()
<< " - num_simplices = " << st.num_simplices() << std::endl;
BOOST_CHECK(st.dimension() == 1);
BOOST_CHECK(st.num_simplices() == st.num_vertices() + 1);
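For context on the two streams involved here: std::clog and std::cout are both declared in <iostream>, but std::clog is tied to the standard error stream (like std::cerr, only buffered), while std::cout is tied to standard output, so text sent to std::clog does not mix with a program's regular output. A minimal standalone sketch of the difference, illustrative only and not taken from this repository:

    #include <iostream>

    int main() {
      std::cout << "result: 42\n";        // program output, e.g. captured by `./prog > out.txt`
      std::clog << "debug: step done\n";  // diagnostic text, goes to stderr and stays out of out.txt
      return 0;
    }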
diff --git a/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp b/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp
index 486827eb..d04ca289 100644
--- a/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp
+++ b/src/Skeleton_blocker/example/Skeleton_blocker_from_simplices.cpp
@@ -35,13 +35,13 @@ int main(int argc, char *argv[]) {
Complex complex(Gudhi::skeleton_blocker::make_complex_from_top_faces<Complex>(simplices.begin(), simplices.end()));
- std::cout << "Simplices:" << std::endl;
+ std::clog << "Simplices:" << std::endl;
for (const Simplex & s : complex.complex_simplex_range())
- std::cout << s << " ";
- std::cout << std::endl;
+ std::clog << s << " ";
+ std::clog << std::endl;
// One blocker as simplex 0123 is not in the complex but all its proper faces are.
- std::cout << "Blockers: " << complex.blockers_to_string() << std::endl;
+ std::clog << "Blockers: " << complex.blockers_to_string() << std::endl;
// now build a complex from its full list of simplices
simplices.clear();
@@ -53,13 +53,13 @@ int main(int argc, char *argv[]) {
simplices.push_back(Simplex(Vertex_handle(2), Vertex_handle(0)));
complex = Complex(simplices.begin(), simplices.end());
- std::cout << "Simplices:" << std::endl;
+ std::clog << "Simplices:" << std::endl;
for (const Simplex & s : complex.complex_simplex_range())
- std::cout << s << " ";
- std::cout << std::endl;
+ std::clog << s << " ";
+ std::clog << std::endl;
// One blocker as simplex 012 is not in the complex but all its proper faces are.
- std::cout << "Blockers: " << complex.blockers_to_string() << std::endl;
+ std::clog << "Blockers: " << complex.blockers_to_string() << std::endl;
return EXIT_SUCCESS;
}
diff --git a/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp b/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp
index 7f301047..62084692 100644
--- a/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp
+++ b/src/Skeleton_blocker/example/Skeleton_blocker_iteration.cpp
@@ -45,7 +45,7 @@ int main(int argc, char *argv[]) {
// more appropriate!
unsigned num_vertices = 0;
for (auto v : complex.vertex_range()) {
- std::cout << "Vertex " << v << std::endl;
+ std::clog << "Vertex " << v << std::endl;
++num_vertices;
}
@@ -65,9 +65,9 @@ int main(int argc, char *argv[]) {
else
euler -= 1;
}
- std::cout << "Saw " << num_vertices << " vertices, " << num_edges << " edges and " << num_simplices << " simplices"
+ std::clog << "Saw " << num_vertices << " vertices, " << num_edges << " edges and " << num_simplices << " simplices"
<< std::endl;
- std::cout << "The Euler Characteristic is " << euler << std::endl;
- std::cout << skbl_chrono;
+ std::clog << "The Euler Characteristic is " << euler << std::endl;
+ std::clog << skbl_chrono;
return EXIT_SUCCESS;
}
diff --git a/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp b/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp
index e634b656..ba7ce43c 100644
--- a/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp
+++ b/src/Skeleton_blocker/example/Skeleton_blocker_link.cpp
@@ -32,25 +32,25 @@ int main(int argc, char *argv[]) {
Simplex tetrahedron(Vertex_handle(0), Vertex_handle(1), Vertex_handle(2), Vertex_handle(3));
complex.add_simplex(tetrahedron);
- std::cout << "complex:" << complex.to_string() << std::endl;
+ std::clog << "complex:" << complex.to_string() << std::endl;
// build the link of vertex 1, eg a triangle {0,2,3}
auto link = complex.link(Vertex_handle(1));
- std::cout << "link:" << link.to_string() << std::endl;
+ std::clog << "link:" << link.to_string() << std::endl;
// Internally link is a subcomplex of 'complex' and its vertices are stored in a vector.
// They can be accessed via Vertex_handle(x) where x is an index of the vector.
// In that example, link has three vertices and thus it contains only
// Vertex_handle(0), Vertex_handle(1) and Vertex_handle(2).
for (int i = 0; i < 5; ++i)
- std::cout << "link.contains_vertex(Vertex_handle(" << i << ")):" << link.contains_vertex(Vertex_handle(i)) <<
+ std::clog << "link.contains_vertex(Vertex_handle(" << i << ")):" << link.contains_vertex(Vertex_handle(i)) <<
std::endl;
- std::cout << std::endl;
+ std::clog << std::endl;
// To access the initial vertices, e.g. (0,1,2,3,4), Root_vertex_handle must be used.
// For instance, to test if the link contains the vertex that was labeled i:
for (int i = 0; i < 5; ++i)
- std::cout << "link.contains_vertex(Root_vertex_handle(" << i << ")):" <<
+ std::clog << "link.contains_vertex(Root_vertex_handle(" << i << ")):" <<
link.contains_vertex(Root_vertex_handle(i)) << std::endl;
return EXIT_SUCCESS;
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
index bcca851f..653a63fd 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
@@ -154,8 +154,8 @@ of a simplicial complex.
else
euler -= 1;
}
- std::cout << "Saw "<<num_vertices<<" vertices, "<<num_edges<<" edges and "<<num_simplices<<" simplices"<<std::endl;
- std::cout << "The Euler Characteristic is "<<euler<<std::endl;
+ std::clog << "Saw "<<num_vertices<<" vertices, "<<num_edges<<" edges and "<<num_simplices<<" simplices"<<std::endl;
+ std::clog << "The Euler Characteristic is "<<euler<<std::endl;
\endcode
@@ -182,13 +182,13 @@ The Euler Characteristic is 1
//get complex from top faces
make_complex_from_top_faces(complex,simplices.begin(),simplices.end());
- std::cout << "Simplices:"<<std::endl;
+ std::clog << "Simplices:"<<std::endl;
for(const Simplex & s : complex.star_simplex_range())
- std::cout << s << " ";
- std::cout << std::endl;
+ std::clog << s << " ";
+ std::clog << std::endl;
//One blocker as simplex 0123 is not in the complex but all its proper faces are.
- std::cout << "Blockers: "<<complex.blockers_to_string()<<std::endl;
+ std::clog << "Blockers: "<<complex.blockers_to_string()<<std::endl;
//now build a complex from its full list of simplices
simplices.clear();
@@ -200,13 +200,13 @@ The Euler Characteristic is 1
simplices.push_back(Simplex(Vertex_handle(2),Vertex_handle(0)));
complex = Complex(simplices.begin(),simplices.end());
- std::cout << "Simplices:"<<std::endl;
+ std::clog << "Simplices:"<<std::endl;
for(const Simplex & s : complex.star_simplex_range())
- std::cout << s << " ";
- std::cout << std::endl;
+ std::clog << s << " ";
+ std::clog << std::endl;
//One blocker as simplex 012 is not in the complex but all its proper faces are.
- std::cout << "Blockers: "<<complex.blockers_to_string()<<std::endl;
+ std::clog << "Blockers: "<<complex.blockers_to_string()<<std::endl;
\endcode
\verbatim
./SkeletonBlockerFromSimplices
diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp
index 4336e33b..96438acf 100644
--- a/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp
+++ b/src/Skeleton_blocker/test/test_skeleton_blocker_complex.cpp
@@ -91,10 +91,10 @@ BOOST_AUTO_TEST_CASE(test_skeleton_num_simplices) {
BOOST_AUTO_TEST_CASE(test_skeleton_iterator_vertices1) {
int n = 10;
Complex complex(10);
- std::cout << "complex.num_vertices():" << complex.num_vertices() << std::endl;
+ std::clog << "complex.num_vertices():" << complex.num_vertices() << std::endl;
int num_vertex_seen = 0;
for (auto vi : complex.vertex_range()) {
- std::cout << "vertex:" << vi << std::endl;
+ std::clog << "vertex:" << vi << std::endl;
++num_vertex_seen;
}
BOOST_CHECK(num_vertex_seen == n);
@@ -104,14 +104,14 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_vertices2) {
int n = 10;
Complex complex;
build_complete(10, complex);
- std::cout << "complex.num_vertices():" << complex.num_vertices() << std::endl;
- std::cout << "complex.num_edges():" << complex.num_edges() << std::endl;
+ std::clog << "complex.num_vertices():" << complex.num_vertices() << std::endl;
+ std::clog << "complex.num_edges():" << complex.num_edges() << std::endl;
int num_vertex_seen = 0;
for (auto vi : complex.vertex_range(Vertex_handle(2))) {
- std::cout << "vertex:" << vi << std::endl;
+ std::clog << "vertex:" << vi << std::endl;
++num_vertex_seen;
}
- std::cout << "num_vertex_seen:" << num_vertex_seen << std::endl;
+ std::clog << "num_vertex_seen:" << num_vertex_seen << std::endl;
BOOST_CHECK(num_vertex_seen == (n -1));
}
@@ -123,10 +123,10 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_edge) {
complex.add_edge_without_blockers(Vertex_handle(i), Vertex_handle(j));
complex.remove_edge(Vertex_handle(2), Vertex_handle(3));
complex.remove_edge(Vertex_handle(3), Vertex_handle(5));
- std::cout << "complex.num_edges():" << complex.num_edges() << std::endl;
+ std::clog << "complex.num_edges():" << complex.num_edges() << std::endl;
int num_edges_seen = 0;
for (auto edge : complex.edge_range()) {
- std::cout << "edge :" << complex[edge] << std::endl;
+ std::clog << "edge :" << complex[edge] << std::endl;
++num_edges_seen;
}
@@ -141,10 +141,10 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_edge2) {
complex.add_edge_without_blockers(Vertex_handle(i), Vertex_handle(j));
complex.remove_edge(Vertex_handle(2), Vertex_handle(3));
complex.remove_edge(Vertex_handle(3), Vertex_handle(5));
- std::cout << "complex.num_edges():" << complex.num_edges() << std::endl;
+ std::clog << "complex.num_edges():" << complex.num_edges() << std::endl;
int num_neigbors_seen = 0;
for (auto neighbor : complex.vertex_range(Vertex_handle(2))) {
- std::cout << "neighbor" << neighbor << std::endl;
+ std::clog << "neighbor" << neighbor << std::endl;
++num_neigbors_seen;
}
BOOST_CHECK(num_neigbors_seen == 8);
@@ -160,7 +160,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_triangles) {
complex.add_edge_without_blockers(Vertex_handle(i), Vertex_handle(i + 1));
complex.add_edge_without_blockers(Vertex_handle(1), Vertex_handle(6));
- std::cout << complex.to_string() << std::endl;
+ std::clog << complex.to_string() << std::endl;
int num_triangles_seen = 0;
//for (auto t : complex.triangle_range(5)){
@@ -214,19 +214,19 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_simplices) {
expected_num_simplices[Vertex_handle(5)] = 7;
for (auto pair : expected_num_simplices) {
- std::cout << "found list: ";
+ std::clog << "found list: ";
unsigned num_simplices_around = 0;
for (const auto& simplex : complex.star_simplex_range(pair.first)) {
simplex.dimension();
- std::cout << simplex << " - ";
+ std::clog << simplex << " - ";
++num_simplices_around;
}
BOOST_CHECK(num_simplices_around == pair.second);
- std::cout << std::endl << "current vertex:" << pair.first << " - ";
- std::cout << "expected_num_simplices:" << pair.second << " - ";
- std::cout << "found:" << num_simplices_around << std::endl;
+ std::clog << std::endl << "current vertex:" << pair.first << " - ";
+ std::clog << "expected_num_simplices:" << pair.second << " - ";
+ std::clog << "found:" << num_simplices_around << std::endl;
}
}
@@ -276,19 +276,19 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_simplices3) {
BOOST_AUTO_TEST_CASE(test_skeleton_iterator_simplices4) {
Complex empty_complex;
for (auto v : empty_complex.vertex_range()) {
- std::cout << v;
+ std::clog << v;
BOOST_CHECK(false);
}
for (auto e : empty_complex.edge_range()) {
- std::cout << e;
+ std::clog << e;
BOOST_CHECK(false);
}
for (auto t : empty_complex.triangle_range()) {
- std::cout << t;
+ std::clog << t;
BOOST_CHECK(false);
}
for (auto s : empty_complex.complex_simplex_range()) {
- std::cout << s;
+ std::clog << s;
BOOST_CHECK(false);
}
}
@@ -297,7 +297,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_iterator_coboundary) {
Complex c;
build_complete(4, c);
c.remove_edge(Vertex_handle(1), Vertex_handle(3));
- std::cout << c.to_string();
+ std::clog << c.to_string();
Simplex s02(Vertex_handle(0), Vertex_handle(2));
int n = 0;
std::set<Simplex> expected;
@@ -373,7 +373,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_link0) {
auto L2 = complex.link(alpha);
BOOST_CHECK(L == L2);
- std::cout << L.to_string();
+ std::clog << L.to_string();
BOOST_CHECK(L.contains_vertex(*L.get_address(Root_vertex_handle(b))));
BOOST_CHECK(L.contains_vertex(*L.get_address(Root_vertex_handle(d))));
@@ -432,9 +432,9 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_link2) {
// Complexes built
// Print result
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
- std::cout << "L= Link_complex(" << alpha << ") : \n" << L.to_string();
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
+ std::clog << "L= Link_complex(" << alpha << ") : \n" << L.to_string();
auto L2 = complex.link(alpha);
BOOST_CHECK(L == L2);
@@ -472,9 +472,9 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_link3) {
// Complexes built
// Print result
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
- std::cout << "L= Link_complex(" << alpha << ") : \n" << L.to_string();
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
+ std::clog << "L= Link_complex(" << alpha << ") : \n" << L.to_string();
auto L2 = complex.link(alpha);
BOOST_CHECK(L == L2);
@@ -529,8 +529,8 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_link5) {
// Complexes built
// Print result
- std::cout << "Complex: " << complex.to_string()<< std::endl << std::endl;
- std::cout << "Link: " << L.to_string() << std::endl;
+ std::clog << "Complex: " << complex.to_string()<< std::endl << std::endl;
+ std::clog << "Link: " << L.to_string() << std::endl;
// verification
BOOST_CHECK(L.num_vertices() == 0);
@@ -549,8 +549,8 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_link6) {
build_link_of_blocker(complex, alpha, link_blocker_alpha);
// Print result
- std::cout << "Complex: " << complex.to_string()<< std::endl << std::endl;
- std::cout << "Link: " << link_blocker_alpha.to_string() << std::endl;
+ std::clog << "Complex: " << complex.to_string()<< std::endl << std::endl;
+ std::clog << "Link: " << link_blocker_alpha.to_string() << std::endl;
// verification
BOOST_CHECK(link_blocker_alpha.num_vertices() == 1);
@@ -579,12 +579,12 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_link7) {
//the result should be the edge {6,7} plus the blocker {0,1,2}
// Print result
- std::cout << "Complex: " << complex.to_string()<< std::endl << std::endl;
- std::cout << "Link: " << link_blocker_alpha.to_string() << std::endl;
+ std::clog << "Complex: " << complex.to_string()<< std::endl << std::endl;
+ std::clog << "Link: " << link_blocker_alpha.to_string() << std::endl;
Skeleton_blocker_link_complex link_blocker_alpha_cpy = link_blocker_alpha;
- std::cout << "Link copy: " << link_blocker_alpha_cpy.to_string() << std::endl;
+ std::clog << "Link copy: " << link_blocker_alpha_cpy.to_string() << std::endl;
BOOST_CHECK(link_blocker_alpha.num_vertices() == link_blocker_alpha_cpy.num_vertices());
BOOST_CHECK(link_blocker_alpha.num_blockers() == link_blocker_alpha_cpy.num_blockers());
@@ -640,7 +640,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor) {
Complex complex(simplices.begin(), simplices.end());
- std::cout << "Constructor 1:\n" << complex.to_string();
+ std::clog << "Constructor 1:\n" << complex.to_string();
BOOST_CHECK(complex.num_vertices() == 6);
BOOST_CHECK(complex.num_edges() == 10);
@@ -677,10 +677,10 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor2) {
Complex complex(simplices.begin(), simplices.end());
- std::cout << "Constructor 2:\n" << complex.to_string();
+ std::clog << "Constructor 2:\n" << complex.to_string();
for (auto b : complex.const_blocker_range()) {
- std::cout << "b:" << b << std::endl;
+ std::clog << "b:" << b << std::endl;
}
BOOST_CHECK(complex.num_vertices() == 5);
@@ -698,7 +698,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor3) {
Complex complex(simplices.begin(), simplices.end());
- std::cout << "Constructor 3:\n" << complex.to_string();
+ std::clog << "Constructor 3:\n" << complex.to_string();
BOOST_CHECK(complex.num_blockers() == 1);
Sh expected_blocker(Vh(0), Vh(1), Vh(2));
@@ -723,7 +723,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor4) {
Complex complex(simplices.begin(), simplices.end());
- std::cout << "Constructor 4:\n" << complex.to_string();
+ std::clog << "Constructor 4:\n" << complex.to_string();
BOOST_CHECK(complex.num_blockers() == 1);
Sh expected_blocker(Vh(0), Vh(1), Vh(4));
for (auto b : complex.const_blocker_range())
@@ -753,7 +753,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor5) {
Complex complex(simplices.begin(), simplices.end());
- std::cout << "Constructor 5:\n" << complex.to_string();
+ std::clog << "Constructor 5:\n" << complex.to_string();
BOOST_CHECK(complex.num_vertices() == 6);
BOOST_CHECK(complex.num_blockers() == 3);
@@ -773,7 +773,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor6) {
Complex complex(simplices.begin(), simplices.end());
- std::cout << "Constructor 6:\n" << complex.to_string();
+ std::clog << "Constructor 6:\n" << complex.to_string();
BOOST_CHECK(complex.num_vertices() == 4);
BOOST_CHECK(complex.num_blockers() == 1);
@@ -795,7 +795,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor7) {
//get complex from top faces
Complex complex(Gudhi::skeleton_blocker::make_complex_from_top_faces<Complex>(simplices.begin(), simplices.end()));
- std::cout << "Constructor 7:\n" << complex.to_string();
+ std::clog << "Constructor 7:\n" << complex.to_string();
BOOST_CHECK(complex.num_vertices() == 4);
BOOST_CHECK(complex.num_blockers() == 1);
@@ -818,7 +818,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_complex_constructor8) {
//get complex from top faces
Complex complex(Gudhi::skeleton_blocker::make_complex_from_top_faces<Complex>(simplices.begin(), simplices.end()));
- std::cout << "Constructor 8:\n" << complex.to_string();
+ std::clog << "Constructor 8:\n" << complex.to_string();
BOOST_CHECK(complex.num_vertices() == 4);
BOOST_CHECK(complex.num_blockers() == 2);
diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp
index 8cad97a1..9042ddcf 100644
--- a/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp
+++ b/src/Skeleton_blocker/test/test_skeleton_blocker_geometric_complex.cpp
@@ -36,7 +36,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_off_reader_writer) {
Gudhi::skeleton_blocker::Skeleton_blocker_off_reader<Complex> off_reader("test2.off", complex);
BOOST_CHECK(off_reader.is_valid());
- std::cout << "complex has " <<
+ std::clog << "complex has " <<
complex.num_vertices() << " vertices, " <<
complex.num_blockers() << " blockers, " <<
complex.num_edges() << " edges and " <<
@@ -50,8 +50,8 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_off_reader_writer) {
Complex same;
Gudhi::skeleton_blocker::Skeleton_blocker_off_reader<Complex> off_reader2("tmp.off", same);
- std::cout << "\ncomplex:" << complex.to_string() << std::endl;
- std::cout << "\nsame:" << same.to_string() << std::endl;
+ std::clog << "\ncomplex:" << complex.to_string() << std::endl;
+ std::clog << "\nsame:" << same.to_string() << std::endl;
BOOST_CHECK(complex == same);
}
@@ -61,7 +61,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_abstract_link) {
Gudhi::skeleton_blocker::Skeleton_blocker_off_reader<Complex> off_reader("test2.off", complex);
BOOST_CHECK(off_reader.is_valid());
- std::cout << "complex has " <<
+ std::clog << "complex has " <<
complex.num_vertices() << " vertices, " <<
complex.num_blockers() << " blockers, " <<
complex.num_edges() << " edges and " <<
@@ -73,7 +73,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_abstract_link) {
auto link_0 = complex.abstract_link(Vertex_handle(0));
- std::cout << "\n link(0):" << link_0.to_string() << std::endl;
+ std::clog << "\n link(0):" << link_0.to_string() << std::endl;
BOOST_CHECK(link_0.num_vertices() == 2);
BOOST_CHECK(link_0.num_edges() == 1);
@@ -91,13 +91,13 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_abstract_link) {
BOOST_CHECK(link_0[*(edge_handle)].second() == Root_vertex_handle(4));
auto link_geometric_0 = complex.link(Vertex_handle(0));
- std::cout << "\n link_geometric(0):" << link_geometric_0.to_string() << std::endl;
+ std::clog << "\n link_geometric(0):" << link_geometric_0.to_string() << std::endl;
BOOST_CHECK(link_0 == link_geometric_0);
auto print_point = [&](Vertex_handle v) {
- for (auto x : link_geometric_0.point(v)) std::cout << x << " ";
- std::cout << std::endl;
+ for (auto x : link_geometric_0.point(v)) std::clog << x << " ";
+ std::clog << std::endl;
};
std::for_each(link_geometric_0.vertex_range().begin(), link_geometric_0.vertex_range().end(), print_point);
diff --git a/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp b/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp
index b714753d..a85d4ff0 100644
--- a/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp
+++ b/src/Skeleton_blocker/test/test_skeleton_blocker_simplifiable.cpp
@@ -49,12 +49,12 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_contraction1) {
static_cast<Vertex_handle> (y)));
// Print result
- std::cout << "complex before complex" << complex.to_string() << std::endl;
+ std::clog << "complex before complex" << complex.to_string() << std::endl;
- std::cout << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
complex.contract_edge(static_cast<Vertex_handle> (a), static_cast<Vertex_handle> (b));
// Print result
- std::cout << "ContractEdge(0,1)\n";
+ std::clog << "ContractEdge(0,1)\n";
PRINT(complex.to_string());
// verification
@@ -89,13 +89,13 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_contraction2) {
complex.add_blocker(blocker);
// Print result
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
complex.contract_edge(static_cast<Vertex_handle> (a), static_cast<Vertex_handle> (b));
- std::cout << "complex.ContractEdge(a,b)" << complex.to_string();
+ std::clog << "complex.ContractEdge(a,b)" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
// there should be one blocker (a,c,d,e) in the complex
BOOST_CHECK(complex.contains_blocker(Simplex(static_cast<Vertex_handle> (a), static_cast<Vertex_handle> (x),
@@ -110,8 +110,8 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_link_condition1) {
complex.add_blocker(Simplex(static_cast<Vertex_handle> (0), static_cast<Vertex_handle> (1), static_cast<Vertex_handle> (2)));
// Print result
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
BOOST_CHECK(complex.link_condition(Vertex_handle(1), Vertex_handle(2), true));
@@ -125,13 +125,13 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_collapse0) {
complex.add_edge_without_blockers(static_cast<Vertex_handle> (2), static_cast<Vertex_handle> (4));
complex.add_edge_without_blockers(static_cast<Vertex_handle> (3), static_cast<Vertex_handle> (4));
// Print result
- std::cout << "initial complex :\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex :\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
Simplex simplex_123(static_cast<Vertex_handle> (1), static_cast<Vertex_handle> (2), static_cast<Vertex_handle> (3));
complex.remove_star(simplex_123);
- std::cout << "complex.remove_star(1,2,3):\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "complex.remove_star(1,2,3):\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
// verification
BOOST_CHECK(complex.contains_blocker(simplex_123));
@@ -142,13 +142,13 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_collapse1) {
build_complete(4, complex);
complex.add_blocker(Simplex(Vertex_handle(0), Vertex_handle(1), Vertex_handle(2), Vertex_handle(3)));
// Print result
- std::cout << "initial complex :\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex :\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
Simplex simplex_123(Vertex_handle(1), Vertex_handle(2), Vertex_handle(3));
complex.remove_star(simplex_123);
- std::cout << "complex.remove_star(1,2,3):\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "complex.remove_star(1,2,3):\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
// verification
BOOST_CHECK(complex.contains_blocker(simplex_123));
@@ -164,13 +164,13 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_collapse2) {
complex.add_edge_without_blockers(Vertex_handle(3), Vertex_handle(4));
complex.add_blocker(Simplex(Vertex_handle(1), Vertex_handle(2), Vertex_handle(3), Vertex_handle(4)));
// Print result
- std::cout << "initial complex :\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex :\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
Simplex sigma(Vertex_handle(1), Vertex_handle(2), Vertex_handle(3));
complex.remove_star(sigma);
- std::cout << "complex.remove_star(1,2,3):\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "complex.remove_star(1,2,3):\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
// verification
BOOST_CHECK(!complex.contains_blocker(Simplex(Vertex_handle(1), Vertex_handle(2),
@@ -187,11 +187,11 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_collapse3) {
complex.add_edge_without_blockers(Vertex_handle(3), Vertex_handle(4));
complex.add_blocker(Simplex(Vertex_handle(1), Vertex_handle(2), Vertex_handle(3), Vertex_handle(4)));
// Print result
- std::cout << "initial complex:\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex:\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
complex.remove_star(static_cast<Vertex_handle> (2));
- std::cout << "complex after remove star of 2:\n" << complex.to_string();
+ std::clog << "complex after remove star of 2:\n" << complex.to_string();
BOOST_CHECK(complex.contains_blocker(Simplex(Vertex_handle(1), Vertex_handle(3), Vertex_handle(4))));
BOOST_CHECK(!complex.contains_blocker(Simplex(Vertex_handle(1), Vertex_handle(2),
@@ -202,11 +202,11 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_add_simplex) {
Complex complex(4);
build_complete(4, complex);
complex.add_blocker(Simplex(Vertex_handle(0), Vertex_handle(1), Vertex_handle(3)));
- std::cout << "initial complex:\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex:\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
complex.add_simplex(Simplex(Vertex_handle(0), Vertex_handle(1), Vertex_handle(3)));
- std::cout << "complex after add_simplex:\n" << complex.to_string();
+ std::clog << "complex after add_simplex:\n" << complex.to_string();
BOOST_CHECK(complex.num_blockers() == 1);
BOOST_CHECK(complex.contains_blocker(Simplex(Vertex_handle(0), Vertex_handle(1),
Vertex_handle(2), Vertex_handle(3))));
@@ -216,8 +216,8 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_add_simplex2) {
Complex complex;
build_complete(4, complex);
// Print result
- std::cout << "initial complex:\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex:\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
Complex copy(complex.num_vertices());
@@ -232,7 +232,7 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_add_simplex2) {
copy.add_simplex(simplex);
}
- std::cout << "complex after add_simplex:\n" << copy.to_string();
+ std::clog << "complex after add_simplex:\n" << copy.to_string();
BOOST_CHECK(complex.num_blockers() == copy.num_blockers());
BOOST_CHECK(complex.num_edges() == copy.num_edges());
@@ -246,11 +246,11 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_add_simplex3) {
Simplex sigma(Vertex_handle(0), Vertex_handle(1), Vertex_handle(2));
complex.add_blocker(sigma);
// Print result
- std::cout << "initial complex:\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex:\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
complex.add_simplex(sigma);
//should create two blockers 0123 and 0124
- std::cout << "complex after adding simplex 012:\n" << complex.to_string();
+ std::clog << "complex after adding simplex 012:\n" << complex.to_string();
BOOST_CHECK(complex.num_blockers() == 2);
BOOST_CHECK(complex.contains_blocker(Simplex(Vertex_handle(0), Vertex_handle(1),
Vertex_handle(2), Vertex_handle(3))));
@@ -292,11 +292,11 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_add_edge) {
complex.add_edge(Vertex_handle(i), Vertex_handle((i + 1) % 4));
// Print result
- std::cout << "initial complex:\n" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "initial complex:\n" << complex.to_string();
+ std::clog << std::endl << std::endl;
complex.add_edge(Vertex_handle(1), Vertex_handle(3));
//should create two blockers 013 and 123
- std::cout << "complex after adding edge 13:\n" << complex.to_string();
+ std::clog << "complex after adding edge 13:\n" << complex.to_string();
BOOST_CHECK(complex.num_blockers() == 2);
BOOST_CHECK(complex.contains_blocker(Simplex(Vertex_handle(0), Vertex_handle(1), Vertex_handle(3))));
BOOST_CHECK(complex.contains_blocker(Simplex(Vertex_handle(1), Vertex_handle(2), Vertex_handle(3))));
@@ -313,12 +313,12 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_remove_popable_blockers)
complex.add_blocker(sigma1);
complex.add_blocker(sigma2);
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
- std::cout << "complex.RemovePopableBlockers();" << std::endl;
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
+ std::clog << "complex.RemovePopableBlockers();" << std::endl;
complex.remove_popable_blockers();
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
BOOST_CHECK(complex.num_blockers() == 1);
@@ -337,12 +337,12 @@ BOOST_AUTO_TEST_CASE(test_skeleton_blocker_simplifiable_remove_popable_blockers)
complex.add_blocker(sigma1);
complex.add_blocker(sigma2);
- std::cout << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
- std::cout << "complex.RemovePopableBlockers();" << std::endl;
+ std::clog << "complex complex" << complex.to_string();
+ std::clog << std::endl << std::endl;
+ std::clog << "complex.RemovePopableBlockers();" << std::endl;
complex.remove_popable_blockers();
- std::cout << "complex complex" << complex.to_string();
+ std::clog << "complex complex" << complex.to_string();
- std::cout << std::endl << std::endl;
+ std::clog << std::endl << std::endl;
BOOST_CHECK(complex.num_blockers() == 0);
}
diff --git a/src/Spatial_searching/example/example_spatial_searching.cpp b/src/Spatial_searching/example/example_spatial_searching.cpp
index 034ad24a..8f9151fc 100644
--- a/src/Spatial_searching/example/example_spatial_searching.cpp
+++ b/src/Spatial_searching/example/example_spatial_searching.cpp
@@ -23,38 +23,38 @@ int main(void) {
Points_ds points_ds(points);
// 10-nearest neighbor query
- std::cout << "10 nearest neighbors from points[20]:\n";
+ std::clog << "10 nearest neighbors from points[20]:\n";
auto knn_range = points_ds.k_nearest_neighbors(points[20], 10, true);
for (auto const& nghb : knn_range)
- std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
+ std::clog << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
// Incremental nearest neighbor query
- std::cout << "Incremental nearest neighbors:\n";
+ std::clog << "Incremental nearest neighbors:\n";
auto inn_range = points_ds.incremental_nearest_neighbors(points[45]);
// Get the neighbors in distance order until we hit the first point
for (auto ins_iterator = inn_range.begin(); ins_iterator->first != 0; ++ins_iterator)
- std::cout << ins_iterator->first << " (sq. dist. = " << ins_iterator->second << ")\n";
+ std::clog << ins_iterator->first << " (sq. dist. = " << ins_iterator->second << ")\n";
// 10-furthest neighbor query
- std::cout << "10 furthest neighbors from points[20]:\n";
+ std::clog << "10 furthest neighbors from points[20]:\n";
auto kfn_range = points_ds.k_furthest_neighbors(points[20], 10, true);
for (auto const& nghb : kfn_range)
- std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
+ std::clog << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
// Incremental furthest neighbor query
- std::cout << "Incremental furthest neighbors:\n";
+ std::clog << "Incremental furthest neighbors:\n";
auto ifn_range = points_ds.incremental_furthest_neighbors(points[45]);
// Get the neighbors in reverse distance order until we hit the first point
for (auto ifs_iterator = ifn_range.begin(); ifs_iterator->first != 0; ++ifs_iterator)
- std::cout << ifs_iterator->first << " (sq. dist. = " << ifs_iterator->second << ")\n";
+ std::clog << ifs_iterator->first << " (sq. dist. = " << ifs_iterator->second << ")\n";
// All-near-neighbors search
- std::cout << "All-near-neighbors search:\n";
+ std::clog << "All-near-neighbors search:\n";
std::vector<std::size_t> rs_result;
points_ds.all_near_neighbors(points[45], 0.5, std::back_inserter(rs_result));
K k;
for (auto const& p_idx : rs_result)
- std::cout << p_idx << " (sq. dist. = " << k.squared_distance_d_object()(points[p_idx], points[45]) << ")\n";
+ std::clog << p_idx << " (sq. dist. = " << k.squared_distance_d_object()(points[p_idx], points[45]) << ")\n";
return 0;
}
diff --git a/src/Subsampling/example/example_choose_n_farthest_points.cpp b/src/Subsampling/example/example_choose_n_farthest_points.cpp
index 5cfeb4d8..27cf5d4e 100644
--- a/src/Subsampling/example/example_choose_n_farthest_points.cpp
+++ b/src/Subsampling/example/example_choose_n_farthest_points.cpp
@@ -23,8 +23,8 @@ int main(void) {
Gudhi::subsampling::choose_n_farthest_points(k, points, 100,
Gudhi::subsampling::random_starting_point,
std::back_inserter(results));
- std::cout << "Before sparsification: " << points.size() << " points.\n";
- std::cout << "After sparsification: " << results.size() << " points.\n";
+ std::clog << "Before sparsification: " << points.size() << " points.\n";
+ std::clog << "After sparsification: " << results.size() << " points.\n";
return 0;
}
diff --git a/src/Subsampling/example/example_custom_kernel.cpp b/src/Subsampling/example/example_custom_kernel.cpp
index f1eb757b..535bf42a 100644
--- a/src/Subsampling/example/example_custom_kernel.cpp
+++ b/src/Subsampling/example/example_custom_kernel.cpp
@@ -55,9 +55,9 @@ int main(void) {
Gudhi::subsampling::choose_n_farthest_points(k, points, 2,
Gudhi::subsampling::random_starting_point,
std::back_inserter(results));
- std::cout << "Before sparsification: " << points.size() << " points.\n";
- std::cout << "After sparsification: " << results.size() << " points.\n";
- std::cout << "Result table: {" << results[0] << "," << results[1] << "}\n";
+ std::clog << "Before sparsification: " << points.size() << " points.\n";
+ std::clog << "After sparsification: " << results.size() << " points.\n";
+ std::clog << "Result table: {" << results[0] << "," << results[1] << "}\n";
return 0;
}
diff --git a/src/Subsampling/example/example_pick_n_random_points.cpp b/src/Subsampling/example/example_pick_n_random_points.cpp
index 25266403..316feed1 100644
--- a/src/Subsampling/example/example_pick_n_random_points.cpp
+++ b/src/Subsampling/example/example_pick_n_random_points.cpp
@@ -21,8 +21,8 @@ int main(void) {
K k;
std::vector<Point_d> results;
Gudhi::subsampling::pick_n_random_points(points, 100, std::back_inserter(results));
- std::cout << "Before sparsification: " << points.size() << " points.\n";
- std::cout << "After sparsification: " << results.size() << " points.\n";
+ std::clog << "Before sparsification: " << points.size() << " points.\n";
+ std::clog << "After sparsification: " << results.size() << " points.\n";
return 0;
}
diff --git a/src/Subsampling/example/example_sparsify_point_set.cpp b/src/Subsampling/example/example_sparsify_point_set.cpp
index a8caa720..1e2c38c1 100644
--- a/src/Subsampling/example/example_sparsify_point_set.cpp
+++ b/src/Subsampling/example/example_sparsify_point_set.cpp
@@ -21,8 +21,8 @@ int main(void) {
K k;
std::vector<Point_d> results;
Gudhi::subsampling::sparsify_point_set(k, points, 0.4, std::back_inserter(results));
- std::cout << "Before sparsification: " << points.size() << " points.\n";
- std::cout << "After sparsification: " << results.size() << " points.\n";
+ std::clog << "Before sparsification: " << points.size() << " points.\n";
+ std::clog << "After sparsification: " << results.size() << " points.\n";
return 0;
}
diff --git a/src/Subsampling/test/test_pick_n_random_points.cpp b/src/Subsampling/test/test_pick_n_random_points.cpp
index 018fb8d2..fafae2af 100644
--- a/src/Subsampling/test/test_pick_n_random_points.cpp
+++ b/src/Subsampling/test/test_pick_n_random_points.cpp
@@ -49,9 +49,9 @@ BOOST_AUTO_TEST_CASE(test_pick_n_random_points)
std::vector<Point_d> results;
Gudhi::subsampling::pick_n_random_points(vect, 5, std::back_inserter(results));
- std::cout << "landmark vector contains: ";
+ std::clog << "landmark vector contains: ";
for (auto l: results)
- std::cout << l << "\n";
+ std::clog << l << "\n";
BOOST_CHECK(results.size() == 5);
}
diff --git a/src/Subsampling/test/test_sparsify_point_set.cpp b/src/Subsampling/test/test_sparsify_point_set.cpp
index 587ab3ad..cdcfbff5 100644
--- a/src/Subsampling/test/test_sparsify_point_set.cpp
+++ b/src/Subsampling/test/test_sparsify_point_set.cpp
@@ -34,10 +34,10 @@ BOOST_AUTO_TEST_CASE(test_sparsify_point_set)
K k;
std::vector<Point_d> results;
Gudhi::subsampling::sparsify_point_set(k, points, 0.5, std::back_inserter(results));
- std::cout << "Before sparsification: " << points.size() << " points.\n";
- std::cout << "After sparsification: " << results.size() << " points.\n";
+ std::clog << "Before sparsification: " << points.size() << " points.\n";
+ std::clog << "After sparsification: " << results.size() << " points.\n";
//for (auto p : results)
- // std::cout << p << "\n";
+ // std::clog << p << "\n";
BOOST_CHECK(points.size() > results.size());
}
diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp
index 46caec54..023c1e1a 100644
--- a/src/Tangential_complex/test/test_tangential_complex.cpp
+++ b/src/Tangential_complex/test/test_tangential_complex.cpp
@@ -76,14 +76,14 @@ BOOST_AUTO_TEST_CASE(test_mini_tangential) {
points.push_back(Point(point.size(), point.begin(), point.end()));
point = {1.0, 1.0};
points.push_back(Point(point.size(), point.begin(), point.end()));
- std::cout << "points = " << points.size() << std::endl;
+ std::clog << "points = " << points.size() << std::endl;
Kernel k;
// Compute the TC
TC tc(points, INTRINSIC_DIM, k);
tc.compute_tangential_complex();
TC::Num_inconsistencies num_inc = tc.number_of_inconsistent_simplices();
- std::cout << "TC vertices = " << tc.number_of_vertices() << " - simplices = " << num_inc.num_simplices <<
+ std::clog << "TC vertices = " << tc.number_of_vertices() << " - simplices = " << num_inc.num_simplices <<
" - inc simplices = " << num_inc.num_inconsistent_simplices <<
" - inc stars = " << num_inc.num_inconsistent_stars << std::endl;
@@ -95,7 +95,7 @@ BOOST_AUTO_TEST_CASE(test_mini_tangential) {
// Export the TC into a Simplex_tree
Gudhi::Simplex_tree<> stree;
tc.create_complex(stree);
- std::cout << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
+ std::clog << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
BOOST_CHECK(stree.num_vertices() == 4);
BOOST_CHECK(stree.num_simplices() == 6);
@@ -109,7 +109,7 @@ BOOST_AUTO_TEST_CASE(test_mini_tangential) {
// Export the TC into a Simplex_tree
tc.create_complex(stree);
- std::cout << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
+ std::clog << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
BOOST_CHECK(stree.num_vertices() == 4);
BOOST_CHECK(stree.num_simplices() == 6);
@@ -139,7 +139,7 @@ BOOST_AUTO_TEST_CASE(test_basic_example_throw) {
// Compute the TC
TC tc(points, INTRINSIC_DIM, k);
tc.set_max_squared_edge_length(0.01);
- std::cout << "test_basic_example_throw - set_max_squared_edge_length(0.01) to make GUDHI_CHECK fail" << std::endl;
+ std::clog << "test_basic_example_throw - set_max_squared_edge_length(0.01) to make GUDHI_CHECK fail" << std::endl;
BOOST_CHECK_THROW(tc.compute_tangential_complex(), std::invalid_argument);
}
diff --git a/src/Toplex_map/benchmark/benchmark_tm.cpp b/src/Toplex_map/benchmark/benchmark_tm.cpp
index feb5d01c..d078fcf8 100644
--- a/src/Toplex_map/benchmark/benchmark_tm.cpp
+++ b/src/Toplex_map/benchmark/benchmark_tm.cpp
@@ -25,10 +25,10 @@ typedef std::pair<Simplex_tree<>::Simplex_handle, bool> typePairSimplexBool;
class ST_wrapper {
public:
void insert_simplex(const Simplex& tau) {
- /*std::cout << "insert_simplex - " << simplexTree.num_simplices() << " - ";
+ /*std::clog << "insert_simplex - " << simplexTree.num_simplices() << " - ";
for (auto v : tau)
- std::cout << v << ", ";
- std::cout << std::endl;
+ std::clog << v << ", ";
+ std::clog << std::endl;
*/
simplexTree.insert_simplex_and_subfaces(tau);
}
@@ -104,22 +104,22 @@ void chrono(int n, int d) {
auto c2 = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
if (c3 > 0)
- std::cout << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_maximal_simplices() << std::endl;
+ std::clog << c1 << "\t \t" << c2 << "\t \t" << c3 << "\t \t" << K.num_maximal_simplices() << std::endl;
else
- std::cout << c1 << "\t \t" << c2 << "\t \tN/A\t \t" << K.num_maximal_simplices() << std::endl;
+ std::clog << c1 << "\t \t" << c2 << "\t \tN/A\t \t" << K.num_maximal_simplices() << std::endl;
}
int main() {
for (int d = 5; d <= 40; d += 5) {
- std::cout << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl;
- std::cout << "T Map \t \t";
+ std::clog << "d=" << d << " \t Insertions \t Membership \t Contractions \t Size" << std::endl;
+ std::clog << "T Map \t \t";
chrono<Toplex_map>(n, d);
- std::cout << "Lazy \t \t";
+ std::clog << "Lazy \t \t";
chrono<Lazy_toplex_map>(n, d);
if (d <= 15) {
- std::cout << "ST \t \t";
+ std::clog << "ST \t \t";
chrono<ST_wrapper>(n, d);
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
}
diff --git a/src/Toplex_map/example/simple_toplex_map.cpp b/src/Toplex_map/example/simple_toplex_map.cpp
index 7538c989..c432608e 100644
--- a/src/Toplex_map/example/simple_toplex_map.cpp
+++ b/src/Toplex_map/example/simple_toplex_map.cpp
@@ -31,72 +31,72 @@ int main(int argc, char* const argv[]) {
/* o---o */
/* 1 3 */
- std::cout << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
+ std::clog << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
<< std::endl;
// Browse maximal cofaces
Simplex sigma3 = {2, 3};
- std::cout << "Maximal cofaces of {2, 3} are :" << std::endl;
+ std::clog << "Maximal cofaces of {2, 3} are :" << std::endl;
for (auto simplex_ptr : tm.maximal_cofaces(sigma3, 2)) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
Simplex sigma4 = {1, 3};
assert(tm.membership(sigma4));
Gudhi::Toplex_map::Vertex v = tm.contraction(1, 3);
- std::cout << "After contraction(1, 3) - " << v << std::endl;
+ std::clog << "After contraction(1, 3) - " << v << std::endl;
/* Simplex is: */
/* 2 4 */
/* o---o */
/* \5/ */
/* o */
/* 3 */
- std::cout << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
+ std::clog << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
<< std::endl;
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
Simplex sigma5 = {3, 4};
assert(tm.membership(sigma5));
v = tm.contraction(3, 4);
- std::cout << "After contraction(3, 4) - " << v << std::endl;
+ std::clog << "After contraction(3, 4) - " << v << std::endl;
/* Simplex is: */
/* 2 4 */
/* o---o */
/* \X/ */
/* o */
/* 5 */
- std::cout << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
+ std::clog << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
<< std::endl;
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
tm.insert_simplex(sigma1);
@@ -109,44 +109,44 @@ int main(int argc, char* const argv[]) {
/* 1 3 */
tm.remove_simplex(sigma1);
- std::cout << "After remove_simplex(1, 2, 3)" << std::endl;
+ std::clog << "After remove_simplex(1, 2, 3)" << std::endl;
/* Simplex is: */
/* 2 4 */
/* o---o */
/* / \5/ */
/* o---o */
/* 1 3 */
- std::cout << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
+ std::clog << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
<< std::endl;
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
tm.remove_vertex(1);
- std::cout << "After remove_vertex(1)" << std::endl;
+ std::clog << "After remove_vertex(1)" << std::endl;
/* Simplex is: */
/* 2 4 */
/* o---o */
/* \5/ */
/* o */
/* 3 */
- std::cout << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
+ std::clog << "num max simplices = " << tm.num_maximal_simplices() << " - num vertices = " << tm.num_vertices()
<< std::endl;
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
return 0;
diff --git a/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp b/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp
index 639bf35a..994cee8e 100644
--- a/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp
+++ b/src/Toplex_map/test/lazy_toplex_map_unit_test.cpp
@@ -20,43 +20,43 @@ BOOST_AUTO_TEST_CASE(toplex_map) {
using Vertex = Gudhi::Lazy_toplex_map::Vertex;
Gudhi::Lazy_toplex_map tm;
- std::cout << "insert_simplex {1, 2, 3, 4}" << std::endl;
+ std::clog << "insert_simplex {1, 2, 3, 4}" << std::endl;
std::vector<Vertex> sigma1 = {1, 2, 3, 4};
tm.insert_simplex(sigma1);
- std::cout << "insert_simplex {5, 2, 3, 6}" << std::endl;
+ std::clog << "insert_simplex {5, 2, 3, 6}" << std::endl;
std::vector<Vertex> sigma2 = {5, 2, 3, 6};
tm.insert_simplex(sigma2);
- std::cout << "insert_simplex {5}" << std::endl;
+ std::clog << "insert_simplex {5}" << std::endl;
std::vector<Vertex> sigma3 = {5};
tm.insert_simplex(sigma3);
- std::cout << "insert_simplex {4, 5, 3}" << std::endl;
+ std::clog << "insert_simplex {4, 5, 3}" << std::endl;
std::vector<Vertex> sigma6 = {4, 5, 3};
tm.insert_simplex(sigma6);
- std::cout << "insert_simplex {4, 5, 9}" << std::endl;
+ std::clog << "insert_simplex {4, 5, 9}" << std::endl;
std::vector<Vertex> sigma7 = {4, 5, 9};
tm.insert_simplex(sigma7);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 5);
std::vector<Vertex> sigma4 = {5, 2, 3};
std::vector<Vertex> sigma5 = {5, 2, 7};
BOOST_CHECK(tm.membership(sigma4));
BOOST_CHECK(!tm.membership(sigma5));
- std::cout << "insert_simplex {5, 2, 7}" << std::endl;
+ std::clog << "insert_simplex {5, 2, 7}" << std::endl;
tm.insert_simplex(sigma5);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 6);
BOOST_CHECK(tm.membership(sigma5));
- std::cout << "contraction(4,5)" << std::endl;
+ std::clog << "contraction(4,5)" << std::endl;
auto r = tm.contraction(4, 5);
- std::cout << "r=" << r << std::endl;
+ std::clog << "r=" << r << std::endl;
BOOST_CHECK(r == 5);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 6);
std::vector<Vertex> sigma8 = {1, 2, 3};
@@ -68,11 +68,11 @@ BOOST_AUTO_TEST_CASE(toplex_map) {
BOOST_CHECK(tm.membership(sigma8));
BOOST_CHECK(tm.membership(sigma9));
- std::cout << "remove_simplex({2, 7, r = 5})" << std::endl;
+ std::clog << "remove_simplex({2, 7, r = 5})" << std::endl;
tm.remove_simplex(sigma9);
BOOST_CHECK(!tm.membership(sigma9));
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 8);
// {2, 7, 5} is removed, but verify its edges are still there
@@ -88,71 +88,71 @@ BOOST_AUTO_TEST_CASE(toplex_map_empty_toplex) {
using Vertex = Gudhi::Lazy_toplex_map::Vertex;
Gudhi::Lazy_toplex_map tm;
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 0);
- std::cout << "num_vertices = " << tm.num_vertices() << std::endl;
+ std::clog << "num_vertices = " << tm.num_vertices() << std::endl;
BOOST_CHECK(tm.num_vertices() == 0);
- std::cout << "Check an empty simplex is a member." << std::endl;
+ std::clog << "Check an empty simplex is a member." << std::endl;
std::vector<Vertex> empty_sigma = {};
BOOST_CHECK(tm.membership(empty_sigma));
- std::cout << "Check the edge 2,7 is not a member." << std::endl;
+ std::clog << "Check the edge 2,7 is not a member." << std::endl;
std::vector<Vertex> edge = {2, 7};
BOOST_CHECK(!tm.membership(edge));
- std::cout << "Insert an empty simplex." << std::endl;
+ std::clog << "Insert an empty simplex." << std::endl;
tm.insert_simplex(empty_sigma);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 0);
- std::cout << "num_vertices = " << tm.num_vertices() << std::endl;
+ std::clog << "num_vertices = " << tm.num_vertices() << std::endl;
BOOST_CHECK(tm.num_vertices() == 0);
- std::cout << "Check an empty simplex is a member." << std::endl;
+ std::clog << "Check an empty simplex is a member." << std::endl;
BOOST_CHECK(tm.membership(empty_sigma));
- std::cout << "Check the edge 2,7 is not a member." << std::endl;
+ std::clog << "Check the edge 2,7 is not a member." << std::endl;
BOOST_CHECK(!tm.membership(edge));
- std::cout << "Insert edge 2,7." << std::endl;
+ std::clog << "Insert edge 2,7." << std::endl;
tm.insert_simplex(edge);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 1);
- std::cout << "num_vertices = " << tm.num_vertices() << std::endl;
+ std::clog << "num_vertices = " << tm.num_vertices() << std::endl;
BOOST_CHECK(tm.num_vertices() == 2);
- std::cout << "Check an empty simplex is a member." << std::endl;
+ std::clog << "Check an empty simplex is a member." << std::endl;
BOOST_CHECK(tm.membership(empty_sigma));
- std::cout << "Check the edge 2,7 is a member." << std::endl;
+ std::clog << "Check the edge 2,7 is a member." << std::endl;
BOOST_CHECK(tm.membership(edge));
- std::cout << "contraction(2,7)" << std::endl;
+ std::clog << "contraction(2,7)" << std::endl;
auto r = tm.contraction(2, 7);
- std::cout << "r=" << r << std::endl;
+ std::clog << "r=" << r << std::endl;
BOOST_CHECK(r == 7);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 1);
- std::cout << "num_vertices = " << tm.num_vertices() << std::endl;
+ std::clog << "num_vertices = " << tm.num_vertices() << std::endl;
BOOST_CHECK(tm.num_vertices() == 1);
- std::cout << "Check an empty simplex is a member." << std::endl;
+ std::clog << "Check an empty simplex is a member." << std::endl;
BOOST_CHECK(tm.membership(empty_sigma));
- std::cout << "Check the edge 2,7 is not a member." << std::endl;
+ std::clog << "Check the edge 2,7 is not a member." << std::endl;
BOOST_CHECK(!tm.membership(edge));
- std::cout << "Remove the vertex 7." << std::endl;
+ std::clog << "Remove the vertex 7." << std::endl;
std::vector<Vertex> vertex = {7};
tm.remove_simplex(vertex);
- std::cout << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices = " << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 0);
- std::cout << "num_vertices = " << tm.num_vertices() << std::endl;
+ std::clog << "num_vertices = " << tm.num_vertices() << std::endl;
BOOST_CHECK(tm.num_vertices() == 0);
- std::cout << "Check an empty simplex is a member." << std::endl;
+ std::clog << "Check an empty simplex is a member." << std::endl;
BOOST_CHECK(tm.membership(empty_sigma));
- std::cout << "Check the edge 2,7 is not a member." << std::endl;
+ std::clog << "Check the edge 2,7 is not a member." << std::endl;
BOOST_CHECK(!tm.membership(edge));
}
diff --git a/src/Toplex_map/test/toplex_map_unit_test.cpp b/src/Toplex_map/test/toplex_map_unit_test.cpp
index 24ec679b..0d0751ff 100644
--- a/src/Toplex_map/test/toplex_map_unit_test.cpp
+++ b/src/Toplex_map/test/toplex_map_unit_test.cpp
@@ -20,31 +20,31 @@ BOOST_AUTO_TEST_CASE(toplex_map) {
using Vertex = Gudhi::Toplex_map::Vertex;
Gudhi::Toplex_map tm;
- std::cout << "insert_simplex {1, 2, 3, 4}" << std::endl;
+ std::clog << "insert_simplex {1, 2, 3, 4}" << std::endl;
std::vector<Vertex> sigma1 = {1, 2, 3, 4};
tm.insert_simplex(sigma1);
- std::cout << "insert_simplex {5, 2, 3, 6}" << std::endl;
+ std::clog << "insert_simplex {5, 2, 3, 6}" << std::endl;
std::vector<Vertex> sigma2 = {5, 2, 3, 6};
tm.insert_simplex(sigma2);
- std::cout << "insert_simplex {5}" << std::endl;
+ std::clog << "insert_simplex {5}" << std::endl;
std::vector<Vertex> sigma3 = {5};
tm.insert_simplex(sigma3);
- std::cout << "insert_simplex {4, 5, 3}" << std::endl;
+ std::clog << "insert_simplex {4, 5, 3}" << std::endl;
std::vector<Vertex> sigma6 = {4, 5, 3};
tm.insert_simplex(sigma6);
- std::cout << "insert_simplex {4, 5, 9}" << std::endl;
+ std::clog << "insert_simplex {4, 5, 9}" << std::endl;
std::vector<Vertex> sigma7 = {4, 5, 9};
tm.insert_simplex(sigma7);
- std::cout << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 4);
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
BOOST_CHECK(tm.maximality(*simplex_ptr));
}
@@ -58,37 +58,37 @@ BOOST_AUTO_TEST_CASE(toplex_map) {
std::vector<Vertex> sigma5 = {5, 2, 7};
BOOST_CHECK(tm.membership(sigma4));
BOOST_CHECK(!tm.membership(sigma5));
- std::cout << "insert_simplex {5, 2, 7}" << std::endl;
+ std::clog << "insert_simplex {5, 2, 7}" << std::endl;
tm.insert_simplex(sigma5);
- std::cout << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 5);
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
BOOST_CHECK(tm.maximality(*simplex_ptr));
}
BOOST_CHECK(tm.membership(sigma5));
- std::cout << "contraction(4,5)" << std::endl;
+ std::clog << "contraction(4,5)" << std::endl;
auto r = tm.contraction(4, 5);
- std::cout << "r=" << r << std::endl;
+ std::clog << "r=" << r << std::endl;
BOOST_CHECK(r == 5);
- std::cout << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 4);
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
BOOST_CHECK(tm.maximality(*simplex_ptr));
}
@@ -101,19 +101,19 @@ BOOST_AUTO_TEST_CASE(toplex_map) {
BOOST_CHECK(tm.membership(sigma8));
BOOST_CHECK(tm.membership(sigma9));
- std::cout << "remove_simplex({2, 7, r = 5})" << std::endl;
+ std::clog << "remove_simplex({2, 7, r = 5})" << std::endl;
tm.remove_simplex(sigma9);
BOOST_CHECK(!tm.membership(sigma9));
- std::cout << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
+ std::clog << "num_maximal_simplices" << tm.num_maximal_simplices() << std::endl;
BOOST_CHECK(tm.num_maximal_simplices() == 5);
// Browse maximal simplices
- std::cout << "Maximal simplices are :" << std::endl;
+ std::clog << "Maximal simplices are :" << std::endl;
for (auto simplex_ptr : tm.maximal_simplices()) {
for (auto v : *simplex_ptr) {
- std::cout << v << ", ";
+ std::clog << v << ", ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
BOOST_CHECK(tm.maximality(*simplex_ptr));
}
// {2, 7, 5} is removed, but verify its edges are still there
diff --git a/src/Witness_complex/example/example_nearest_landmark_table.cpp b/src/Witness_complex/example/example_nearest_landmark_table.cpp
index 441900c1..14101847 100644
--- a/src/Witness_complex/example/example_nearest_landmark_table.cpp
+++ b/src/Witness_complex/example/example_nearest_landmark_table.cpp
@@ -33,7 +33,7 @@ int main(int argc, char * const argv[]) {
Witness_complex witness_complex(nlt);
witness_complex.create_complex(simplex_tree, .41);
- std::cout << "Number of simplices: " << simplex_tree.num_simplices() << std::endl;
+ std::clog << "Number of simplices: " << simplex_tree.num_simplices() << std::endl;
Persistent_cohomology pcoh(simplex_tree);
// initializes the coefficient field for homology
diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
index 19f73836..583a04ab 100644
--- a/src/Witness_complex/example/example_strong_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
@@ -38,8 +38,8 @@ int main(int argc, char* const argv[]) {
}
point_vector = Point_vector(off_reader.get_point_cloud());
- std::cout << "Successfully read " << point_vector.size() << " points.\n";
- std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
+ std::clog << "Successfully read " << point_vector.size() << " points.\n";
+ std::clog << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
@@ -52,6 +52,6 @@ int main(int argc, char* const argv[]) {
witness_complex.create_complex(simplex_tree, alpha2, lim_dim);
end = clock();
- std::cout << "Strong witness complex took " << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
- std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
+ std::clog << "Strong witness complex took " << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
+ std::clog << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
}
diff --git a/src/Witness_complex/example/example_witness_complex_off.cpp b/src/Witness_complex/example/example_witness_complex_off.cpp
index be11c955..3635da78 100644
--- a/src/Witness_complex/example/example_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_witness_complex_off.cpp
@@ -42,8 +42,8 @@ int main(int argc, char * const argv[]) {
}
point_vector = Point_vector(off_reader.get_point_cloud());
- std::cout << "Successfully read " << point_vector.size() << " points.\n";
- std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
+ std::clog << "Successfully read " << point_vector.size() << " points.\n";
+ std::clog << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
@@ -56,7 +56,7 @@ int main(int argc, char * const argv[]) {
witness_complex.create_complex(simplex_tree, alpha2, lim_dim);
end = clock();
- std::cout << "Witness complex took "
+ std::clog << "Witness complex took "
<< static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
- std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
+ std::clog << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
}
diff --git a/src/Witness_complex/example/example_witness_complex_sphere.cpp b/src/Witness_complex/example/example_witness_complex_sphere.cpp
index 9e3c972d..78d5db4f 100644
--- a/src/Witness_complex/example/example_witness_complex_sphere.cpp
+++ b/src/Witness_complex/example/example_witness_complex_sphere.cpp
@@ -47,8 +47,8 @@ int main(int argc, char* const argv[]) {
Gudhi::Simplex_tree<> simplex_tree;
Point_Vector point_vector, landmarks;
generate_points_sphere(point_vector, nbP, 4);
- std::cout << "Successfully generated " << point_vector.size() << " points.\n";
- std::cout << "Ambient dimension is " << point_vector[0].size() << ".\n";
+ std::clog << "Successfully generated " << point_vector.size() << " points.\n";
+ std::clog << "Ambient dimension is " << point_vector[0].size() << ".\n";
// Choose landmarks
start = clock();
@@ -62,8 +62,8 @@ int main(int argc, char* const argv[]) {
witness_complex.create_complex(simplex_tree, 0);
end = clock();
double time = static_cast<double>(end - start) / CLOCKS_PER_SEC;
- std::cout << "Witness complex for " << number_of_landmarks << " landmarks took " << time << " s. \n";
- std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
+ std::clog << "Witness complex for " << number_of_landmarks << " landmarks took " << time << " s. \n";
+ std::clog << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
l_time.push_back(std::make_pair(nbP, time));
}
write_data(l_time, "w_time.dat");
diff --git a/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp b/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
index 4f718203..9b19f6dc 100644
--- a/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
+++ b/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
@@ -82,12 +82,12 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
witnesses);
eucl_witness_complex.create_complex(complex, 0);
- std::cout << "complex.num_simplices() = " << complex.num_simplices() << std::endl;
+ std::clog << "complex.num_simplices() = " << complex.num_simplices() << std::endl;
BOOST_CHECK(complex.num_simplices() == 24);
eucl_witness_complex.create_complex(relaxed_complex, 8.01);
- std::cout << "relaxed_complex.num_simplices() = " << relaxed_complex.num_simplices() << std::endl;
+ std::clog << "relaxed_complex.num_simplices() = " << relaxed_complex.num_simplices() << std::endl;
BOOST_CHECK(relaxed_complex.num_simplices() == 239);
// The corner simplex {0,2,5,7} and its cofaces are missing.
@@ -95,12 +95,12 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
WitnessComplex witness_complex(nearest_landmark_table);
witness_complex.create_complex(complex_ne, 0);
- std::cout << "complex.num_simplices() = " << complex_ne.num_simplices() << std::endl;
+ std::clog << "complex.num_simplices() = " << complex_ne.num_simplices() << std::endl;
BOOST_CHECK(complex_ne.num_simplices() == 24);
witness_complex.create_complex(relaxed_complex_ne, 8.01);
- std::cout << "relaxed_complex.num_simplices() = " << relaxed_complex_ne.num_simplices() << std::endl;
+ std::clog << "relaxed_complex.num_simplices() = " << relaxed_complex_ne.num_simplices() << std::endl;
BOOST_CHECK(relaxed_complex_ne.num_simplices() == 239);
@@ -111,10 +111,10 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
eucl_strong_witness_complex.create_complex(strong_relaxed_complex, 9.1);
eucl_strong_witness_complex.create_complex(strong_relaxed_complex2, 9.1, 2);
- std::cout << "strong_relaxed_complex.num_simplices() = " << strong_relaxed_complex.num_simplices() << std::endl;
+ std::clog << "strong_relaxed_complex.num_simplices() = " << strong_relaxed_complex.num_simplices() << std::endl;
BOOST_CHECK(strong_relaxed_complex.num_simplices() == 239);
- std::cout << "strong_relaxed_complex2.num_simplices() = " << strong_relaxed_complex2.num_simplices() << std::endl;
+ std::clog << "strong_relaxed_complex2.num_simplices() = " << strong_relaxed_complex2.num_simplices() << std::endl;
BOOST_CHECK(strong_relaxed_complex2.num_simplices() == 92);
@@ -124,10 +124,10 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
strong_witness_complex.create_complex(strong_relaxed_complex_ne, 9.1);
strong_witness_complex.create_complex(strong_relaxed_complex2_ne, 9.1, 2);
- std::cout << "strong_relaxed_complex.num_simplices() = " << strong_relaxed_complex_ne.num_simplices() << std::endl;
+ std::clog << "strong_relaxed_complex.num_simplices() = " << strong_relaxed_complex_ne.num_simplices() << std::endl;
BOOST_CHECK(strong_relaxed_complex_ne.num_simplices() == 239);
- std::cout << "strong_relaxed_complex2.num_simplices() = " << strong_relaxed_complex2_ne.num_simplices() << std::endl;
+ std::clog << "strong_relaxed_complex2.num_simplices() = " << strong_relaxed_complex2_ne.num_simplices() << std::endl;
BOOST_CHECK(strong_relaxed_complex2_ne.num_simplices() == 92);
diff --git a/src/Witness_complex/test/test_simple_witness_complex.cpp b/src/Witness_complex/test/test_simple_witness_complex.cpp
index 9e3509d3..7c48cc54 100644
--- a/src/Witness_complex/test/test_simple_witness_complex.cpp
+++ b/src/Witness_complex/test/test_simple_witness_complex.cpp
@@ -36,7 +36,7 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
Witness_complex witness_complex(nlt);
BOOST_CHECK(witness_complex.create_complex(stree, 4.1));
- std::cout << "Number of simplices: " << stree.num_simplices() << std::endl;
+ std::clog << "Number of simplices: " << stree.num_simplices() << std::endl;
BOOST_CHECK(stree.num_simplices() == 31);
// Check when complex not empty
@@ -47,7 +47,7 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
BOOST_CHECK(!witness_complex.create_complex(stree2, -0.02));
witness_complex.create_complex(stree2, 4.1, 2);
- std::cout << "Number of simplices: " << stree2.num_simplices() << std::endl;
+ std::clog << "Number of simplices: " << stree2.num_simplices() << std::endl;
BOOST_CHECK(stree2.num_simplices() == 25);
}
diff --git a/src/Witness_complex/utilities/strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp
index 75ba1f4b..1f61c77c 100644
--- a/src/Witness_complex/utilities/strong_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp
@@ -56,8 +56,8 @@ int main(int argc, char* argv[]) {
exit(-1); // ----- >>
}
witnesses = Point_vector(off_reader.get_point_cloud());
- std::cout << "Successfully read " << witnesses.size() << " points.\n";
- std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
+ std::clog << "Successfully read " << witnesses.size() << " points.\n";
+ std::clog << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
@@ -69,8 +69,8 @@ int main(int argc, char* argv[]) {
strong_witness_complex.create_complex(simplex_tree, max_squared_alpha, lim_d);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Sort the simplices in the order of the filtration
simplex_tree.initialize_filtration();
@@ -107,7 +107,7 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
visible.add_options()("help,h", "produce help message")("landmarks,l", po::value<int>(&nbL),
"Number of landmarks to choose from the point cloud.")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
"Maximal squared relaxation parameter.")(
"field-charac,p", po::value<int>(&p)->default_value(11),
@@ -128,17 +128,17 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Strong witness complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Strong witness complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Witness_complex/utilities/weak_witness_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp
index 0e5b9cc1..93050af5 100644
--- a/src/Witness_complex/utilities/weak_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp
@@ -56,8 +56,8 @@ int main(int argc, char* argv[]) {
exit(-1); // ----- >>
}
witnesses = Point_vector(off_reader.get_point_cloud());
- std::cout << "Successfully read " << witnesses.size() << " points.\n";
- std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
+ std::clog << "Successfully read " << witnesses.size() << " points.\n";
+ std::clog << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
@@ -69,8 +69,8 @@ int main(int argc, char* argv[]) {
witness_complex.create_complex(simplex_tree, max_squared_alpha, lim_d);
- std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+ std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
// Sort the simplices in the order of the filtration
simplex_tree.initialize_filtration();
@@ -107,7 +107,7 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
visible.add_options()("help,h", "produce help message")("landmarks,l", po::value<int>(&nbL),
"Number of landmarks to choose from the point cloud.")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in std::clog")(
"max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
"Maximal squared relaxation parameter.")(
"field-charac,p", po::value<int>(&p)->default_value(11),
@@ -128,17 +128,17 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
- std::cout << std::endl;
- std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
- std::cout << "of a Weak witness complex defined on a set of input points.\n \n";
- std::cout << "The output diagram contains one bar per line, written with the convention: \n";
- std::cout << " p dim b d \n";
- std::cout << "where dim is the dimension of the homological feature,\n";
- std::cout << "b and d are respectively the birth and death of the feature and \n";
- std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
-
- std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::cout << visible << std::endl;
+ std::clog << std::endl;
+ std::clog << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::clog << "of a Weak witness complex defined on a set of input points.\n \n";
+ std::clog << "The output diagram contains one bar per line, written with the convention: \n";
+ std::clog << " p dim b d \n";
+ std::clog << "where dim is the dimension of the homological feature,\n";
+ std::clog << "b and d are respectively the birth and death of the feature and \n";
+ std::clog << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::clog << visible << std::endl;
exit(-1);
}
}
diff --git a/src/Witness_complex/utilities/witnesscomplex.md b/src/Witness_complex/utilities/witnesscomplex.md
index 7ea397b9..3a3a7d83 100644
--- a/src/Witness_complex/utilities/witnesscomplex.md
+++ b/src/Witness_complex/utilities/witnesscomplex.md
@@ -29,7 +29,7 @@ and `p` is the characteristic of the field *Z/pZ* used for homology coefficients
* `-h [ --help ]` Produce help message
* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
-* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in std::cout.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in std::clog.
* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
@@ -60,7 +60,7 @@ and `p` is the characteristic of the field *Z/pZ* used for homology coefficients
* `-h [ --help ]` Produce help message
* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
-* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in std::cout.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in std::clog.
* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
diff --git a/src/cmake/modules/GUDHI_compilation_flags.cmake b/src/cmake/modules/GUDHI_compilation_flags.cmake
index 34c2e065..567fbc40 100644
--- a/src/cmake/modules/GUDHI_compilation_flags.cmake
+++ b/src/cmake/modules/GUDHI_compilation_flags.cmake
@@ -1,7 +1,6 @@
# This files manage compilation flags required by GUDHI
include(TestCXXAcceptsFlag)
-include(CheckCXXSourceCompiles)
# add a compiler flag only if it is accepted
macro(add_cxx_compiler_flag _flag)
@@ -12,32 +11,6 @@ macro(add_cxx_compiler_flag _flag)
endif()
endmacro()
-function(can_cgal_use_cxx11_thread_local)
- # This is because of https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/tss.h
- # CGAL is using boost thread if thread_local is not ready (requires XCode 8 for Mac).
- # The test in https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/config.h
- # #if __has_feature(cxx_thread_local) || \
- # ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
- # ( _MSC_VER >= 1900 )
- # #define CGAL_CAN_USE_CXX11_THREAD_LOCAL
- # #endif
- set(CGAL_CAN_USE_CXX11_THREAD_LOCAL "
- int main() {
- #ifndef __has_feature
- #define __has_feature(x) 0 // Compatibility with non-clang compilers.
- #endif
- #if __has_feature(cxx_thread_local) || \
- ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
- ( _MSC_VER >= 1900 )
- bool has_feature_thread_local = true;
- #else
- // Explicit error of compilation for CMake test purpose - has_feature_thread_local is not defined
- #endif
- bool result = has_feature_thread_local;
- } ")
- check_cxx_source_compiles("${CGAL_CAN_USE_CXX11_THREAD_LOCAL}" CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
-endfunction()
-
set (CMAKE_CXX_STANDARD 14)
enable_testing()
@@ -58,16 +31,6 @@ if (DEBUG_TRACES)
add_definitions(-DDEBUG_TRACES)
endif()
-set(GUDHI_CAN_USE_CXX11_THREAD_LOCAL "
- int main() {
- thread_local int result = 0;
- return result;
- } ")
-check_cxx_source_compiles("${GUDHI_CAN_USE_CXX11_THREAD_LOCAL}" GUDHI_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
-if (GUDHI_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
- add_definitions(-DGUDHI_CAN_USE_CXX11_THREAD_LOCAL)
-endif()
-
if(CMAKE_BUILD_TYPE MATCHES Debug)
message("++ Debug compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
else()
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index 2d010483..0abe66b7 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -150,6 +150,25 @@ function( find_python_module PYTHON_MODULE_NAME )
endif()
endfunction( find_python_module )
+# For modules that do not define module.__version__
+function( find_python_module_no_version PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+    # The import succeeded, so the module is available
+    message ("++ Python module ${PYTHON_MODULE_NAME} found")
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
+ - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
+ - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+endfunction( find_python_module_no_version )
+
if( PYTHONINTERP_FOUND )
find_python_module("cython")
find_python_module("pytest")
@@ -160,6 +179,10 @@ if( PYTHONINTERP_FOUND )
find_python_module("sklearn")
find_python_module("ot")
find_python_module("pybind11")
+ find_python_module("torch")
+ find_python_module("pykeops")
+ find_python_module("eagerpy")
+ find_python_module_no_version("hnswlib")
endif()
if(NOT GUDHI_PYTHON_PATH)
diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake
index 257d1939..9cf648e3 100644
--- a/src/cmake/modules/GUDHI_user_version_target.cmake
+++ b/src/cmake/modules/GUDHI_user_version_target.cmake
@@ -26,8 +26,12 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
# Generate bib files for Doxygen - cf. root CMakeLists.txt for explanation
string(TIMESTAMP GUDHI_VERSION_YEAR "%Y")
configure_file(${CMAKE_SOURCE_DIR}/biblio/how_to_cite_gudhi.bib.in "${CMAKE_CURRENT_BINARY_DIR}/biblio/how_to_cite_gudhi.bib" @ONLY)
-file(COPY "${CMAKE_SOURCE_DIR}/biblio/how_to_cite_cgal.bib" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/biblio/")
file(COPY "${CMAKE_SOURCE_DIR}/biblio/bibliography.bib" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/biblio/")
+
+# append cgal citation inside bibliography - sphinx cannot deal with more than one bib file
+file(READ "${CMAKE_SOURCE_DIR}/biblio/how_to_cite_cgal.bib" CGAL_CITATION_CONTENT)
+file(APPEND "${CMAKE_CURRENT_BINARY_DIR}/biblio/bibliography.bib" "${CGAL_CITATION_CONTENT}")
+
# Copy biblio directory for user version
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_CURRENT_BINARY_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
diff --git a/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp b/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp
index 0fc145fd..6fe7a887 100644
--- a/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp
+++ b/src/common/benchmark/Graph_simplicial_complex_benchmark.cpp
@@ -66,7 +66,7 @@ void benchmark_proximity_graph(const std::string& msg, const std::string& off_fi
Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file_name);
assert(off_reader.is_valid());
- std::cout << "+ " << msg << std::endl;
+ std::clog << "+ " << msg << std::endl;
results_csv << "\"nb_points\";"
<< "\"nb_simplices\";"
@@ -82,7 +82,7 @@ void benchmark_proximity_graph(const std::string& msg, const std::string& off_fi
Gudhi::Euclidean_distance());
// benchmark end
pg_compute_proximity_graph.end();
- std::cout << pg_compute_proximity_graph;
+ std::clog << pg_compute_proximity_graph;
Gudhi::Simplex_tree<> complex;
Gudhi::Clock st_create_clock(" benchmark_proximity_graph - complex creation");
@@ -91,13 +91,13 @@ void benchmark_proximity_graph(const std::string& msg, const std::string& off_fi
complex.insert_graph(proximity_graph);
// benchmark end
st_create_clock.end();
- std::cout << st_create_clock;
+ std::clog << st_create_clock;
results_csv << off_reader.get_point_cloud().size() << ";" << complex.num_simplices() << ";"
<< pg_compute_proximity_graph.num_seconds() << ";"
<< st_create_clock.num_seconds() << ";" << std::endl;
- std::cout << " benchmark_proximity_graph - nb simplices = " << complex.num_simplices() << std::endl;
+ std::clog << " benchmark_proximity_graph - nb simplices = " << complex.num_simplices() << std::endl;
}
int main(int argc, char * const argv[]) {
diff --git a/src/common/example/example_CGAL_3D_points_off_reader.cpp b/src/common/example/example_CGAL_3D_points_off_reader.cpp
index 4658d8d5..7f4343f0 100644
--- a/src/common/example/example_CGAL_3D_points_off_reader.cpp
+++ b/src/common/example/example_CGAL_3D_points_off_reader.cpp
@@ -35,7 +35,7 @@ int main(int argc, char **argv) {
int n {};
for (auto point : point_cloud) {
++n;
- std::cout << "Point[" << n << "] = (" << point[0] << ", " << point[1] << ", " << point[2] << ")\n";
+ std::clog << "Point[" << n << "] = (" << point[0] << ", " << point[1] << ", " << point[2] << ")\n";
}
return 0;
}
diff --git a/src/common/example/example_CGAL_points_off_reader.cpp b/src/common/example/example_CGAL_points_off_reader.cpp
index f45683a5..b2bcdbcf 100644
--- a/src/common/example/example_CGAL_points_off_reader.cpp
+++ b/src/common/example/example_CGAL_points_off_reader.cpp
@@ -36,10 +36,10 @@ int main(int argc, char **argv) {
int n {};
for (auto point : point_cloud) {
- std::cout << "Point[" << n << "] = ";
+ std::clog << "Point[" << n << "] = ";
for (std::size_t i {0}; i < point.size(); i++)
- std::cout << point[i] << " ";
- std::cout << "\n";
+ std::clog << point[i] << " ";
+ std::clog << "\n";
++n;
}
return 0;
diff --git a/src/common/include/gudhi/Clock.h b/src/common/include/gudhi/Clock.h
index 00ab2f27..6966aaaa 100644
--- a/src/common/include/gudhi/Clock.h
+++ b/src/common/include/gudhi/Clock.h
@@ -41,9 +41,9 @@ class Clock {
return msg;
}
- // Print current value to std::cout
+ // Print current value to std::clog
void print() const {
- std::cout << *this << std::endl;
+ std::clog << *this << std::endl;
}
friend std::ostream& operator<<(std::ostream& stream, const Clock& clock) {
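
Clock::print() now also routes through std::clog. A minimal usage sketch, assuming only the members visible in this commit (the message constructor, end(), num_seconds(), print() and the stream operator); the message string is illustrative:

#include <gudhi/Clock.h>
#include <iostream>

int main() {
  Gudhi::Clock clock("some timed section");  // illustrative message
  // ... the work being timed would go here ...
  clock.end();
  clock.print();  // equivalent to: std::clog << clock << std::endl;
  std::clog << "elapsed seconds = " << clock.num_seconds() << std::endl;
  return 0;
}
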
diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h
index 38abc06d..f8375b00 100644
--- a/src/common/include/gudhi/Debug_utils.h
+++ b/src/common/include/gudhi/Debug_utils.h
@@ -27,14 +27,14 @@
#define GUDHI_CHECK_code(CODE)
#endif
-#define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl
+#define PRINT(a) std::clog << #a << ": " << (a) << " (DISP)" << std::endl
// #define DBG_VERBOSE
#ifdef DBG_VERBOSE
- #define DBG(a) std::cout << "DBG: " << (a) << std::endl
- #define DBGMSG(a, b) std::cout << "DBG: " << a << b << std::endl
- #define DBGVALUE(a) std::cout << "DBG: " << #a << ": " << a << std::endl
- #define DBGCONT(a) std::cout << "DBG: container " << #a << " -> "; for (auto x : a) std::cout << x << ","; std::cout << std::endl
+ #define DBG(a) std::clog << "DBG: " << (a) << std::endl
+ #define DBGMSG(a, b) std::clog << "DBG: " << a << b << std::endl
+ #define DBGVALUE(a) std::clog << "DBG: " << #a << ": " << a << std::endl
+ #define DBGCONT(a) std::clog << "DBG: container " << #a << " -> "; for (auto x : a) std::clog << x << ","; std::clog << std::endl
#else
#define DBG(a) (void) 0
#define DBGMSG(a, b) (void) 0
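
These DBG* macros expand to std::clog statements only when DBG_VERBOSE is defined and compile to no-ops otherwise. A small sketch of what the value and container macros emit, assuming DBG_VERBOSE is enabled before the header is included (the vector and its contents are made up):

#define DBG_VERBOSE  // assumption: enable the verbose branch shown above
#include <gudhi/Debug_utils.h>
#include <iostream>
#include <vector>

int main() {
  std::vector<int> vertices = {1, 2, 3};
  DBGVALUE(vertices.size());  // prints: DBG: vertices.size(): 3
  DBGCONT(vertices);          // prints: DBG: container vertices -> 1,2,3,
  return 0;
}
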
diff --git a/src/common/include/gudhi/Points_3D_off_io.h b/src/common/include/gudhi/Points_3D_off_io.h
index 2d110af3..39b79c96 100644
--- a/src/common/include/gudhi/Points_3D_off_io.h
+++ b/src/common/include/gudhi/Points_3D_off_io.h
@@ -41,7 +41,7 @@ class Points_3D_off_visitor_reader {
*/
void init(int dim, int num_vertices, int num_faces, int num_edges) {
#ifdef DEBUG_TRACES
- std::cout << "Points_3D_off_visitor_reader::init - dim=" << dim << " - num_vertices=" <<
+ std::clog << "Points_3D_off_visitor_reader::init - dim=" << dim << " - num_vertices=" <<
num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl;
#endif // DEBUG_TRACES
if (dim == 3) {
@@ -74,11 +74,11 @@ class Points_3D_off_visitor_reader {
void point(const std::vector<double>& point) {
if (valid_) {
#ifdef DEBUG_TRACES
- std::cout << "Points_3D_off_visitor_reader::point ";
+ std::clog << "Points_3D_off_visitor_reader::point ";
for (auto coordinate : point) {
- std::cout << coordinate << " | ";
+ std::clog << coordinate << " | ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
#endif // DEBUG_TRACES
// Fill the point cloud
point_cloud_.push_back(Point_3(point[0], point[1], point[2]));
diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h
index 99371d56..9dc40568 100644
--- a/src/common/include/gudhi/Points_off_io.h
+++ b/src/common/include/gudhi/Points_off_io.h
@@ -40,7 +40,7 @@ class Points_off_visitor_reader {
*/
void init(int dim, int num_vertices, int num_faces, int num_edges) {
#ifdef DEBUG_TRACES
- std::cout << "Points_off_visitor_reader::init - dim=" << dim << " - num_vertices=" <<
+ std::clog << "Points_off_visitor_reader::init - dim=" << dim << " - num_vertices=" <<
num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl;
#endif // DEBUG_TRACES
if (num_faces > 0) {
@@ -66,11 +66,11 @@ class Points_off_visitor_reader {
*/
void point(const std::vector<double>& point) {
#ifdef DEBUG_TRACES
- std::cout << "Points_off_visitor_reader::point ";
+ std::clog << "Points_off_visitor_reader::point ";
for (auto coordinate : point) {
- std::cout << coordinate << " | ";
+ std::clog << coordinate << " | ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
#endif // DEBUG_TRACES
// Fill the point cloud
point_cloud.push_back(Point_d(point.begin(), point.end()));
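
The visitor above fills the point cloud that Gudhi::Points_off_reader exposes. A minimal sketch of reading an OFF file with it, assuming only the members that appear elsewhere in this commit (is_valid() and get_point_cloud()); the file name is a placeholder:

#include <gudhi/Points_off_io.h>
#include <iostream>
#include <vector>

int main() {
  // "points.off" is a placeholder file name.
  Gudhi::Points_off_reader<std::vector<double>> off_reader("points.off");
  if (!off_reader.is_valid()) {
    std::cerr << "Unable to read points.off\n";
    return 1;
  }
  for (const auto& point : off_reader.get_point_cloud()) {
    std::clog << "point =";
    for (double coordinate : point) std::clog << " " << coordinate;
    std::clog << "\n";
  }
  return 0;
}
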
diff --git a/src/common/include/gudhi/Unitary_tests_utils.h b/src/common/include/gudhi/Unitary_tests_utils.h
index 9b86460a..9f995d01 100644
--- a/src/common/include/gudhi/Unitary_tests_utils.h
+++ b/src/common/include/gudhi/Unitary_tests_utils.h
@@ -20,7 +20,7 @@ template<typename FloatingType >
void GUDHI_TEST_FLOAT_EQUALITY_CHECK(FloatingType a, FloatingType b,
FloatingType epsilon = std::numeric_limits<FloatingType>::epsilon()) {
#ifdef DEBUG_TRACES
- std::cout << "GUDHI_TEST_FLOAT_EQUALITY_CHECK - " << a << " versus " << b
+ std::clog << "GUDHI_TEST_FLOAT_EQUALITY_CHECK - " << a << " versus " << b
<< " | diff = " << std::fabs(a - b) << " - epsilon = " << epsilon << std::endl;
#endif
BOOST_CHECK(std::fabs(a - b) <= epsilon);
@@ -32,7 +32,7 @@ template<typename FloatingType >
FloatingType GUDHI_PROTECT_FLOAT(FloatingType value) {
volatile FloatingType protected_value = value;
#ifdef DEBUG_TRACES
- std::cout << "GUDHI_PROTECT_FLOAT - " << protected_value << std::endl;
+ std::clog << "GUDHI_PROTECT_FLOAT - " << protected_value << std::endl;
#endif
return protected_value;
}
diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h
index 94cf9ccc..9bbc62b7 100644
--- a/src/common/include/gudhi/distance_functions.h
+++ b/src/common/include/gudhi/distance_functions.h
@@ -97,7 +97,7 @@ class Minimal_enclosing_ball_radius {
Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end());
#ifdef DEBUG_TRACES
- std::cout << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = "
+ std::clog << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = "
<< boost::size(point_cloud) << " | dimension = "
<< boost::size(*point_cloud.begin()) << std::endl;
#endif // DEBUG_TRACES
diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h
index db31bf5c..0938f5c1 100644
--- a/src/common/include/gudhi/reader_utils.h
+++ b/src/common/include/gudhi/reader_utils.h
@@ -220,7 +220,7 @@ template <typename Filtration_value>
std::vector<std::vector<Filtration_value>> read_lower_triangular_matrix_from_csv_file(const std::string& filename,
const char separator = ';') {
#ifdef DEBUG_TRACES
- std::cout << "Using procedure read_lower_triangular_matrix_from_csv_file \n";
+ std::clog << "Using procedure read_lower_triangular_matrix_from_csv_file \n";
#endif // DEBUG_TRACES
std::vector<std::vector<Filtration_value>> result;
std::ifstream in;
@@ -272,12 +272,12 @@ std::vector<std::vector<Filtration_value>> read_lower_triangular_matrix_from_csv
in.close();
#ifdef DEBUG_TRACES
- std::cerr << "Here is the matrix we read : \n";
+ std::clog << "Here is the matrix we read : \n";
for (size_t i = 0; i != result.size(); ++i) {
for (size_t j = 0; j != result[i].size(); ++j) {
- std::cerr << result[i][j] << " ";
+ std::clog << result[i][j] << " ";
}
- std::cerr << std::endl;
+ std::clog << std::endl;
}
#endif // DEBUG_TRACES
@@ -294,7 +294,7 @@ Note: the function does not check that birth <= death.
template <typename OutputIterator>
void read_persistence_intervals_and_dimension(std::string const& filename, OutputIterator out) {
#ifdef DEBUG_TRACES
- std::cout << "read_persistence_intervals_and_dimension - " << filename << std::endl;
+ std::clog << "read_persistence_intervals_and_dimension - " << filename << std::endl;
#endif // DEBUG_TRACES
std::ifstream in(filename);
if (!in.is_open()) {
@@ -311,11 +311,11 @@ void read_persistence_intervals_and_dimension(std::string const& filename, Outpu
double numbers[4];
int n = sscanf(line.c_str(), "%lf %lf %lf %lf", &numbers[0], &numbers[1], &numbers[2], &numbers[3]);
#ifdef DEBUG_TRACES
- std::cout << "[" << n << "] = ";
+ std::clog << "[" << n << "] = ";
for (int i = 0; i < n; i++) {
- std::cout << numbers[i] << ",";
+ std::clog << numbers[i] << ",";
}
- std::cout << std::endl;
+ std::clog << std::endl;
#endif // DEBUG_TRACES
if (n >= 2) {
int dim = (n >= 3 ? static_cast<int>(numbers[n - 3]) : -1);
diff --git a/src/common/test/test_distance_matrix_reader.cpp b/src/common/test/test_distance_matrix_reader.cpp
index bb619a29..73be8104 100644
--- a/src/common/test/test_distance_matrix_reader.cpp
+++ b/src/common/test/test_distance_matrix_reader.cpp
@@ -28,15 +28,15 @@ BOOST_AUTO_TEST_CASE( lower_triangular_distance_matrix )
',');
for (auto& i : from_lower_triangular) {
for (auto j : i) {
- std::cout << j << " ";
+ std::clog << j << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
- std::cout << "from_lower_triangular size = " << from_lower_triangular.size() << std::endl;
+ std::clog << "from_lower_triangular size = " << from_lower_triangular.size() << std::endl;
BOOST_CHECK(from_lower_triangular.size() == 5);
for (std::size_t i = 0; i < from_lower_triangular.size(); i++) {
- std::cout << "from_lower_triangular[" << i << "] size = " << from_lower_triangular[i].size() << std::endl;
+ std::clog << "from_lower_triangular[" << i << "] size = " << from_lower_triangular[i].size() << std::endl;
BOOST_CHECK(from_lower_triangular[i].size() == i);
}
std::vector<double> expected = {1};
@@ -60,14 +60,14 @@ BOOST_AUTO_TEST_CASE( full_square_distance_matrix )
from_full_square = Gudhi::read_lower_triangular_matrix_from_csv_file<double>("full_square_distance_matrix.csv");
for (auto& i : from_full_square) {
for (auto j : i) {
- std::cout << j << " ";
+ std::clog << j << " ";
}
- std::cout << std::endl;
+ std::clog << std::endl;
}
- std::cout << "from_full_square size = " << from_full_square.size() << std::endl;
+ std::clog << "from_full_square size = " << from_full_square.size() << std::endl;
BOOST_CHECK(from_full_square.size() == 7);
for (std::size_t i = 0; i < from_full_square.size(); i++) {
- std::cout << "from_full_square[" << i << "] size = " << from_full_square[i].size() << std::endl;
+ std::clog << "from_full_square[" << i << "] size = " << from_full_square[i].size() << std::endl;
BOOST_CHECK(from_full_square[i].size() == i);
}
}
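
Both tests call Gudhi::read_lower_triangular_matrix_from_csv_file, whose signature appears in the reader_utils.h hunk above (the separator defaults to ';'; the tests pass ','). A rough calling sketch with a placeholder file name:

#include <gudhi/reader_utils.h>
#include <iostream>
#include <vector>

int main() {
  // "distance_matrix.csv" is a placeholder; use the separator your file uses.
  std::vector<std::vector<double>> distances =
      Gudhi::read_lower_triangular_matrix_from_csv_file<double>("distance_matrix.csv", ';');

  std::clog << "read " << distances.size() << " rows\n";
  for (std::size_t i = 0; i < distances.size(); ++i)
    std::clog << "row " << i << " has " << distances[i].size() << " entries\n";
  return 0;
}
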
diff --git a/src/common/test/test_persistence_intervals_reader.cpp b/src/common/test/test_persistence_intervals_reader.cpp
index 8fb4377d..ac8d0981 100644
--- a/src/common/test/test_persistence_intervals_reader.cpp
+++ b/src/common/test/test_persistence_intervals_reader.cpp
@@ -35,18 +35,18 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_without_dimension )
Persistence_intervals_by_dimension persistence_intervals_by_dimension =
Gudhi::read_persistence_intervals_grouped_by_dimension("persistence_intervals_without_dimension.pers");
- std::cout << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
+ std::clog << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
for (auto map_iter : expected_intervals_by_dimension) {
- std::cout << "key=" << map_iter.first;
+ std::clog << "key=" << map_iter.first;
for (auto vec_iter : map_iter.second)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
}
- std::cout << "\nread_persistence_intervals_grouped_by_dimension - read\n";
+ std::clog << "\nread_persistence_intervals_grouped_by_dimension - read\n";
for (auto map_iter : persistence_intervals_by_dimension) {
- std::cout << "key=" << map_iter.first;
+ std::clog << "key=" << map_iter.first;
for (auto vec_iter : map_iter.second)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
}
BOOST_CHECK(persistence_intervals_by_dimension == expected_intervals_by_dimension);
@@ -60,13 +60,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_without_dimension )
Persistence_intervals persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers");
- std::cout << "\nread_persistence_intervals_in_dimension - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension - read\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -103,18 +103,18 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
Persistence_intervals_by_dimension persistence_intervals_by_dimension =
Gudhi::read_persistence_intervals_grouped_by_dimension("persistence_intervals_with_dimension.pers");
- std::cout << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
+ std::clog << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
for (auto map_iter : expected_intervals_by_dimension) {
- std::cout << "key=" << map_iter.first;
+ std::clog << "key=" << map_iter.first;
for (auto vec_iter : map_iter.second)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
}
- std::cout << "\nread_persistence_intervals_grouped_by_dimension - read\n";
+ std::clog << "\nread_persistence_intervals_grouped_by_dimension - read\n";
for (auto map_iter : persistence_intervals_by_dimension) {
- std::cout << "key=" << map_iter.first;
+ std::clog << "key=" << map_iter.first;
for (auto vec_iter : map_iter.second)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
}
BOOST_CHECK(persistence_intervals_by_dimension == expected_intervals_by_dimension);
@@ -128,13 +128,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
Persistence_intervals persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers");
- std::cout << "\nread_persistence_intervals_in_dimension - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -143,13 +143,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 0);
- std::cout << "\nread_persistence_intervals_in_dimension 0 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 0 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 0 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 0 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -159,13 +159,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 1);
- std::cout << "\nread_persistence_intervals_in_dimension 1 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 1 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 1 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 1 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -173,13 +173,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 2);
- std::cout << "\nread_persistence_intervals_in_dimension 2 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 2 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 2 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 2 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -188,13 +188,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 3);
- std::cout << "\nread_persistence_intervals_in_dimension 3 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 3 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 3 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 3 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -212,18 +212,18 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
Persistence_intervals_by_dimension persistence_intervals_by_dimension =
Gudhi::read_persistence_intervals_grouped_by_dimension("persistence_intervals_with_field.pers");
- std::cout << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
+ std::clog << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
for (auto map_iter : expected_intervals_by_dimension) {
- std::cout << "key=" << map_iter.first;
+ std::clog << "key=" << map_iter.first;
for (auto vec_iter : map_iter.second)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
}
- std::cout << "\nread_persistence_intervals_grouped_by_dimension - read\n";
+ std::clog << "\nread_persistence_intervals_grouped_by_dimension - read\n";
for (auto map_iter : persistence_intervals_by_dimension) {
- std::cout << "key=" << map_iter.first;
+ std::clog << "key=" << map_iter.first;
for (auto vec_iter : map_iter.second)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
}
BOOST_CHECK(persistence_intervals_by_dimension == expected_intervals_by_dimension);
@@ -237,13 +237,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
Persistence_intervals persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers");
- std::cout << "\nread_persistence_intervals_in_dimension - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -252,13 +252,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 0);
- std::cout << "\nread_persistence_intervals_in_dimension 0 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 0 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 0 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 0 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -268,13 +268,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 1);
- std::cout << "\nread_persistence_intervals_in_dimension 1 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 1 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 1 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 1 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -282,13 +282,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 2);
- std::cout << "\nread_persistence_intervals_in_dimension 2 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 2 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 2 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 2 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
@@ -297,13 +297,13 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
persistence_intervals_in_dimension =
Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 3);
- std::cout << "\nread_persistence_intervals_in_dimension 3 - expected\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 3 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
- std::cout << "\nread_persistence_intervals_in_dimension 3 - read\n";
+ std::clog << "\nread_persistence_intervals_in_dimension 3 - read\n";
for (auto vec_iter : persistence_intervals_in_dimension)
- std::cout << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
+ std::clog << " [" << vec_iter.first << " ," << vec_iter.second << "] ";
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 22af3ec9..055d5b23 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -78,6 +78,19 @@ if(PYTHONINTERP_FOUND)
if(OT_FOUND)
add_gudhi_debug_info("POT version ${OT_VERSION}")
endif()
+ if(HNSWLIB_FOUND)
+ # Does not have a version number...
+ add_gudhi_debug_info("HNSWlib found")
+ endif()
+ if(TORCH_FOUND)
+ add_gudhi_debug_info("PyTorch version ${TORCH_VERSION}")
+ endif()
+ if(PYKEOPS_FOUND)
+ add_gudhi_debug_info("PyKeOps version ${PYKEOPS_VERSION}")
+ endif()
+ if(EAGERPY_FOUND)
+ add_gudhi_debug_info("EagerPy version ${EAGERPY_VERSION}")
+ endif()
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
@@ -128,16 +141,6 @@ if(PYTHONINTERP_FOUND)
endif ()
if(CGAL_FOUND)
- can_cgal_use_cxx11_thread_local()
- if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
- if(CMAKE_BUILD_TYPE MATCHES Debug)
- add_GUDHI_PYTHON_lib("${Boost_THREAD_LIBRARY_DEBUG}")
- else()
- add_GUDHI_PYTHON_lib("${Boost_THREAD_LIBRARY_RELEASE}")
- endif()
- message("** Add Boost ${Boost_LIBRARY_DIRS}")
- set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
- endif()
# Add CGAL compilation args
if(CGAL_HEADER_ONLY)
add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}")
@@ -226,7 +229,7 @@ if(PYTHONINTERP_FOUND)
# Other .py files
file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
file(COPY "gudhi/representations" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi/")
- file(COPY "gudhi/wasserstein.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
+ file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
add_custom_command(
@@ -239,6 +242,71 @@ if(PYTHONINTERP_FOUND)
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py install)")
+ # Documentation generation is available through sphinx - requires all modules
+ # Make it first, as the sphinx test is by far the longest one, which is nice when testing in parallel
+ if(SPHINX_PATH)
+ if(MATPLOTLIB_FOUND)
+ if(NUMPY_FOUND)
+ if(SCIPY_FOUND)
+ if(SKLEARN_FOUND)
+ if(OT_FOUND)
+ if(PYBIND11_FOUND)
+ if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
+ # User warning - Sphinx is a static page generator, configured to work fine with user_version
+ # Image and bibliography warnings appear because those files are not found in the developer version
+ if (GUDHI_PYTHON_PATH STREQUAL "src/python")
+ set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for the user version, but you are running it on the developer version. Images and bibliography will be missing")
+ endif()
+ # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
+ DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
+
+ add_test(NAME sphinx_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
+
+ # Set missing or not modules
+ set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES")
+ else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ else(PYBIND11_FOUND)
+ message("++ Python documentation module will not be compiled because pybind11 was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(PYBIND11_FOUND)
+ else(OT_FOUND)
+ message("++ Python documentation module will not be compiled because POT was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(OT_FOUND)
+ else(SKLEARN_FOUND)
+ message("++ Python documentation module will not be compiled because scikit-learn was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(SKLEARN_FOUND)
+ else(SCIPY_FOUND)
+ message("++ Python documentation module will not be compiled because scipy was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(SCIPY_FOUND)
+ else(NUMPY_FOUND)
+ message("++ Python documentation module will not be compiled because numpy was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(NUMPY_FOUND)
+ else(MATPLOTLIB_FOUND)
+ message("++ Python documentation module will not be compiled because matplotlib was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(MATPLOTLIB_FOUND)
+ else(SPHINX_PATH)
+ message("++ Python documentation module will not be compiled because sphinx and sphinxcontrib-bibtex were not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(SPHINX_PATH)
+
+
# Test examples
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
# Bottleneck and Alpha
@@ -399,6 +467,7 @@ if(PYTHONINTERP_FOUND)
# Wasserstein
if(OT_FOUND AND PYBIND11_FOUND)
add_gudhi_py_test(test_wasserstein_distance)
+ add_gudhi_py_test(test_wasserstein_barycenter)
endif()
# Representations
@@ -409,69 +478,11 @@ if(PYTHONINTERP_FOUND)
# Time Delay
add_gudhi_py_test(test_time_delay)
- # Documentation generation is available through sphinx - requires all modules
- if(SPHINX_PATH)
- if(MATPLOTLIB_FOUND)
- if(NUMPY_FOUND)
- if(SCIPY_FOUND)
- if(SKLEARN_FOUND)
- if(OT_FOUND)
- if(PYBIND11_FOUND)
- if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
- # User warning - Sphinx is a static pages generator, and configured to work fine with user_version
- # Images and biblio warnings because not found on developper version
- if (GUDHI_PYTHON_PATH STREQUAL "src/python")
- set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for user version, you run it on developper version. Images and biblio will miss")
- endif()
- # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
- add_custom_target(sphinx
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
- DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
- COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
-
- add_test(NAME sphinx_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
-
- # Set missing or not modules
- set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES")
- else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- else(PYBIND11_FOUND)
- message("++ Python documentation module will not be compiled because pybind11 was not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(PYBIND11_FOUND)
- else(OT_FOUND)
- message("++ Python documentation module will not be compiled because POT was not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(OT_FOUND)
- else(SKLEARN_FOUND)
- message("++ Python documentation module will not be compiled because scikit-learn was not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(SKLEARN_FOUND)
- else(SCIPY_FOUND)
- message("++ Python documentation module will not be compiled because scipy was not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(SCIPY_FOUND)
- else(NUMPY_FOUND)
- message("++ Python documentation module will not be compiled because numpy was not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(NUMPY_FOUND)
- else(MATPLOTLIB_FOUND)
- message("++ Python documentation module will not be compiled because matplotlib was not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(MATPLOTLIB_FOUND)
- else(SPHINX_PATH)
- message("++ Python documentation module will not be compiled because sphinx and sphinxcontrib-bibtex were not found")
- set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(SPHINX_PATH)
-
+ # DTM
+ if(SCIPY_FOUND AND SKLEARN_FOUND AND TORCH_FOUND AND HNSWLIB_FOUND AND PYKEOPS_FOUND AND EAGERPY_FOUND)
+ add_gudhi_py_test(test_knn)
+ add_gudhi_py_test(test_dtm)
+ endif()
# Set missing or not modules
set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES")
diff --git a/src/python/doc/alpha_complex_sum.inc b/src/python/doc/alpha_complex_sum.inc
index b5af0d27..9e6414d0 100644
--- a/src/python/doc/alpha_complex_sum.inc
+++ b/src/python/doc/alpha_complex_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
| .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau |
| ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | |
- | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 |
+ | :alt: Alpha complex representation | | :Since: GUDHI 2.0.0 |
| :figclass: align-center | The filtration value of each simplex is computed as the **square** of | |
- | | the circumradius of the simplex if the circumsphere is empty (the | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | the circumradius of the simplex if the circumsphere is empty (the | :License: MIT (`GPL v3 </licensing/>`_) |
| | simplex is then said to be Gabriel), and as the minimum of the | |
| | filtration values of the codimension 1 cofaces that make it not | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
| | Gabriel otherwise. | |
diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst
index 60319e84..a3b35c10 100644
--- a/src/python/doc/alpha_complex_user.rst
+++ b/src/python/doc/alpha_complex_user.rst
@@ -10,7 +10,7 @@ Definition
.. include:: alpha_complex_sum.inc
`AlphaComplex` is constructing a :doc:`SimplexTree <simplex_tree_ref>` using
-`Delaunay Triangulation <http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations>`_
+`Delaunay Triangulation <http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations>`_
:cite:`cgal:hdj-t-19b` from `CGAL <http://www.cgal.org/>`_ (the Computational Geometry Algorithms Library
:cite:`cgal:eb-19b`).
@@ -203,9 +203,3 @@ the program output is:
[4, 5, 6] -> 22.74
[3, 6] -> 30.25
-CGAL citations
-==============
-
-.. bibliography:: ../../biblio/how_to_cite_cgal.bib
- :filter: docnames
- :style: unsrt
diff --git a/src/python/doc/bottleneck_distance_sum.inc b/src/python/doc/bottleneck_distance_sum.inc
index 6eb0ac19..0de4625c 100644
--- a/src/python/doc/bottleneck_distance_sum.inc
+++ b/src/python/doc/bottleneck_distance_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
| .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi |
| ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | |
- | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Since: GUDHI 2.0.0 |
| | diagonal points) such that any couple of matched points are at | |
- | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :License: MIT (`GPL v3 </licensing/>`_) |
| the longest edge | norm in :math:`\mathbb{R}^2`. | |
| | | :Requires: `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
diff --git a/src/python/doc/bottleneck_distance_user.rst b/src/python/doc/bottleneck_distance_user.rst
index 9435c7f1..89da89d3 100644
--- a/src/python/doc/bottleneck_distance_user.rst
+++ b/src/python/doc/bottleneck_distance_user.rst
@@ -65,3 +65,4 @@ The output is:
Bottleneck distance approximation = 0.81
Bottleneck distance value = 0.75
+
diff --git a/src/python/doc/cubical_complex_sum.inc b/src/python/doc/cubical_complex_sum.inc
index f200e695..28bf8e94 100644
--- a/src/python/doc/cubical_complex_sum.inc
+++ b/src/python/doc/cubical_complex_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+
| .. figure:: | The cubical complex is an example of a structured complex useful in | :Author: Pawel Dlotko |
| ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | |
- | :alt: Cubical complex representation | analysis. | :Introduced in: GUDHI 2.0.0 |
+ | :alt: Cubical complex representation | analysis. | :Since: GUDHI 2.0.0 |
| :figclass: align-center | | |
- | | | :Copyright: MIT |
+ | | | :License: MIT |
| | | |
+--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+
| * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
diff --git a/src/python/doc/cubical_complex_user.rst b/src/python/doc/cubical_complex_user.rst
index 56cf0170..e4733653 100644
--- a/src/python/doc/cubical_complex_user.rst
+++ b/src/python/doc/cubical_complex_user.rst
@@ -8,7 +8,7 @@ Definition
----------
===================================== ===================================== =====================================
-:Author: Pawel Dlotko :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3
+:Author: Pawel Dlotko :Since: GUDHI PYTHON 2.0.0 :License: GPL v3
===================================== ===================================== =====================================
+---------------------------------------------+----------------------------------------------------------------------+
@@ -158,10 +158,3 @@ Examples.
---------
End user programs are available in python/example/ folder.
-
-Bibliography
-============
-
-.. bibliography:: ../../biblio/bibliography.bib
- :filter: docnames
- :style: unsrt
diff --git a/src/python/doc/img/barycenter.png b/src/python/doc/img/barycenter.png
new file mode 100644
index 00000000..cad6af70
--- /dev/null
+++ b/src/python/doc/img/barycenter.png
Binary files differ
diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst
index 3387a64f..13e51047 100644
--- a/src/python/doc/index.rst
+++ b/src/python/doc/index.rst
@@ -71,6 +71,7 @@ Wasserstein distance
.. include:: wasserstein_distance_sum.inc
+
Persistence representations
===========================
@@ -85,10 +86,3 @@ Point cloud utilities
*********************
.. include:: point_cloud_sum.inc
-
-Bibliography
-************
-
-.. bibliography:: ../../biblio/bibliography.bib
- :filter: docnames
- :style: unsrt
diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst
index d459145b..09a843d5 100644
--- a/src/python/doc/installation.rst
+++ b/src/python/doc/installation.rst
@@ -175,8 +175,8 @@ Documentation
To build the documentation, `sphinx-doc <http://www.sphinx-doc.org>`_ and
`sphinxcontrib-bibtex <https://sphinxcontrib-bibtex.readthedocs.io>`_ are
required. As the documentation is auto-tested, `CGAL`_, `Eigen`_,
-`Matplotlib`_, `NumPy`_ and `SciPy`_ are also mandatory to build the
-documentation.
+`Matplotlib`_, `NumPy`_, `POT`_, `Scikit-learn`_ and `SciPy`_ are
+also mandatory to build the documentation.
Run the following commands in a terminal:
@@ -192,8 +192,8 @@ CGAL
====
Some GUDHI modules (cf. :doc:`modules list </index>`), and few examples
-require CGAL, a C++ library that provides easy access to efficient and
-reliable geometric algorithms.
+require `CGAL <https://www.cgal.org/>`_, a C++ library that provides easy
+access to efficient and reliable geometric algorithms.
The procedure to install this library
@@ -211,6 +211,14 @@ The following examples requires CGAL version ≥ 4.11.0:
* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
* :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+EagerPy
+=======
+
+Some Python functions can handle automatic differentiation (possibly only when
+a flag `enable_autodiff=True` is used). In order to reduce code duplication, we
+use `EagerPy <https://eagerpy.jonasrauber.de/>`_, which wraps arrays from
+PyTorch, TensorFlow and JAX in a common interface.
+
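+For instance, a rough sketch of the intended use, assuming
+:func:`gudhi.wasserstein.wasserstein_distance` accepts `enable_autodiff=True`
+and then returns a value that is differentiable with respect to its inputs:
+
+.. code-block:: python
+
+    import torch
+    from gudhi.wasserstein import wasserstein_distance
+
+    # Two small diagrams as (n x 2) tensors; gradients are requested on the first one.
+    dgm1 = torch.tensor([[0.0, 1.0], [0.5, 2.0]], requires_grad=True)
+    dgm2 = torch.tensor([[0.0, 1.5]])
+
+    dist = wasserstein_distance(dgm1, dgm2, order=1, internal_p=2, enable_autodiff=True)
+    dist.backward()  # gradients with respect to the points of dgm1
+    print(dgm1.grad)
+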
Eigen
=====
@@ -229,6 +237,13 @@ The following examples require `Eigen <http://eigen.tuxfamily.org/>`_ version ≥
* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
* :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+Hnswlib
+=======
+
+:class:`~gudhi.point_cloud.knn.KNearestNeighbors` can use the Python package
+`Hnswlib <https://github.com/nmslib/hnswlib>`_ as a backend if explicitly
+requested, to speed up queries.
+
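+A minimal sketch, assuming the `implementation` keyword selects the backend and
+the class follows a scikit-learn-like fit/transform interface:
+
+.. code-block:: python
+
+    import numpy as np
+    from gudhi.point_cloud.knn import KNearestNeighbors
+
+    pts = np.random.rand(100, 3)
+    # Indices of the 5 nearest neighbors of each point, computed by the Hnswlib backend.
+    knn = KNearestNeighbors(k=5, return_index=True, return_distance=False,
+                            implementation="hnsw")
+    knn.fit(pts)
+    neighbors = knn.transform(pts)
+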
Matplotlib
==========
@@ -251,6 +266,13 @@ The following examples require the `Matplotlib <http://matplotlib.org>`_:
* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
* :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+PyKeOps
+=======
+
+:class:`~gudhi.point_cloud.knn.KNearestNeighbors` can use the Python package
+`PyKeOps <https://www.kernel-operations.io/keops/python/>`_ as a backend if
+explicitly requested, to speed up queries using a GPU.
+
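+The selection is similar to the Hnswlib case above, still assuming the
+`implementation` keyword:
+
+.. code-block:: python
+
+    import numpy as np
+    from gudhi.point_cloud.knn import KNearestNeighbors
+
+    pts = np.random.rand(100, 3)
+    # Same kind of query, delegated to PyKeOps (which can offload the work to a GPU).
+    knn = KNearestNeighbors(k=5, return_index=True, return_distance=False,
+                            implementation="keops")
+    knn.fit(pts)
+    neighbors = knn.transform(pts)
+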
Python Optimal Transport
========================
@@ -258,6 +280,12 @@ The :doc:`Wasserstein distance </wasserstein_distance_user>`
module requires `POT <https://pot.readthedocs.io/>`_, a library that provides
several solvers for optimization problems related to Optimal Transport.
+PyTorch
+=======
+
+`PyTorch <https://pytorch.org/>`_ is currently only used as a dependency of
+`PyKeOps`_, and in some tests.
+
Scikit-learn
============
diff --git a/src/python/doc/nerve_gic_complex_sum.inc b/src/python/doc/nerve_gic_complex_sum.inc
index d633c4ff..7fe55aff 100644
--- a/src/python/doc/nerve_gic_complex_sum.inc
+++ b/src/python/doc/nerve_gic_complex_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+
| .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière |
| ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | |
- | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 |
+ | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Since: GUDHI 2.3.0 |
| :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | |
- | | the image of a scalar-valued function defined on the data. | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | the image of a scalar-valued function defined on the data. | :License: MIT (`GPL v3 </licensing/>`_) |
| | | |
| | | :Requires: `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
| | | |
diff --git a/src/python/doc/persistence_graphical_tools_sum.inc b/src/python/doc/persistence_graphical_tools_sum.inc
index ef376802..b68d3d7e 100644
--- a/src/python/doc/persistence_graphical_tools_sum.inc
+++ b/src/python/doc/persistence_graphical_tools_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
| .. figure:: | These graphical tools comes on top of persistence results and allows | :Author: Vincent Rouvreau, Theo Lacombe |
| img/graphical_tools_representation.png | the user to display easily persistence barcode, diagram or density. | |
- | | | :Introduced in: GUDHI 2.0.0 |
+ | | | :Since: GUDHI 2.0.0 |
| | Note that these functions return the matplotlib axis, allowing | |
- | | for further modifications (title, aspect, etc.) | :Copyright: MIT |
+ | | for further modifications (title, aspect, etc.) | :License: MIT |
| | | |
| | | :Requires: matplotlib, numpy and scipy |
+-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
diff --git a/src/python/doc/persistent_cohomology_sum.inc b/src/python/doc/persistent_cohomology_sum.inc
index 4d7b077e..0effb50f 100644
--- a/src/python/doc/persistent_cohomology_sum.inc
+++ b/src/python/doc/persistent_cohomology_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
| .. figure:: | The theory of homology consists in attaching to a topological space | :Author: Clément Maria |
| ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | |
- | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Since: GUDHI 2.0.0 |
| | homology studies the evolution -- birth, life and death -- of these | |
- | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: MIT |
+ | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :License: MIT |
| Torus | theory is essentially composed of three elements: topological spaces, | |
| | their homology groups and an evolution scheme. | |
| | | |
diff --git a/src/python/doc/persistent_cohomology_user.rst b/src/python/doc/persistent_cohomology_user.rst
index de83cda1..4d743aac 100644
--- a/src/python/doc/persistent_cohomology_user.rst
+++ b/src/python/doc/persistent_cohomology_user.rst
@@ -7,7 +7,7 @@ Persistent cohomology user manual
Definition
----------
===================================== ===================================== =====================================
-:Author: Clément Maria :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3
+:Author: Clément Maria :Since: GUDHI PYTHON 2.0.0 :License: GPL v3
===================================== ===================================== =====================================
+-----------------------------------------------------------------+-----------------------------------------------------------------------+
@@ -111,10 +111,3 @@ We provide several example files: run these examples with -h for details on thei
* :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
* :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
* :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
-
-Bibliography
-============
-
-.. bibliography:: ../../biblio/bibliography.bib
- :filter: docnames
- :style: unsrt
diff --git a/src/python/doc/point_cloud.rst b/src/python/doc/point_cloud.rst
index c0d4b303..192f70db 100644
--- a/src/python/doc/point_cloud.rst
+++ b/src/python/doc/point_cloud.rst
@@ -21,10 +21,25 @@ Subsampling
:special-members:
:show-inheritance:
-TimeDelayEmbedding
-------------------
+Time Delay Embedding
+--------------------
.. autoclass:: gudhi.point_cloud.timedelay.TimeDelayEmbedding
:members:
:special-members: __call__
+K nearest neighbors
+-------------------
+
+.. automodule:: gudhi.point_cloud.knn
+ :members:
+ :undoc-members:
+ :special-members: __init__
+
+Distance to measure
+-------------------
+
+.. automodule:: gudhi.point_cloud.dtm
+ :members:
+ :undoc-members:
+ :special-members: __init__
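+
+A minimal usage sketch, assuming :class:`~gudhi.point_cloud.dtm.DistanceToMeasure`
+follows a scikit-learn-like fit/transform interface:
+
+.. code-block:: python
+
+    import numpy as np
+    from gudhi.point_cloud.dtm import DistanceToMeasure
+
+    pts = np.random.rand(100, 2)
+    # Distance to the empirical measure, based on the k=10 nearest neighbors
+    # of each query point, with exponent q=2.
+    dtm = DistanceToMeasure(k=10, q=2)
+    dtm.fit(pts)
+    values = dtm.transform(pts)
+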
diff --git a/src/python/doc/point_cloud_sum.inc b/src/python/doc/point_cloud_sum.inc
index 85d52de7..d4761aba 100644
--- a/src/python/doc/point_cloud_sum.inc
+++ b/src/python/doc/point_cloud_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
- | | :math:`(x_1, x_2, \ldots, x_d)` | Utilities to process point clouds: read from file, subsample, etc. | :Author: Vincent Rouvreau |
- | | :math:`(y_1, y_2, \ldots, y_d)` | | |
- | | | :Introduced in: GUDHI 2.0.0 |
+ | | :math:`(x_1, x_2, \ldots, x_d)` | Utilities to process point clouds: read from file, subsample, | :Authors: Vincent Rouvreau, Marc Glisse, Masatoshi Takenouchi |
+ | | :math:`(y_1, y_2, \ldots, y_d)` | find neighbors, embed time series in higher dimension, etc. | |
+ | | | :Since: GUDHI 2.0.0 |
| | | |
- | | | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | | :License: MIT (`GPL v3 </licensing/>`_, BSD-3-Clause, Apache-2.0) |
| | Parts of this package require CGAL. | |
| | | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
| | | |
diff --git a/src/python/doc/representations_sum.inc b/src/python/doc/representations_sum.inc
index 700828f1..eac89b9d 100644
--- a/src/python/doc/representations_sum.inc
+++ b/src/python/doc/representations_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+------------------------------------------------------------------+----------------------------------------------------------------+-----------------------------------------------+
| .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière |
| img/sklearn-tda.png | diagrams, compatible with scikit-learn. | |
- | | | :Introduced in: GUDHI 3.1.0 |
+ | | | :Since: GUDHI 3.1.0 |
| | | |
- | | | :Copyright: MIT |
+ | | | :License: MIT |
| | | |
| | | :Requires: scikit-learn |
+------------------------------------------------------------------+----------------------------------------------------------------+-----------------------------------------------+
diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc
index 857c6893..6feb74cd 100644
--- a/src/python/doc/rips_complex_sum.inc
+++ b/src/python/doc/rips_complex_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+
| .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse |
| ../../doc/Rips_complex/rips_complex_representation.png | graph. | |
- | :figclass: align-center | | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | | :Since: GUDHI 2.0.0 |
| | The filtration value of each edge is computed from a user-given | |
- | | distance function and is inserted until a user-given threshold | :Copyright: MIT |
+ | | distance function and is inserted until a user-given threshold | :License: MIT |
| | value. | |
| | | |
| | This complex can be built from a point cloud and a distance function, | |
diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst
index a27573e8..8efb12e6 100644
--- a/src/python/doc/rips_complex_user.rst
+++ b/src/python/doc/rips_complex_user.rst
@@ -8,7 +8,7 @@ Definition
----------
==================================================================== ================================ ======================
-:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Since: GUDHI 2.0.0 :License: GPL v3
==================================================================== ================================ ======================
+-------------------------------------------+----------------------------------------------------------------------+
diff --git a/src/python/doc/simplex_tree_ref.rst b/src/python/doc/simplex_tree_ref.rst
index 9eb8c199..46b2c1e5 100644
--- a/src/python/doc/simplex_tree_ref.rst
+++ b/src/python/doc/simplex_tree_ref.rst
@@ -8,7 +8,6 @@ Simplex tree reference manual
.. autoclass:: gudhi.SimplexTree
:members:
- :undoc-members:
:show-inheritance:
.. automethod:: gudhi.SimplexTree.__init__
diff --git a/src/python/doc/simplex_tree_sum.inc b/src/python/doc/simplex_tree_sum.inc
index 5ba58d2b..a8858f16 100644
--- a/src/python/doc/simplex_tree_sum.inc
+++ b/src/python/doc/simplex_tree_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+
| .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria |
| ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | |
- | :alt: Simplex tree representation | | :Introduced in: GUDHI 2.0.0 |
+ | :alt: Simplex tree representation | | :Since: GUDHI 2.0.0 |
| :figclass: align-center | The data structure is described in | |
- | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: MIT |
+ | | :cite:`boissonnatmariasimplextreealgorithmica` | :License: MIT |
| | | |
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+
| * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` |
diff --git a/src/python/doc/tangential_complex_sum.inc b/src/python/doc/tangential_complex_sum.inc
index d84aa433..45ce2a66 100644
--- a/src/python/doc/tangential_complex_sum.inc
+++ b/src/python/doc/tangential_complex_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
| .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin |
| ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | |
- | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Since: GUDHI 2.0.0 |
| | an unknown manifold. The running time depends only linearly on the | |
- | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :License: MIT (`GPL v3 </licensing/>`_) |
| | dimension :math:`k`. | |
| | | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/tangential_complex_user.rst b/src/python/doc/tangential_complex_user.rst
index 852cf5b6..3d45473b 100644
--- a/src/python/doc/tangential_complex_user.rst
+++ b/src/python/doc/tangential_complex_user.rst
@@ -194,11 +194,3 @@ The output is:
Tangential contains 4 vertices.
Inconsistencies has been fixed.
-
-
-Bibliography
-============
-
-.. bibliography:: ../../biblio/bibliography.bib
- :filter: docnames
- :style: unsrt
diff --git a/src/python/doc/wasserstein_distance_sum.inc b/src/python/doc/wasserstein_distance_sum.inc
index a97f428d..f9308e5e 100644
--- a/src/python/doc/wasserstein_distance_sum.inc
+++ b/src/python/doc/wasserstein_distance_sum.inc
@@ -1,13 +1,13 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
| .. figure:: | The q-Wasserstein distance measures the similarity between two | :Author: Theo Lacombe |
- | ../../doc/Bottleneck_distance/perturb_pd.png | persistence diagrams. It's the minimum value c that can be achieved | |
- | :figclass: align-center | by a perfect matching between the points of the two diagrams (+ all | :Introduced in: GUDHI 3.1.0 |
- | | diagonal points), where the value of a matching is defined as the | |
- | Wasserstein distance is the q-th root of the sum of the | q-th root of the sum of all edge lengths to the power q. Edge lengths| :Copyright: MIT |
- | edge lengths to the power q. | are measured in norm p, for :math:`1 \leq p \leq \infty`. | |
+ | ../../doc/Bottleneck_distance/perturb_pd.png | persistence diagrams using the sum of all edge lengths (instead of | |
+ | :figclass: align-center | the maximum). It allows one to define sophisticated objects such as | :Since: GUDHI 3.1.0 |
+ | | barycenters of a family of persistence diagrams. | |
+ | | | :License: MIT |
+ | | | |
| | | :Requires: Python Optimal Transport (POT) :math:`\geq` 0.5.1 |
+-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
| * :doc:`wasserstein_distance_user` | |
diff --git a/src/python/doc/wasserstein_distance_user.rst b/src/python/doc/wasserstein_distance_user.rst
index a9b21fa5..c443bab5 100644
--- a/src/python/doc/wasserstein_distance_user.rst
+++ b/src/python/doc/wasserstein_distance_user.rst
@@ -9,10 +9,16 @@ Definition
.. include:: wasserstein_distance_sum.inc
-Functions
----------
-This implementation uses the Python Optimal Transport library and is based on
-ideas from "Large Scale Computation of Means and Cluster for Persistence
+The q-Wasserstein distance is defined as the minimal value achieved
+by a perfect matching between the points of the two diagrams (+ all
+diagonal points), where the value of a matching is defined as the
+q-th root of the sum of all edge lengths to the power q. Edge lengths
+are measured in norm p, for :math:`1 \leq p \leq \infty`.
+
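+With :math:`D_1, D_2` the two diagrams and :math:`\Delta` the diagonal (onto
+which unmatched points are sent), one way to write this definition formally is:
+
+.. math::
+
+    W_{q,p}(D_1, D_2) = \left( \min_{\phi} \sum_{x \in D_1 \cup \Delta}
+    \|x - \phi(x)\|_p^q \right)^{1/q},
+
+where :math:`\phi` ranges over the perfect matchings between
+:math:`D_1 \cup \Delta` and :math:`D_2 \cup \Delta`.
+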
+Distance Functions
+------------------
+This first implementation uses the Python Optimal Transport library and is based
+on ideas from "Large Scale Computation of Means and Cluster for Persistence
Diagrams via Optimal Transport" :cite:`10.5555/3327546.3327645`.
.. autofunction:: gudhi.wasserstein.wasserstein_distance
@@ -26,7 +32,7 @@ Morozov, and Arnur Nigmetov.
.. autofunction:: gudhi.hera.wasserstein_distance
Basic example
--------------
+*************
This example computes the 1-Wasserstein distance from 2 persistence diagrams with Euclidean ground metric.
Note that persistence diagrams must be submitted as (n x 2) numpy arrays and must not contain inf values.
@@ -48,9 +54,9 @@ The output is:
Wasserstein distance value = 1.45
-We can also have access to the optimal matching by letting `matching=True`.
+We can also have access to the optimal matching by letting `matching=True`.
It is encoded as a list of indices (i,j), meaning that the i-th point in X
-is mapped to the j-th point in Y.
+is mapped to the j-th point in Y.
An index of -1 represents the diagonal.
.. testcode::
@@ -78,9 +84,83 @@ An index of -1 represents the diagonal.
The output is:
.. testoutput::
-
+
Wasserstein distance value = 2.15
point 0 in dgm1 is matched to point 0 in dgm2
point 1 in dgm1 is matched to point 2 in dgm2
point 2 in dgm1 is matched to the diagonal
point 1 in dgm2 is matched to the diagonal
+
+Barycenters
+-----------
+
+A Frechet mean (or barycenter) is a generalization of the arithmetic
+mean to a nonlinear space such as the space of persistence diagrams.
+Given a set of persistence diagrams :math:`\mu_1 \dots \mu_n`, it is
+defined as a minimizer of the variance functional, that is of
+:math:`\mu \mapsto \sum_{i=1}^n d_2(\mu,\mu_i)^2`,
+where :math:`d_2` denotes the Wasserstein-2 distance between
+persistence diagrams.
+It is known to exist and is generically unique. However, an exact
+computation is in general intractable. The implementation currently
+available is based on (Turner et al., 2014),
+:cite:`turner2014frechet`,
+and uses an EM scheme to
+provide a local minimum of the variance functional (somewhat similar
+to the Lloyd algorithm for estimating a solution to the k-means
+problem). The local minimum returned depends on the initialization of
+the barycenter.
+The combinatorial structure of the algorithm limits its
+performance on large-scale problems (thousands of diagrams, or thousands of
+points per diagram).
+
+.. figure::
+ ./img/barycenter.png
+ :figclass: align-center
+
+ Illustration of Frechet mean between persistence
+ diagrams.
+
+
+.. autofunction:: gudhi.wasserstein.barycenter.lagrangian_barycenter
+
+Basic example
+*************
+
+This example estimates the Frechet mean (aka Wasserstein barycenter) between
+four persistence diagrams.
+It is initialized on the 4th diagram.
+As the optimization problem is not convex, the output depends on the
+initialization and is only a local minimum of the objective function.
+The initialization can be given either as an integer (in which case the i-th
+diagram of the list is used as the initial estimate) or as a diagram.
+If it is None, one of the diagrams of the list is selected at random
+as the initial estimate.
+Note that persistence diagrams must be submitted as
+(n x 2) numpy arrays and must not contain inf values.
+
+
+.. testcode::
+
+ from gudhi.wasserstein.barycenter import lagrangian_barycenter
+ import numpy as np
+
+ dg1 = np.array([[0.2, 0.5]])
+ dg2 = np.array([[0.2, 0.7]])
+ dg3 = np.array([[0.3, 0.6], [0.7, 0.8], [0.2, 0.3]])
+ dg4 = np.array([])
+ pdiagset = [dg1, dg2, dg3, dg4]
+ bary = lagrangian_barycenter(pdiagset=pdiagset,init=3)
+
+ message = "Wasserstein barycenter estimated:"
+ print(message)
+ print(bary)
+
+The output is:
+
+.. testoutput::
+
+ Wasserstein barycenter estimated:
+ [[0.27916667 0.55416667]
+ [0.7375 0.7625 ]
+ [0.2375 0.2625 ]]
diff --git a/src/python/doc/witness_complex_sum.inc b/src/python/doc/witness_complex_sum.inc
index 71b65a71..34d4df4a 100644
--- a/src/python/doc/witness_complex_sum.inc
+++ b/src/python/doc/witness_complex_sum.inc
@@ -1,12 +1,12 @@
.. table::
- :widths: 30 50 20
+ :widths: 30 40 30
+-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------+
| .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich |
| ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | |
- | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 |
+ | :alt: Witness complex representation | | :Since: GUDHI 2.0.0 |
| :figclass: align-center | The data structure is described in | |
- | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 </licensing/>`_ for Euclidean versions only) |
+ | | :cite:`boissonnatmariasimplextreealgorithmica`. | :License: MIT (`GPL v3 </licensing/>`_ for Euclidean versions only) |
| | | |
| | | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 for Euclidean versions only |
+-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/witness_complex_user.rst b/src/python/doc/witness_complex_user.rst
index 7087fa98..08dcd288 100644
--- a/src/python/doc/witness_complex_user.rst
+++ b/src/python/doc/witness_complex_user.rst
@@ -126,10 +126,3 @@ Example2: Computing persistence using strong relaxed witness complex
Here is an example of constructing a strong witness complex filtration and computing persistence on it:
* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
-
-Bibliography
-============
-
-.. bibliography:: ../../biblio/bibliography.bib
- :filter: docnames
- :style: unsrt
diff --git a/src/python/doc/zbibliography.rst b/src/python/doc/zbibliography.rst
new file mode 100644
index 00000000..e23fcf25
--- /dev/null
+++ b/src/python/doc/zbibliography.rst
@@ -0,0 +1,10 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Bibliography
+------------
+
+.. bibliography:: ../../biblio/bibliography.bib
+ :style: plain
+
diff --git a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
index 4079a469..727af4fa 100755
--- a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -1,11 +1,15 @@
#!/usr/bin/env python
import argparse
+import errno
+import os
import matplotlib.pyplot as plot
import gudhi
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -41,7 +45,7 @@ args = parser.parse_args()
with open(args.file, "r") as f:
first_line = f.readline()
if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- print("#####################################################################")
+ print("##############################################################")
print("AlphaComplex creation from points read in a OFF file")
message = "AlphaComplex with max_edge_length=" + repr(args.max_alpha_square)
@@ -64,6 +68,7 @@ with open(args.file, "r") as f:
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
- print(args.file, "is not a valid OFF file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
f.close()
diff --git a/src/python/example/alpha_complex_from_points_example.py b/src/python/example/alpha_complex_from_points_example.py
index 73faf17c..465632eb 100755
--- a/src/python/example/alpha_complex_from_points_example.py
+++ b/src/python/example/alpha_complex_from_points_example.py
@@ -46,9 +46,6 @@ if simplex_tree.find([4]):
else:
print("[4] Not found...")
-# Some insertions, simplex_tree needs to initialize filtrations
-simplex_tree.initialize_filtration()
-
print("dimension=", simplex_tree.dimension())
print("filtrations=")
for simplex_with_filtration in simplex_tree.get_filtration():
diff --git a/src/python/example/alpha_rips_persistence_bottleneck_distance.py b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
index d5c33ec8..3e12b0d5 100755
--- a/src/python/example/alpha_rips_persistence_bottleneck_distance.py
+++ b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
@@ -3,9 +3,14 @@
import gudhi
import argparse
import math
-
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+import errno
+import os
+import numpy as np
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -36,7 +41,7 @@ with open(args.file, "r") as f:
first_line = f.readline()
if (first_line == "OFF\n") or (first_line == "nOFF\n"):
point_cloud = gudhi.read_points_from_off_file(off_file=args.file)
- print("#####################################################################")
+ print("##############################################################")
print("RipsComplex creation from points read in a OFF file")
message = "RipsComplex with max_edge_length=" + repr(args.threshold)
@@ -46,14 +51,15 @@ with open(args.file, "r") as f:
points=point_cloud, max_edge_length=args.threshold
)
- rips_stree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+ rips_stree = rips_complex.create_simplex_tree(
+ max_dimension=args.max_dimension)
message = "Number of simplices=" + repr(rips_stree.num_simplices())
print(message)
- rips_diag = rips_stree.persistence()
+ rips_stree.compute_persistence()
- print("#####################################################################")
+ print("##############################################################")
print("AlphaComplex creation from points read in a OFF file")
message = "AlphaComplex with max_edge_length=" + repr(args.threshold)
@@ -67,18 +73,13 @@ with open(args.file, "r") as f:
message = "Number of simplices=" + repr(alpha_stree.num_simplices())
print(message)
- alpha_diag = alpha_stree.persistence()
+ alpha_stree.compute_persistence()
max_b_distance = 0.0
for dim in range(args.max_dimension):
# Alpha persistence values needs to be transform because filtration
# values are alpha square values
- funcs = [math.sqrt, math.sqrt]
- alpha_intervals = []
- for interval in alpha_stree.persistence_intervals_in_dimension(dim):
- alpha_intervals.append(
- map(lambda func, value: func(value), funcs, interval)
- )
+ alpha_intervals = np.sqrt(alpha_stree.persistence_intervals_in_dimension(dim))
rips_intervals = rips_stree.persistence_intervals_in_dimension(dim)
bottleneck_distance = gudhi.bottleneck_distance(
@@ -93,13 +94,13 @@ with open(args.file, "r") as f:
print(message)
max_b_distance = max(bottleneck_distance, max_b_distance)
- print(
- "================================================================================"
- )
+ print("==============================================================")
message = "Bottleneck distance is " + repr(max_b_distance)
print(message)
else:
- print(args.file, "is not a valid OFF file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
+
f.close()
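The np.sqrt call above replaces the per-interval map: alpha filtration values are squared circumradii, so taking the square root puts them on the same scale as Rips edge lengths before the bottleneck comparison. A self-contained sketch of that rescaling (illustration only, small hard-coded point cloud):

import numpy as np
import gudhi

points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
rips_stree = gudhi.RipsComplex(points=points, max_edge_length=2.0).create_simplex_tree(max_dimension=2)
rips_stree.compute_persistence()
alpha_stree = gudhi.AlphaComplex(points=points).create_simplex_tree()
alpha_stree.compute_persistence()
# Alpha filtration values are squared radii, Rips values are distances.
alpha_intervals = np.sqrt(alpha_stree.persistence_intervals_in_dimension(1))
rips_intervals = rips_stree.persistence_intervals_in_dimension(1)
print(gudhi.bottleneck_distance(rips_intervals, alpha_intervals))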
diff --git a/src/python/example/diagram_vectorizations_distances_kernels.py b/src/python/example/diagram_vectorizations_distances_kernels.py
index 507ead7c..de22d9e7 100755
--- a/src/python/example/diagram_vectorizations_distances_kernels.py
+++ b/src/python/example/diagram_vectorizations_distances_kernels.py
@@ -9,7 +9,7 @@ from gudhi.representations import DiagramSelector, Clamping, Landscape, Silhouet
TopologicalVector, DiagramScaler, BirthPersistenceTransform,\
PersistenceImage, PersistenceWeightedGaussianKernel, Entropy, \
PersistenceScaleSpaceKernel, SlicedWassersteinDistance,\
- SlicedWassersteinKernel, BottleneckDistance, WassersteinDistance, PersistenceFisherKernel
+ SlicedWassersteinKernel, BottleneckDistance, PersistenceFisherKernel
D = np.array([[0.,4.],[1.,2.],[3.,8.],[6.,8.], [0., np.inf], [5., np.inf]])
diags = [D]
diff --git a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
index 4903667e..e1e572df 100755
--- a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -1,11 +1,15 @@
#!/usr/bin/env python
import argparse
+import errno
+import os
import matplotlib.pyplot as plot
import gudhi
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -44,8 +48,9 @@ args = parser.parse_args()
with open(args.file, "r") as f:
first_line = f.readline()
if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- print("#####################################################################")
- print("EuclideanStrongWitnessComplex creation from points read in a OFF file")
+ print("##############################################################")
+ print("EuclideanStrongWitnessComplex creation from points read "\
+ "in a OFF file")
witnesses = gudhi.read_points_from_off_file(off_file=args.file)
landmarks = gudhi.pick_n_random_points(
@@ -64,7 +69,8 @@ with open(args.file, "r") as f:
witnesses=witnesses, landmarks=landmarks
)
simplex_tree = witness_complex.create_simplex_tree(
- max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension
+ max_alpha_square=args.max_alpha_square,
+ limit_dimension=args.limit_dimension
)
message = "Number of simplices=" + repr(simplex_tree.num_simplices())
@@ -79,6 +85,7 @@ with open(args.file, "r") as f:
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
- print(args.file, "is not a valid OFF file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
f.close()
diff --git a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
index 339a8577..58cb2bb5 100755
--- a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -1,11 +1,15 @@
#!/usr/bin/env python
import argparse
+import errno
+import os
import matplotlib.pyplot as plot
import gudhi
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -78,6 +82,7 @@ with open(args.file, "r") as f:
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
- print(args.file, "is not a valid OFF file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
f.close()
diff --git a/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
index c692e66f..499171df 100755
--- a/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
+++ b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
@@ -2,10 +2,14 @@
import argparse
import matplotlib.pyplot as plot
+import errno
+import os
import gudhi
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -57,9 +61,10 @@ parser.add_argument(
args = parser.parse_args()
if is_file_perseus(args.file):
- print("#####################################################################")
+ print("##################################################################")
print("PeriodicCubicalComplex creation")
- periodic_cubical_complex = gudhi.PeriodicCubicalComplex(perseus_file=args.file)
+ periodic_cubical_complex = gudhi.PeriodicCubicalComplex(
+ perseus_file=args.file)
print("persistence(homology_coeff_field=3, min_persistence=0)=")
diag = periodic_cubical_complex.persistence(
@@ -73,4 +78,5 @@ if is_file_perseus(args.file):
gudhi.plot_persistence_barcode(diag)
plot.show()
else:
- print(args.file, "is not a valid perseus style file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
diff --git a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
index c757aca7..6f992508 100755
--- a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -1,11 +1,15 @@
#!/usr/bin/env python
import argparse
+import errno
+import os
import matplotlib.pyplot as plot
import gudhi
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -42,10 +46,11 @@ args = parser.parse_args()
with open(args.file, "r") as f:
first_line = f.readline()
if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- print("#####################################################################")
+ print("##############################################################")
print("RipsComplex creation from points read in a OFF file")
- message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
+ message = "RipsComplex with max_edge_length=" + \
+ repr(args.max_edge_length)
print(message)
point_cloud = gudhi.read_points_from_off_file(off_file=args.file)
@@ -68,6 +73,7 @@ with open(args.file, "r") as f:
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
- print(args.file, "is not a valid OFF file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
f.close()
diff --git a/src/python/example/simplex_tree_example.py b/src/python/example/simplex_tree_example.py
index 34833899..c4635dc5 100755
--- a/src/python/example/simplex_tree_example.py
+++ b/src/python/example/simplex_tree_example.py
@@ -42,7 +42,6 @@ print("simplices=")
for simplex_with_filtration in st.get_simplices():
print("(%s, %.2f)" % tuple(simplex_with_filtration))
-st.initialize_filtration()
print("filtration=")
for simplex_with_filtration in st.get_filtration():
print("(%s, %.2f)" % tuple(simplex_with_filtration))
diff --git a/src/python/example/tangential_complex_plain_homology_from_off_file_example.py b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
index f0df2189..85bade4a 100755
--- a/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
+++ b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -1,11 +1,15 @@
#!/usr/bin/env python
import argparse
+import errno
+import os
import matplotlib.pyplot as plot
import gudhi
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+ which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+ license details.
Author(s): Vincent Rouvreau
Copyright (C) 2016 Inria
@@ -19,7 +23,7 @@ __copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
parser = argparse.ArgumentParser(
- description="TangentialComplex creation from " "points read in a OFF file.",
+ description="TangentialComplex creation from points read in a OFF file.",
epilog="Example: "
"example/tangential_complex_plain_homology_from_off_file_example.py "
"-f ../data/points/tore3D_300.off -i 3"
@@ -41,10 +45,11 @@ args = parser.parse_args()
with open(args.file, "r") as f:
first_line = f.readline()
if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- print("#####################################################################")
+ print("##############################################################")
print("TangentialComplex creation from points read in a OFF file")
- tc = gudhi.TangentialComplex(intrisic_dim=args.intrisic_dim, off_file=args.file)
+ tc = gudhi.TangentialComplex(intrisic_dim=args.intrisic_dim,
+ off_file=args.file)
tc.compute_tangential_complex()
st = tc.create_simplex_tree()
@@ -60,6 +65,7 @@ with open(args.file, "r") as f:
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
- print(args.file, "is not a valid OFF file")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ args.file)
f.close()
diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx
index fff3e920..e04dc652 100644
--- a/src/python/gudhi/alpha_complex.pyx
+++ b/src/python/gudhi/alpha_complex.pyx
@@ -1,5 +1,7 @@
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+# which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+# license details.
# Author(s): Vincent Rouvreau
#
# Copyright (C) 2016 Inria
@@ -7,6 +9,7 @@
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from __future__ import print_function
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
@@ -69,7 +72,8 @@ cdef class AlphaComplex:
def __cinit__(self, points = None, off_file = ''):
if off_file:
if os.path.isfile(off_file):
- self.thisptr = new Alpha_complex_interface(off_file.encode('utf-8'), True)
+ self.thisptr = new Alpha_complex_interface(
+ off_file.encode('utf-8'), True)
else:
print("file " + off_file + " not found.")
else:
diff --git a/src/python/gudhi/cubical_complex.pyx b/src/python/gudhi/cubical_complex.pyx
index cbeda014..007abcb6 100644
--- a/src/python/gudhi/cubical_complex.pyx
+++ b/src/python/gudhi/cubical_complex.pyx
@@ -1,5 +1,7 @@
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+# which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+# license details.
# Author(s): Vincent Rouvreau
#
# Copyright (C) 2016 Inria
@@ -7,12 +9,15 @@
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from __future__ import print_function
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp.string cimport string
from libcpp cimport bool
+import errno
import os
+import sys
import numpy as np
@@ -30,7 +35,8 @@ cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Cubical_complex::Cubical_complex_interface<>>":
Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max)
- vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ void compute_persistence(int homology_coeff_field, double min_persistence)
+ vector[pair[int, pair[double, double]]] get_persistence()
vector[int] betti_numbers()
vector[int] persistent_betti_numbers(double from_value, double to_value)
vector[pair[double,double]] intervals_in_dimension(int dimension)
@@ -87,10 +93,12 @@ cdef class CubicalComplex:
if os.path.isfile(perseus_file):
self.thisptr = new Bitmap_cubical_complex_base_interface(perseus_file.encode('utf-8'))
else:
- print("file " + perseus_file + " not found.")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ perseus_file)
else:
print("CubicalComplex can be constructed from dimensions and "
- "top_dimensional_cells or from a Perseus-style file name.")
+ "top_dimensional_cells or from a Perseus-style file name.",
+ file=sys.stderr)
def __dealloc__(self):
if self.thisptr != NULL:
@@ -122,8 +130,31 @@ cdef class CubicalComplex:
"""
return self.thisptr.dimension()
+ def compute_persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function computes the persistence of the complex, so it can be
+ accessed through :func:`persistent_betti_numbers`,
+ :func:`persistence_intervals_in_dimension`, etc. It is
+ equivalent to :func:`persistence` when you do not need the list
+ of pairs that :func:`persistence` returns.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+ Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: Nothing.
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ assert self.__is_defined()
+ self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True)
+ self.pcohptr.compute_persistence(homology_coeff_field, min_persistence)
+
def persistence(self, homology_coeff_field=11, min_persistence=0):
- """This function returns the persistence of the complex.
+ """This function computes and returns the persistence of the complex.
:param homology_coeff_field: The homology coefficient field. Must be a
prime number
@@ -136,30 +167,22 @@ cdef class CubicalComplex:
:returns: list of pairs(dimension, pair(birth, death)) -- the
persistence of the complex.
"""
- if self.pcohptr != NULL:
- del self.pcohptr
- if self.thisptr != NULL:
- self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True)
- cdef vector[pair[int, pair[double, double]]] persistence_result
- if self.pcohptr != NULL:
- persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
- return persistence_result
+ self.compute_persistence(homology_coeff_field, min_persistence)
+ return self.pcohptr.get_persistence()
def betti_numbers(self):
"""This function returns the Betti numbers of the complex.
:returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
- :note: betti_numbers function requires persistence function to be
+ :note: betti_numbers function requires :func:`compute_persistence` to be
launched first.
:note: betti_numbers function always returns [1, 0, 0, ...] as infinity
filtration cubes are not removed from the complex.
"""
- cdef vector[int] bn_result
- if self.pcohptr != NULL:
- bn_result = self.pcohptr.betti_numbers()
- return bn_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before betti_numbers()"
+ return self.pcohptr.betti_numbers()
def persistent_betti_numbers(self, from_value, to_value):
"""This function returns the persistent Betti numbers of the complex.
@@ -174,13 +197,11 @@ cdef class CubicalComplex:
:returns: list of int -- The persistent Betti numbers ([B0, B1, ...,
Bn]).
- :note: persistent_betti_numbers function requires persistence
+ :note: persistent_betti_numbers function requires :func:`compute_persistence`
function to be launched first.
"""
- cdef vector[int] pbn_result
- if self.pcohptr != NULL:
- pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
- return pbn_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistent_betti_numbers()"
+ return self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
def persistence_intervals_in_dimension(self, dimension):
"""This function returns the persistence intervals of the complex in a
@@ -191,13 +212,8 @@ cdef class CubicalComplex:
:returns: The persistence intervals.
:rtype: numpy array of dimension 2
- :note: intervals_in_dim function requires persistence function to be
+ :note: intervals_in_dim function requires :func:`compute_persistence` to be
launched first.
"""
- cdef vector[pair[double,double]] intervals_result
- if self.pcohptr != NULL:
- intervals_result = self.pcohptr.intervals_in_dimension(dimension)
- else:
- print("intervals_in_dim function requires persistence function"
- " to be launched first.")
- return np.array(intervals_result)
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_intervals_in_dimension()"
+ return np.array(self.pcohptr.intervals_in_dimension(dimension))
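For reference, a usage sketch of the new split between compute_persistence() and the accessors (assumes a gudhi build from this branch; the grid values are arbitrary):

import gudhi

cc = gudhi.CubicalComplex(dimensions=[3, 3],
                          top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9])
# Compute once without materializing the (dimension, (birth, death)) list ...
cc.compute_persistence(homology_coeff_field=2, min_persistence=0)
# ... then query the accessors that previously required calling persistence().
print(cc.betti_numbers())
print(cc.persistence_intervals_in_dimension(0))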
diff --git a/src/python/gudhi/nerve_gic.pyx b/src/python/gudhi/nerve_gic.pyx
index 45cc8eba..9c89b239 100644
--- a/src/python/gudhi/nerve_gic.pyx
+++ b/src/python/gudhi/nerve_gic.pyx
@@ -1,5 +1,7 @@
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+# which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+# license details.
# Author(s): Vincent Rouvreau
#
# Copyright (C) 2018 Inria
@@ -7,11 +9,13 @@
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from __future__ import print_function
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp.string cimport string
from libcpp cimport bool
+import errno
import os
from libc.stdint cimport intptr_t
@@ -96,7 +100,8 @@ cdef class CoverComplex:
return self.thisptr != NULL
def set_point_cloud_from_range(self, cloud):
- """ Reads and stores the input point cloud from a vector stored in memory.
+ """ Reads and stores the input point cloud from a vector stored in
+ memory.
:param cloud: Input vector containing the point cloud.
:type cloud: vector[vector[double]]
@@ -104,7 +109,8 @@ cdef class CoverComplex:
return self.thisptr.set_point_cloud_from_range(cloud)
def set_distances_from_range(self, distance_matrix):
- """ Reads and stores the input distance matrix from a vector stored in memory.
+ """ Reads and stores the input distance matrix from a vector stored in
+ memory.
:param distance_matrix: Input vector containing the distance matrix.
:type distance_matrix: vector[vector[double]]
@@ -163,7 +169,8 @@ cdef class CoverComplex:
"""
stree = SimplexTree()
cdef intptr_t stree_int_ptr=stree.thisptr
- self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr)
+ self.thisptr.create_simplex_tree(
+ <Simplex_tree_interface_full_featured*>stree_int_ptr)
return stree
def find_simplices(self):
@@ -182,8 +189,8 @@ cdef class CoverComplex:
if os.path.isfile(off_file):
return self.thisptr.read_point_cloud(off_file.encode('utf-8'))
else:
- print("file " + off_file + " not found.")
- return False
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ off_file)
def set_automatic_resolution(self):
"""Computes the optimal length of intervals (i.e. the smallest interval
@@ -214,7 +221,8 @@ cdef class CoverComplex:
if os.path.isfile(color_file_name):
self.thisptr.set_color_from_file(color_file_name.encode('utf-8'))
else:
- print("file " + color_file_name + " not found.")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ color_file_name)
def set_color_from_range(self, color):
"""Computes the function used to color the nodes of the simplicial
@@ -235,7 +243,8 @@ cdef class CoverComplex:
if os.path.isfile(cover_file_name):
self.thisptr.set_cover_from_file(cover_file_name.encode('utf-8'))
else:
- print("file " + cover_file_name + " not found.")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ cover_file_name)
def set_cover_from_function(self):
"""Creates a cover C from the preimages of the function f.
@@ -268,7 +277,8 @@ cdef class CoverComplex:
if os.path.isfile(func_file_name):
self.thisptr.set_function_from_file(func_file_name.encode('utf-8'))
else:
- print("file " + func_file_name + " not found.")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ func_file_name)
def set_function_from_range(self, function):
"""Creates the function f from a vector stored in memory.
@@ -302,14 +312,15 @@ cdef class CoverComplex:
"""Creates a graph G from a file containing the edges.
:param graph_file_name: Name of the input graph file. The graph file
- contains one edge per line, each edge being represented by the IDs of
- its two nodes.
+ contains one edge per line, each edge being represented by the IDs
+ of its two nodes.
:type graph_file_name: string
"""
if os.path.isfile(graph_file_name):
self.thisptr.set_graph_from_file(graph_file_name.encode('utf-8'))
else:
- print("file " + graph_file_name + " not found.")
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ graph_file_name)
def set_graph_from_OFF(self):
"""Creates a graph G from the triangulation given by the input OFF
diff --git a/src/python/gudhi/off_reader.pyx b/src/python/gudhi/off_reader.pyx
index 7e6d9d80..a3200704 100644
--- a/src/python/gudhi/off_reader.pyx
+++ b/src/python/gudhi/off_reader.pyx
@@ -1,5 +1,7 @@
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
+# which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full
+# license details.
# Author(s): Vincent Rouvreau
#
# Copyright (C) 2016 Inria
@@ -7,9 +9,11 @@
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from __future__ import print_function
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.string cimport string
+import errno
import os
__author__ = "Vincent Rouvreau"
@@ -32,6 +36,6 @@ def read_points_from_off_file(off_file=''):
if os.path.isfile(off_file):
return read_points_from_OFF_file(off_file.encode('utf-8'))
else:
- print("file " + off_file + " not found.")
- return []
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+ off_file)
diff --git a/src/python/gudhi/periodic_cubical_complex.pyx b/src/python/gudhi/periodic_cubical_complex.pyx
index 37f76201..246a3a02 100644
--- a/src/python/gudhi/periodic_cubical_complex.pyx
+++ b/src/python/gudhi/periodic_cubical_complex.pyx
@@ -7,11 +7,13 @@
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from __future__ import print_function
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp.string cimport string
from libcpp cimport bool
+import sys
import os
import numpy as np
@@ -30,7 +32,8 @@ cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Cubical_complex::Cubical_complex_interface<Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>>>":
Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max)
- vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ void compute_persistence(int homology_coeff_field, double min_persistence)
+ vector[pair[int, pair[double, double]]] get_persistence()
vector[int] betti_numbers()
vector[int] persistent_betti_numbers(double from_value, double to_value)
vector[pair[double,double]] intervals_in_dimension(int dimension)
@@ -95,12 +98,12 @@ cdef class PeriodicCubicalComplex:
if os.path.isfile(perseus_file):
self.thisptr = new Periodic_cubical_complex_base_interface(perseus_file.encode('utf-8'))
else:
- print("file " + perseus_file + " not found.")
+ print("file " + perseus_file + " not found.", file=sys.stderr)
else:
print("CubicalComplex can be constructed from dimensions, "
"top_dimensional_cells and periodic_dimensions, or from "
"top_dimensional_cells and periodic_dimensions or from "
- "a Perseus-style file name.")
+ "a Perseus-style file name.", file=sys.stderr)
def __dealloc__(self):
if self.thisptr != NULL:
@@ -132,8 +135,31 @@ cdef class PeriodicCubicalComplex:
"""
return self.thisptr.dimension()
+ def compute_persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function computes the persistence of the complex, so it can be
+ accessed through :func:`persistent_betti_numbers`,
+ :func:`persistence_intervals_in_dimension`, etc. It is
+ equivalent to :func:`persistence` when you do not need the list
+ of pairs that :func:`persistence` returns.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+ Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: Nothing.
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ assert self.__is_defined()
+ self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True)
+ self.pcohptr.compute_persistence(homology_coeff_field, min_persistence)
+
def persistence(self, homology_coeff_field=11, min_persistence=0):
- """This function returns the persistence of the complex.
+ """This function computes and returns the persistence of the complex.
:param homology_coeff_field: The homology coefficient field. Must be a
prime number
@@ -146,30 +172,22 @@ cdef class PeriodicCubicalComplex:
:returns: list of pairs(dimension, pair(birth, death)) -- the
persistence of the complex.
"""
- if self.pcohptr != NULL:
- del self.pcohptr
- if self.thisptr != NULL:
- self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True)
- cdef vector[pair[int, pair[double, double]]] persistence_result
- if self.pcohptr != NULL:
- persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
- return persistence_result
+ self.compute_persistence(homology_coeff_field, min_persistence)
+ return self.pcohptr.get_persistence()
def betti_numbers(self):
"""This function returns the Betti numbers of the complex.
:returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
- :note: betti_numbers function requires persistence function to be
+ :note: betti_numbers function requires :func:`compute_persistence` to be
launched first.
- :note: betti_numbers function always returns [1, 0, 0, ...] as infinity
+ :note: This function always returns the Betti numbers of a torus, as infinity
filtration cubes are not removed from the complex.
"""
- cdef vector[int] bn_result
- if self.pcohptr != NULL:
- bn_result = self.pcohptr.betti_numbers()
- return bn_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before betti_numbers()"
+ return self.pcohptr.betti_numbers()
def persistent_betti_numbers(self, from_value, to_value):
"""This function returns the persistent Betti numbers of the complex.
@@ -184,13 +202,11 @@ cdef class PeriodicCubicalComplex:
:returns: list of int -- The persistent Betti numbers ([B0, B1, ...,
Bn]).
- :note: persistent_betti_numbers function requires persistence
+ :note: persistent_betti_numbers function requires :func:`compute_persistence`
function to be launched first.
"""
- cdef vector[int] pbn_result
- if self.pcohptr != NULL:
- pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
- return pbn_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistent_betti_numbers()"
+ return self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
def persistence_intervals_in_dimension(self, dimension):
"""This function returns the persistence intervals of the complex in a
@@ -201,13 +217,8 @@ cdef class PeriodicCubicalComplex:
:returns: The persistence intervals.
:rtype: numpy array of dimension 2
- :note: intervals_in_dim function requires persistence function to be
+ :note: intervals_in_dim function requires :func:`compute_persistence` to be
launched first.
"""
- cdef vector[pair[double,double]] intervals_result
- if self.pcohptr != NULL:
- intervals_result = self.pcohptr.intervals_in_dimension(dimension)
- else:
- print("intervals_in_dim function requires persistence function"
- " to be launched first.")
- return np.array(intervals_result)
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_intervals_in_dimension()"
+ return np.array(self.pcohptr.intervals_in_dimension(dimension))
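The same pattern applies to the periodic variant; a short sketch (assumes this branch, arbitrary cell values, both dimensions periodic):

import gudhi

pcc = gudhi.PeriodicCubicalComplex(dimensions=[3, 3],
                                   top_dimensional_cells=[0, 0, 0, 0, 1, 0, 0, 0, 0],
                                   periodic_dimensions=[True, True])
pcc.compute_persistence(homology_coeff_field=11, min_persistence=0)
print(pcc.betti_numbers())  # Betti numbers of a torus, as noted in the docstring above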
diff --git a/src/python/gudhi/point_cloud/dtm.py b/src/python/gudhi/point_cloud/dtm.py
new file mode 100644
index 00000000..13e16d24
--- /dev/null
+++ b/src/python/gudhi/point_cloud/dtm.py
@@ -0,0 +1,70 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Marc Glisse
+#
+# Copyright (C) 2020 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
+from .knn import KNearestNeighbors
+
+__author__ = "Marc Glisse"
+__copyright__ = "Copyright (C) 2020 Inria"
+__license__ = "MIT"
+
+
+class DistanceToMeasure:
+ """
+ Class to compute the distance to the empirical measure defined by a point set, as introduced in :cite:`dtm`.
+ """
+
+ def __init__(self, k, q=2, **kwargs):
+ """
+ Args:
+ k (int): number of neighbors (possibly including the point itself).
+ q (float): order used to compute the distance to measure. Defaults to 2.
+ kwargs: same parameters as :class:`~gudhi.point_cloud.knn.KNearestNeighbors`, except that
+ metric="neighbors" means that :func:`transform` expects an array with the distances
+ to the k nearest neighbors.
+ """
+ self.k = k
+ self.q = q
+ self.params = kwargs
+
+ def fit_transform(self, X, y=None):
+ return self.fit(X).transform(X)
+
+ def fit(self, X, y=None):
+ """
+ Args:
+ X (numpy.array): coordinates for mass points.
+ """
+ if self.params.setdefault("metric", "euclidean") != "neighbors":
+ self.knn = KNearestNeighbors(
+ self.k, return_index=False, return_distance=True, sort_results=False, **self.params
+ )
+ self.knn.fit(X)
+ return self
+
+ def transform(self, X):
+ """
+ Args:
+ X (numpy.array): coordinates for query points, or distance matrix if metric is "precomputed",
+ or distances to the k nearest neighbors if metric is "neighbors" (if the array has more
+ than k columns, the remaining ones are ignored).
+
+ Returns:
+ numpy.array: a 1-d array with, for each point of X, its distance to the measure defined
+ by the argument of :func:`fit`.
+ """
+ if self.params["metric"] == "neighbors":
+ distances = X[:, : self.k]
+ else:
+ distances = self.knn.transform(X)
+ distances = distances ** self.q
+ dtm = distances.sum(-1) / self.k
+ dtm = dtm ** (1.0 / self.q)
+ # We compute too many powers, 1/p in knn then q in dtm, 1/q in dtm then q or some log in the caller.
+ # Add option to skip the final root?
+ return dtm
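A usage sketch for the new DistanceToMeasure class (illustration only; the random point cloud is arbitrary):

import numpy as np
from gudhi.point_cloud.dtm import DistanceToMeasure

pts = np.random.rand(100, 2)
dtm = DistanceToMeasure(k=10, q=2)
values = dtm.fit_transform(pts)  # one distance-to-measure value per point
print(values.shape)
# Alternatively, reuse precomputed k-NN distances:
#   DistanceToMeasure(k=10, metric="neighbors").fit_transform(knn_distances)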
diff --git a/src/python/gudhi/point_cloud/knn.py b/src/python/gudhi/point_cloud/knn.py
new file mode 100644
index 00000000..07553d6d
--- /dev/null
+++ b/src/python/gudhi/point_cloud/knn.py
@@ -0,0 +1,324 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Marc Glisse
+#
+# Copyright (C) 2020 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
+import numpy
+
+# TODO: https://github.com/facebookresearch/faiss
+
+__author__ = "Marc Glisse"
+__copyright__ = "Copyright (C) 2020 Inria"
+__license__ = "MIT"
+
+
+class KNearestNeighbors:
+ """
+ Class wrapping several implementations for computing the k nearest neighbors in a point set.
+ """
+
+ def __init__(self, k, return_index=True, return_distance=False, metric="euclidean", **kwargs):
+ """
+ Args:
+ k (int): number of neighbors (possibly including the point itself).
+ return_index (bool): if True, return the index of each neighbor.
+ return_distance (bool): if True, return the distance to each neighbor.
+ implementation (str): choice of the library that does the real work.
+
+ * 'keops' for a brute-force, CUDA implementation through pykeops. Useful when the dimension becomes large (10+) but the number of points remains low (less than a million). Only "minkowski" and its aliases are supported.
+ * 'ckdtree' for scipy's cKDTree. Only "minkowski" and its aliases are supported.
+ * 'sklearn' for scikit-learn's NearestNeighbors. Note that this provides in particular an option algorithm="brute".
+ * 'hnsw' for hnswlib.Index. It can be very fast but does not provide guarantees. Only supports "euclidean" for now.
+ * None will try to select a sensible one (scipy if possible, scikit-learn otherwise).
+ metric (str): see `sklearn.neighbors.NearestNeighbors`.
+ eps (float): relative error when computing nearest neighbors with the cKDTree.
+ p (float): norm L^p on input points (including numpy.inf) if metric is "minkowski". Defaults to 2.
+ n_jobs (int): number of jobs to schedule for parallel processing of nearest neighbors on the CPU.
+ If -1 is given all processors are used. Default: 1.
+ sort_results (bool): if True, then distances and indices of each point are
+ sorted on return, so that the first column contains the closest points.
+ Otherwise, neighbors are returned in an arbitrary order. Defaults to True.
+ enable_autodiff (bool): if the input is a torch.tensor, jax.numpy.ndarray or tensorflow.Tensor, this
+ instructs the function to compute distances in a way that works with automatic differentiation.
+ This is experimental, not supported for all metrics, and requires the package EagerPy.
+ Defaults to False.
+ kwargs: additional parameters are forwarded to the backends.
+ """
+ self.k = k
+ self.return_index = return_index
+ self.return_distance = return_distance
+ self.metric = metric
+ self.params = kwargs
+ # canonicalize
+ if metric == "euclidean":
+ self.params["p"] = 2
+ self.metric = "minkowski"
+ elif metric == "manhattan":
+ self.params["p"] = 1
+ self.metric = "minkowski"
+ elif metric == "chebyshev":
+ self.params["p"] = numpy.inf
+ self.metric = "minkowski"
+ elif metric == "minkowski":
+ self.params["p"] = kwargs.get("p", 2)
+ if self.params.get("implementation") in {"keops", "ckdtree"}:
+ assert self.metric == "minkowski"
+ if self.params.get("implementation") == "hnsw":
+ assert self.metric == "minkowski" and self.params["p"] == 2
+ if not self.params.get("implementation"):
+ if self.metric == "minkowski":
+ self.params["implementation"] = "ckdtree"
+ else:
+ self.params["implementation"] = "sklearn"
+ if not return_distance:
+ self.params["enable_autodiff"] = False
+
+ def fit_transform(self, X, y=None):
+ return self.fit(X).transform(X)
+
+ def fit(self, X, y=None):
+ """
+ Args:
+ X (numpy.array): coordinates for reference points.
+ """
+ self.ref_points = X
+ if self.params.get("enable_autodiff", False):
+ import eagerpy as ep
+
+ X = ep.astensor(X)
+ if self.params["implementation"] != "keops" or not isinstance(X, ep.PyTorchTensor):
+ # I don't know a clever way to reuse a GPU tensor from tensorflow in pytorch
+ # without copying to/from the CPU.
+ X = X.numpy()
+ if self.params["implementation"] == "ckdtree":
+ # sklearn could handle this, but it is much slower
+ from scipy.spatial import cKDTree
+
+ self.kdtree = cKDTree(X)
+
+ if self.params["implementation"] == "sklearn" and self.metric != "precomputed":
+ # FIXME: sklearn badly handles "precomputed"
+ from sklearn.neighbors import NearestNeighbors
+
+ nargs = {
+ k: v for k, v in self.params.items() if k in {"p", "n_jobs", "metric_params", "algorithm", "leaf_size"}
+ }
+ self.nn = NearestNeighbors(self.k, metric=self.metric, **nargs)
+ self.nn.fit(X)
+
+ if self.params["implementation"] == "hnsw":
+ import hnswlib
+
+ self.graph = hnswlib.Index("l2", len(X[0])) # Actually returns squared distances
+ self.graph.init_index(
+ len(X), **{k: v for k, v in self.params.items() if k in {"ef_construction", "M", "random_seed"}}
+ )
+ n = self.params.get("num_threads")
+ if n is None:
+ n = self.params.get("n_jobs", 1)
+ self.params["num_threads"] = n
+ self.graph.add_items(X, num_threads=n)
+
+ return self
+
+ def transform(self, X):
+ """
+ Args:
+ X (numpy.array): coordinates for query points, or distance matrix if metric is "precomputed".
+
+ Returns:
+ numpy.array: if return_index, an array of shape (len(X), k) with the indices (in the argument
+ of :func:`fit`) of the k nearest neighbors to the points of X. If return_distance, an array of the
+ same shape with the distances to those neighbors. If both, a tuple with the two arrays, in this order.
+ """
+ if self.params.get("enable_autodiff", False):
+ # pykeops does not support autodiff for kmin yet, but when it does in the future,
+ # we may want a special path.
+ import eagerpy as ep
+
+ save_return_index = self.return_index
+ self.return_index = True
+ self.return_distance = False
+ self.params["enable_autodiff"] = False
+ try:
+ newX = ep.astensor(X)
+ if self.params["implementation"] != "keops" or (
+ not isinstance(newX, ep.PyTorchTensor) and not isinstance(newX, ep.NumPyTensor)
+ ):
+ newX = newX.numpy()
+ else:
+ newX = newX.raw
+ neighbors = self.transform(newX)
+ finally:
+ self.return_index = save_return_index
+ self.return_distance = True
+ self.params["enable_autodiff"] = True
+ # We can implement more later as needed
+ assert self.metric == "minkowski"
+ p = self.params["p"]
+ Y = ep.astensor(self.ref_points)
+ neighbor_pts = Y[
+ neighbors,
+ ]
+ diff = neighbor_pts - X[:, None, :]
+ if isinstance(diff, ep.PyTorchTensor):
+ # https://github.com/jonasrauber/eagerpy/issues/6
+ distances = ep.astensor(diff.raw.norm(p, -1))
+ else:
+ distances = diff.norms.lp(p, -1)
+ if self.return_index:
+ return neighbors, distances.raw
+ else:
+ return distances.raw
+
+ metric = self.metric
+ k = self.k
+
+ if metric == "precomputed":
+ # scikit-learn could handle that, but they insist on calling fit() with an unused square array, which is too unnatural.
+ if self.return_index:
+ n_jobs = self.params.get("n_jobs", 1)
+ # Supposedly numpy can be compiled with OpenMP and handle this, but nobody does that?!
+ if n_jobs == 1:
+ neighbors = numpy.argpartition(X, k - 1)[:, 0:k]
+ if self.params.get("sort_results", True):
+ X = numpy.take_along_axis(X, neighbors, axis=-1)
+ ngb_order = numpy.argsort(X, axis=-1)
+ neighbors = numpy.take_along_axis(neighbors, ngb_order, axis=-1)
+ else:
+ ngb_order = neighbors
+ if self.return_distance:
+ distances = numpy.take_along_axis(X, ngb_order, axis=-1)
+ return neighbors, distances
+ else:
+ return neighbors
+ else:
+ from joblib import Parallel, delayed, effective_n_jobs
+ from sklearn.utils import gen_even_slices
+
+ slices = gen_even_slices(len(X), effective_n_jobs(-1))
+ parallel = Parallel(backend="threading", n_jobs=-1)
+ if self.params.get("sort_results", True):
+
+ def func(M):
+ neighbors = numpy.argpartition(M, k - 1)[:, 0:k]
+ Y = numpy.take_along_axis(M, neighbors, axis=-1)
+ ngb_order = numpy.argsort(Y, axis=-1)
+ return numpy.take_along_axis(neighbors, ngb_order, axis=-1)
+
+ else:
+
+ def func(M):
+ return numpy.argpartition(M, k - 1)[:, 0:k]
+
+ neighbors = numpy.concatenate(parallel(delayed(func)(X[s]) for s in slices))
+ if self.return_distance:
+ distances = numpy.take_along_axis(X, neighbors, axis=-1)
+ return neighbors, distances
+ else:
+ return neighbors
+ if self.return_distance:
+ n_jobs = self.params.get("n_jobs", 1)
+ if n_jobs == 1:
+ distances = numpy.partition(X, k - 1)[:, 0:k]
+ if self.params.get("sort_results"):
+ # partition is not guaranteed to sort the lower half, although it often does
+ distances.sort(axis=-1)
+ else:
+ from joblib import Parallel, delayed, effective_n_jobs
+ from sklearn.utils import gen_even_slices
+
+ if self.params.get("sort_results"):
+
+ def func(M):
+ # Not partitioning in place, because we should not modify the user's array?
+ r = numpy.partition(M, k - 1)[:, 0:k]
+ r.sort(axis=-1)
+ return r
+
+ else:
+ func = lambda M: numpy.partition(M, k - 1)[:, 0:k]
+ slices = gen_even_slices(len(X), effective_n_jobs(-1))
+ parallel = Parallel(backend="threading", n_jobs=-1)
+ distances = numpy.concatenate(parallel(delayed(func)(X[s]) for s in slices))
+ return distances
+ return None
+
+ if self.params["implementation"] == "hnsw":
+ ef = self.params.get("ef")
+ if ef is not None:
+ self.graph.set_ef(ef)
+ neighbors, distances = self.graph.knn_query(X, k, num_threads=self.params["num_threads"])
+ # The k nearest neighbors are always sorted. I couldn't find it in the doc, but the code calls searchKnn,
+ # which returns a priority_queue, and then fills the return array backwards with top/pop on the queue.
+ if self.return_index:
+ if self.return_distance:
+ return neighbors, numpy.sqrt(distances)
+ else:
+ return neighbors
+ if self.return_distance:
+ return numpy.sqrt(distances)
+ return None
+
+ if self.params["implementation"] == "keops":
+ import torch
+ from pykeops.torch import LazyTensor
+
+ # 'float64' is slow except on super expensive GPUs. Allow it with some param?
+ XX = torch.as_tensor(X, dtype=torch.float32)
+ if X is self.ref_points:
+ YY = XX
+ else:
+ YY = torch.as_tensor(self.ref_points, dtype=torch.float32)
+ p = self.params["p"]
+ if p == numpy.inf:
+ # Requires pykeops 1.4 or later
+ mat = (LazyTensor(XX[:, None, :]) - LazyTensor(YY[None, :, :])).abs().max(-1)
+ elif p == 2: # Any even integer?
+ mat = ((LazyTensor(XX[:, None, :]) - LazyTensor(YY[None, :, :])) ** p).sum(-1)
+ else:
+ mat = ((LazyTensor(XX[:, None, :]) - LazyTensor(YY[None, :, :])).abs() ** p).sum(-1)
+
+ if self.return_index:
+ if self.return_distance:
+ distances, neighbors = mat.Kmin_argKmin(k, dim=1)
+ if p != numpy.inf:
+ distances = distances ** (1.0 / p)
+ return neighbors, distances
+ else:
+ neighbors = mat.argKmin(k, dim=1)
+ return neighbors
+ if self.return_distance:
+ distances = mat.Kmin(k, dim=1)
+ if p != numpy.inf:
+ distances = distances ** (1.0 / p)
+ return distances
+ return None
+
+ if self.params["implementation"] == "ckdtree":
+ qargs = {key: val for key, val in self.params.items() if key in {"p", "eps", "n_jobs"}}
+ distances, neighbors = self.kdtree.query(X, k=self.k, **qargs)
+ if self.return_index:
+ if self.return_distance:
+ return neighbors, distances
+ else:
+ return neighbors
+ if self.return_distance:
+ return distances
+ return None
+
+ assert self.params["implementation"] == "sklearn"
+ if self.return_distance:
+ distances, neighbors = self.nn.kneighbors(X, return_distance=True)
+ if self.return_index:
+ return neighbors, distances
+ else:
+ return distances
+ if self.return_index:
+ neighbors = self.nn.kneighbors(X, return_distance=False)
+ return neighbors
+ return None
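A usage sketch for the new KNearestNeighbors wrapper (illustration only; "ckdtree" is one of the implementation values listed in the constructor docstring and requires scipy):

import numpy as np
from gudhi.point_cloud.knn import KNearestNeighbors

pts = np.random.rand(200, 3)
knn = KNearestNeighbors(k=5, return_index=True, return_distance=True,
                        metric="euclidean", implementation="ckdtree")
indices, distances = knn.fit_transform(pts)
print(indices.shape, distances.shape)  # (200, 5) and (200, 5)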
diff --git a/src/python/gudhi/representations/metrics.py b/src/python/gudhi/representations/metrics.py
index e2c30f8c..59440b1a 100644
--- a/src/python/gudhi/representations/metrics.py
+++ b/src/python/gudhi/representations/metrics.py
@@ -246,27 +246,23 @@ class BottleneckDistance(BaseEstimator, TransformerMixin):
Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric="bottleneck", e=self.epsilon)
return Xfit
-class WassersteinDistance(BaseEstimator, TransformerMixin):
+class PersistenceFisherDistance(BaseEstimator, TransformerMixin):
"""
- This is a class for computing the Wasserstein distance matrix from a list of persistence diagrams.
+ This is a class for computing the persistence Fisher distance matrix from a list of persistence diagrams. The persistence Fisher distance is obtained by computing the original Fisher distance between the probability distributions associated to the persistence diagrams given by convolving them with a Gaussian kernel. See http://papers.nips.cc/paper/8205-persistence-fisher-kernel-a-riemannian-manifold-kernel-for-persistence-diagrams for more details.
"""
- def __init__(self, order=2, internal_p=2, mode="pot", delta=0.01):
+ def __init__(self, bandwidth=1., kernel_approx=None):
"""
- Constructor for the WassersteinDistance class.
+ Constructor for the PersistenceFisherDistance class.
Parameters:
- order (int): exponent for Wasserstein, default value is 2., see :func:`gudhi.wasserstein.wasserstein_distance`.
- internal_p (int): ground metric on the (upper-half) plane (i.e. norm l_p in R^2), default value is 2 (euclidean norm), see :func:`gudhi.wasserstein.wasserstein_distance`.
- mode (str): method for computing Wasserstein distance. Either "pot" or "hera".
- delta (float): relative error 1+delta. Used only if mode == "hera".
+ bandwidth (double): bandwidth of the Gaussian kernel used to turn persistence diagrams into probability distributions (default 1.).
+ kernel_approx (class): kernel approximation class used to speed up computation (default None). Common kernel approximations classes can be found in the scikit-learn library (such as RBFSampler for instance).
"""
- self.order, self.internal_p, self.mode = order, internal_p, mode
- self.metric = "pot_wasserstein" if mode == "pot" else "hera_wasserstein"
- self.delta = delta
+ self.bandwidth, self.kernel_approx = bandwidth, kernel_approx
def fit(self, X, y=None):
"""
- Fit the WassersteinDistance class on a list of persistence diagrams: persistence diagrams are stored in a numpy array called **diagrams**.
+ Fit the PersistenceFisherDistance class on a list of persistence diagrams: persistence diagrams are stored in a numpy array called **diagrams** and the kernel approximation class (if not None) is applied on them.
Parameters:
X (list of n x 2 numpy arrays): input persistence diagrams.
@@ -277,37 +273,37 @@ class WassersteinDistance(BaseEstimator, TransformerMixin):
def transform(self, X):
"""
- Compute all Wasserstein distances between the persistence diagrams that were stored after calling the fit() method, and a given list of (possibly different) persistence diagrams.
+ Compute all persistence Fisher distances between the persistence diagrams that were stored after calling the fit() method, and a given list of (possibly different) persistence diagrams.
Parameters:
X (list of n x 2 numpy arrays): input persistence diagrams.
Returns:
- numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise Wasserstein distances.
+ numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise persistence Fisher distances.
"""
- if self.metric == "hera_wasserstein":
- Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p, delta=self.delta)
- else:
- Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p)
- return Xfit
+ return pairwise_persistence_diagram_distances(X, self.diagrams_, metric="persistence_fisher", bandwidth=self.bandwidth, kernel_approx=self.kernel_approx)
-class PersistenceFisherDistance(BaseEstimator, TransformerMixin):
+class WassersteinDistance(BaseEstimator, TransformerMixin):
"""
- This is a class for computing the persistence Fisher distance matrix from a list of persistence diagrams. The persistence Fisher distance is obtained by computing the original Fisher distance between the probability distributions associated to the persistence diagrams given by convolving them with a Gaussian kernel. See http://papers.nips.cc/paper/8205-persistence-fisher-kernel-a-riemannian-manifold-kernel-for-persistence-diagrams for more details.
+ This is a class for computing the Wasserstein distance matrix from a list of persistence diagrams.
"""
- def __init__(self, bandwidth=1., kernel_approx=None):
+ def __init__(self, order=2, internal_p=2, mode="pot", delta=0.01):
"""
- Constructor for the PersistenceFisherDistance class.
+ Constructor for the WassersteinDistance class.
Parameters:
- bandwidth (double): bandwidth of the Gaussian kernel used to turn persistence diagrams into probability distributions (default 1.).
- kernel_approx (class): kernel approximation class used to speed up computation (default None). Common kernel approximations classes can be found in the scikit-learn library (such as RBFSampler for instance).
+ order (int): exponent for Wasserstein, default value is 2., see :func:`gudhi.wasserstein.wasserstein_distance`.
+ internal_p (int): ground metric on the (upper-half) plane (i.e. norm l_p in R^2), default value is 2 (euclidean norm), see :func:`gudhi.wasserstein.wasserstein_distance`.
+ mode (str): method for computing Wasserstein distance. Either "pot" or "hera".
+ delta (float): relative error 1+delta. Used only if mode == "hera".
"""
- self.bandwidth, self.kernel_approx = bandwidth, kernel_approx
+ self.order, self.internal_p, self.mode = order, internal_p, mode
+ self.metric = "pot_wasserstein" if mode == "pot" else "hera_wasserstein"
+ self.delta = delta
def fit(self, X, y=None):
"""
- Fit the PersistenceFisherDistance class on a list of persistence diagrams: persistence diagrams are stored in a numpy array called **diagrams** and the kernel approximation class (if not None) is applied on them.
+ Fit the WassersteinDistance class on a list of persistence diagrams: persistence diagrams are stored in a numpy array called **diagrams**.
Parameters:
X (list of n x 2 numpy arrays): input persistence diagrams.
@@ -318,12 +314,16 @@ class PersistenceFisherDistance(BaseEstimator, TransformerMixin):
def transform(self, X):
"""
- Compute all persistence Fisher distances between the persistence diagrams that were stored after calling the fit() method, and a given list of (possibly different) persistence diagrams.
+ Compute all Wasserstein distances between the persistence diagrams that were stored after calling the fit() method, and a given list of (possibly different) persistence diagrams.
Parameters:
X (list of n x 2 numpy arrays): input persistence diagrams.
Returns:
- numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise persistence Fisher distances.
+ numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise Wasserstein distances.
"""
- return pairwise_persistence_diagram_distances(X, self.diagrams_, metric="persistence_fisher", bandwidth=self.bandwidth, kernel_approx=self.kernel_approx)
+ if self.metric == "hera_wasserstein":
+ Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p, delta=self.delta)
+ else:
+ Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p)
+ return Xfit
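The two classes above are only reordered by this hunk, not changed; a usage sketch of both (illustration only; mode="pot" assumes the POT package is installed, otherwise mode="hera" can be used):

import numpy as np
from gudhi.representations.metrics import WassersteinDistance, PersistenceFisherDistance

diag1 = np.array([[0.0, 4.0], [1.0, 2.0]])
diag2 = np.array([[0.0, 3.0], [2.0, 5.0]])

W = WassersteinDistance(order=2, internal_p=2, mode="pot")
W.fit([diag1])
print(W.transform([diag2]))  # matrix of pairwise Wasserstein distances

PF = PersistenceFisherDistance(bandwidth=1.0)
PF.fit([diag1])
print(PF.transform([diag2]))  # matrix of pairwise persistence Fisher distances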
diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd
index 82f155de..5dea2449 100644
--- a/src/python/gudhi/simplex_tree.pxd
+++ b/src/python/gudhi/simplex_tree.pxd
@@ -48,8 +48,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
int dimension()
int upper_bound_dimension()
bool find_simplex(vector[int] simplex)
- bool insert_simplex_and_subfaces(vector[int] simplex,
- double filtration)
+ bool insert(vector[int] simplex, double filtration)
vector[pair[vector[int], double]] get_star(vector[int] simplex)
vector[pair[vector[int], double]] get_cofaces(vector[int] simplex,
int dimension)
@@ -57,6 +56,8 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
void remove_maximal_simplex(vector[int] simplex)
bool prune_above_filtration(double filtration)
bool make_filtration_non_decreasing()
+ void compute_extended_filtration()
+ vector[vector[pair[int, pair[double, double]]]] compute_extended_persistence_subdiagrams(vector[pair[int, pair[double, double]]] dgm, double min_persistence)
# Iterators over Simplex tree
pair[vector[int], double] get_simplex_and_filtration(Simplex_tree_simplex_handle f_simplex)
Simplex_tree_simplices_iterator get_simplices_iterator_begin()
@@ -69,9 +70,10 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>":
Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max)
- vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ void compute_persistence(int homology_coeff_field, double min_persistence)
+ vector[pair[int, pair[double, double]]] get_persistence()
vector[int] betti_numbers()
vector[int] persistent_betti_numbers(double from_value, double to_value)
vector[pair[double,double]] intervals_in_dimension(int dimension)
- void write_output_diagram(string diagram_file_name)
+ void write_output_diagram(string diagram_file_name) except +
vector[pair[vector[int], vector[int]]] persistence_pairs()
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
index c01cc905..9479118a 100644
--- a/src/python/gudhi/simplex_tree.pyx
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -90,7 +90,7 @@ cdef class SimplexTree:
(with more :meth:`assign_filtration` or
:meth:`make_filtration_non_decreasing` for instance) before calling
any function that relies on the filtration property, like
- :meth:`initialize_filtration`.
+ :meth:`persistence`.
"""
self.get_ptr().assign_simplex_filtration(simplex, filtration)
@@ -98,16 +98,7 @@ cdef class SimplexTree:
"""This function initializes and sorts the simplicial complex
filtration vector.
- .. note::
-
- This function must be launched before
- :func:`persistence()<gudhi.SimplexTree.persistence>`,
- :func:`betti_numbers()<gudhi.SimplexTree.betti_numbers>`,
- :func:`persistent_betti_numbers()<gudhi.SimplexTree.persistent_betti_numbers>`,
- or :func:`get_filtration()<gudhi.SimplexTree.get_filtration>`
- after :func:`inserting<gudhi.SimplexTree.insert>` or
- :func:`removing<gudhi.SimplexTree.remove_maximal_simplex>`
- simplices.
+ .. deprecated:: 3.2.0
"""
self.get_ptr().initialize_filtration()
@@ -139,9 +130,9 @@ cdef class SimplexTree:
This function is not constant time because it can recompute
dimension if required (can be triggered by
- :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ :func:`remove_maximal_simplex`
or
- :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ :func:`prune_above_filtration`
methods).
"""
return self.get_ptr().dimension()
@@ -166,9 +157,9 @@ cdef class SimplexTree:
This function must be used with caution because it disables
dimension recomputation when required
(this recomputation can be triggered by
- :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ :func:`remove_maximal_simplex`
or
- :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ :func:`prune_above_filtration`
).
"""
self.get_ptr().set_dimension(<int>dimension)
@@ -182,10 +173,7 @@ cdef class SimplexTree:
:returns: true if the simplex was found, false otherwise.
:rtype: bool
"""
- cdef vector[int] csimplex
- for i in simplex:
- csimplex.push_back(i)
- return self.get_ptr().find_simplex(csimplex)
+ return self.get_ptr().find_simplex(simplex)
def insert(self, simplex, filtration=0.0):
"""This function inserts the given N-simplex and its subfaces with the
@@ -202,11 +190,7 @@ cdef class SimplexTree:
otherwise (whatever its original filtration value).
:rtype: bool
"""
- cdef vector[int] csimplex
- for i in simplex:
- csimplex.push_back(i)
- return self.get_ptr().insert_simplex_and_subfaces(csimplex,
- <double>filtration)
+ return self.get_ptr().insert(simplex, <double>filtration)
def get_simplices(self):
"""This function returns a generator with simplices and their given
@@ -308,17 +292,12 @@ cdef class SimplexTree:
.. note::
- Be aware that removing is shifting data in a flat_map
- (:func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>` to be done).
-
- .. note::
-
The dimension of the simplicial complex may be lower after calling
remove_maximal_simplex than it was before. However,
- :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ :func:`upper_bound_dimension`
method will return the old value, which
remains a valid upper bound. If you care, you can call
- :func:`dimension()<gudhi.SimplexTree.dimension>`
+ :func:`dimension`
to recompute the exact dimension.
"""
self.get_ptr().remove_maximal_simplex(simplex)
@@ -334,24 +313,14 @@ cdef class SimplexTree:
.. note::
- Some simplex tree functions require the filtration to be valid.
- prune_above_filtration function is not launching
- :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
- but returns the filtration modification
- information. If the complex has changed , please call
- :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
- to recompute it.
-
- .. note::
-
Note that the dimension of the simplicial complex may be lower
after calling
- :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ :func:`prune_above_filtration`
than it was before. However,
- :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ :func:`upper_bound_dimension`
will return the old value, which remains a
valid upper bound. If you care, you can call
- :func:`dimension()<gudhi.SimplexTree.dimension>`
+ :func:`dimension`
method to recompute the exact dimension.
"""
return self.get_ptr().prune_above_filtration(filtration)
@@ -382,22 +351,63 @@ cdef class SimplexTree:
:returns: True if any filtration value was modified,
False if the filtration was already non-decreasing.
:rtype: bool
+ """
+ return self.get_ptr().make_filtration_non_decreasing()
+ def extend_filtration(self):
+ """ Extend filtration for computing extended persistence. This function only uses the
+ filtration values at the 0-dimensional simplices, and computes the extended persistence
+ diagram induced by the lower-star filtration computed with these values.
.. note::
- Some simplex tree functions require the filtration to be valid.
- make_filtration_non_decreasing function is not launching
- :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
- but returns the filtration modification
- information. If the complex has changed , please call
- :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
- to recompute it.
+ Note that after calling this function, the filtration
+ values are actually modified within the Simplex_tree.
+ The function :func:`extended_persistence`
+ retrieves the original values.
+
+ .. note::
+
+ Note that this code creates an extra vertex internally, so you should make sure that
+ the Simplex_tree does not contain a vertex with the largest possible value (i.e., 4294967295).
"""
- return self.get_ptr().make_filtration_non_decreasing()
+ self.get_ptr().compute_extended_filtration()
+
+ def extended_persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function retrieves good values for extended persistence, and separate the diagrams
+ into the Ordinary, Relative, Extended+ and Extended- subdiagrams.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number. Default value is 11.
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value (i.e., the absolute value of the difference between the persistence diagram point coordinates) to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+ Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: A list of four persistence diagrams in the format described in :func:`persistence`. The first one is Ordinary, the second one is Relative, the third one is Extended+ and the fourth one is Extended-. See https://link.springer.com/article/10.1007/s10208-008-9027-z and/or section 2.2 in https://link.springer.com/article/10.1007/s10208-017-9370-z for a description of these subtypes.
+
+ .. note::
+
+ This function should be called only if :func:`extend_filtration` has been called first!
+
+ .. note::
+
+ The coordinates of the persistence diagram points might be a little different from the
+ original filtration values due to the internal transformation (scaling to [-2,-1]) that is
+ performed on these values during the computation of extended persistence.
+ """
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), False)
+ self.pcohptr.compute_persistence(homology_coeff_field, -1.)
+ persistence_result = self.pcohptr.get_persistence()
+ return self.get_ptr().compute_extended_persistence_subdiagrams(persistence_result, min_persistence)
+
def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False):
- """This function returns the persistence of the simplicial complex.
+ """This function computes and returns the persistence of the simplicial complex.
:param homology_coeff_field: The homology coefficient field. Must be a
prime number. Default value is 11.
@@ -414,13 +424,32 @@ cdef class SimplexTree:
:returns: The persistence of the simplicial complex.
:rtype: list of pairs(dimension, pair(birth, death))
"""
+ self.compute_persistence(homology_coeff_field, min_persistence, persistence_dim_max)
+ return self.pcohptr.get_persistence()
+
+ def compute_persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False):
+ """This function computes the persistence of the simplicial complex, so it can be accessed through
+ :func:`persistent_betti_numbers`, :func:`persistence_pairs`, etc. This function is equivalent to :func:`persistence`
+ when you do not want the list :func:`persistence` returns.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number. Default value is 11.
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+ Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :param persistence_dim_max: If true, the persistent homology for the
+ maximal dimension in the complex is computed. If false, it is
+ ignored. Default is false.
+ :type persistence_dim_max: bool
+ :returns: Nothing.
+ """
if self.pcohptr != NULL:
del self.pcohptr
self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), persistence_dim_max)
- cdef vector[pair[int, pair[double, double]]] persistence_result
- if self.pcohptr != NULL:
- persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
- return persistence_result
+ self.pcohptr.compute_persistence(homology_coeff_field, min_persistence)
def betti_numbers(self):
"""This function returns the Betti numbers of the simplicial complex.
@@ -429,16 +458,11 @@ cdef class SimplexTree:
:rtype: list of int
:note: betti_numbers function requires
- :func:`persistence()<gudhi.SimplexTree.persistence>`
+ :func:`compute_persistence`
function to be launched first.
"""
- cdef vector[int] bn_result
- if self.pcohptr != NULL:
- bn_result = self.pcohptr.betti_numbers()
- else:
- print("betti_numbers function requires persistence function"
- " to be launched first.")
- return bn_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before betti_numbers()"
+ return self.pcohptr.betti_numbers()
def persistent_betti_numbers(self, from_value, to_value):
"""This function returns the persistent Betti numbers of the
@@ -455,16 +479,11 @@ cdef class SimplexTree:
:rtype: list of int
:note: persistent_betti_numbers function requires
- :func:`persistence()<gudhi.SimplexTree.persistence>`
+ :func:`compute_persistence`
function to be launched first.
"""
- cdef vector[int] pbn_result
- if self.pcohptr != NULL:
- pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
- else:
- print("persistent_betti_numbers function requires persistence function"
- " to be launched first.")
- return pbn_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistent_betti_numbers()"
+ return self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
def persistence_intervals_in_dimension(self, dimension):
"""This function returns the persistence intervals of the simplicial
@@ -476,16 +495,11 @@ cdef class SimplexTree:
:rtype: numpy array of dimension 2
:note: intervals_in_dim function requires
- :func:`persistence()<gudhi.SimplexTree.persistence>`
+ :func:`compute_persistence`
function to be launched first.
"""
- cdef vector[pair[double,double]] intervals_result
- if self.pcohptr != NULL:
- intervals_result = self.pcohptr.intervals_in_dimension(dimension)
- else:
- print("intervals_in_dim function requires persistence function"
- " to be launched first.")
- return np_array(intervals_result)
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_intervals_in_dimension()"
+ return np_array(self.pcohptr.intervals_in_dimension(dimension))
def persistence_pairs(self):
"""This function returns a list of persistence birth and death simplices pairs.
@@ -494,33 +508,22 @@ cdef class SimplexTree:
:rtype: list of pair of list of int
:note: persistence_pairs function requires
- :func:`persistence()<gudhi.SimplexTree.persistence>`
+ :func:`compute_persistence`
function to be launched first.
"""
- cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result
- if self.pcohptr != NULL:
- persistence_pairs_result = self.pcohptr.persistence_pairs()
- else:
- print("persistence_pairs function requires persistence function"
- " to be launched first.")
- return persistence_pairs_result
+ assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_pairs()"
+ return self.pcohptr.persistence_pairs()
- def write_persistence_diagram(self, persistence_file=''):
+ def write_persistence_diagram(self, persistence_file):
"""This function writes the persistence intervals of the simplicial
complex in a user given file name.
- :param persistence_file: The specific dimension.
+ :param persistence_file: Name of the file.
:type persistence_file: string.
:note: intervals_in_dim function requires
- :func:`persistence()<gudhi.SimplexTree.persistence>`
+ :func:`compute_persistence`
function to be launched first.
"""
- if self.pcohptr != NULL:
- if persistence_file != '':
- self.pcohptr.write_output_diagram(persistence_file.encode('utf-8'))
- else:
- print("persistence_file must be specified")
- else:
- print("intervals_in_dim function requires persistence function"
- " to be launched first.")
+ assert self.pcohptr != NULL, "compute_persistence() must be called before write_persistence_diagram()"
+ self.pcohptr.write_output_diagram(persistence_file.encode('utf-8'))
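The simplex_tree.pyx changes above replace the old "call persistence() first" convention with an explicit compute_persistence() step and add the extend_filtration() / extended_persistence() pair. A minimal usage sketch in Python, assuming a GUDHI build that already contains this patch (the sketch itself is not part of the diff):

    import gudhi

    st = gudhi.SimplexTree()
    st.insert([0, 1], filtration=1.0)
    st.insert([1, 2], filtration=1.0)
    st.insert([0, 2], filtration=2.0)

    # compute_persistence() runs the computation without building the interval
    # list, so betti_numbers(), persistence_pairs(), etc. can be used afterwards.
    st.compute_persistence(homology_coeff_field=11, min_persistence=0.0)
    print(st.betti_numbers())
    print(st.persistence_intervals_in_dimension(0))

    # persistence() still computes and returns the list of
    # (dimension, (birth, death)) pairs in one call.
    print(st.persistence())

    # Extended persistence: extend_filtration() must be called before
    # extended_persistence(), which returns the Ordinary, Relative,
    # Extended+ and Extended- subdiagrams.
    st2 = gudhi.SimplexTree()
    st2.insert([0, 1])
    st2.insert([1, 2])
    st2.assign_filtration([0], 1.0)
    st2.assign_filtration([1], 2.0)
    st2.assign_filtration([2], 3.0)
    st2.extend_filtration()
    ordinary, relative, ext_plus, ext_minus = st2.extended_persistence(min_persistence=-1.0)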
diff --git a/src/python/gudhi/wasserstein/__init__.py b/src/python/gudhi/wasserstein/__init__.py
new file mode 100644
index 00000000..ed225ba4
--- /dev/null
+++ b/src/python/gudhi/wasserstein/__init__.py
@@ -0,0 +1 @@
+from .wasserstein import wasserstein_distance
diff --git a/src/python/gudhi/wasserstein/barycenter.py b/src/python/gudhi/wasserstein/barycenter.py
new file mode 100644
index 00000000..de7aea81
--- /dev/null
+++ b/src/python/gudhi/wasserstein/barycenter.py
@@ -0,0 +1,159 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Theo Lacombe
+#
+# Copyright (C) 2019 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
+
+import ot
+import numpy as np
+import scipy.spatial.distance as sc
+
+from gudhi.wasserstein import wasserstein_distance
+
+
+def _mean(x, m):
+ '''
+ :param x: a list of 2D-points, off diagonal, x_0... x_{k-1}
+ :param m: total number of points taken into account,
+ that is, we have (m-k) copies of the diagonal
+ :returns: the weighted mean of x with (m-k) copies of the diagonal
+ '''
+ k = len(x)
+ if k > 0:
+ w = np.mean(x, axis=0)
+ w_delta = (w[0] + w[1]) / 2 * np.ones(2)
+ return (k * w + (m-k) * w_delta) / m
+ else:
+ return np.array([0, 0])
+
+
+def lagrangian_barycenter(pdiagset, init=None, verbose=False):
+ '''
+ :param pdiagset: a list of ``numpy.array`` of shape `(n x 2)`
+ (`n` may vary), encoding a set of
+ persistence diagrams with only finite coordinates.
+ :param init: The initial value for barycenter estimate.
+ If ``None``, init is made on a random diagram from the dataset.
+ Otherwise, it can be an ``int``
+ (then initialization is made on ``pdiagset[init]``)
+ or a `(n x 2)` ``numpy.array`` encoding
+ a persistence diagram with `n` points.
+ :type init: ``int``, or (n x 2) ``np.array``
+ :param verbose: if ``True``, returns additional information about the
+ barycenter.
+ :type verbose: boolean
+ :returns: If not verbose (default), a ``numpy.array`` encoding
+ the barycenter estimate of pdiagset
+ (local minimum of the energy function).
+ If ``pdiagset`` is empty, returns ``None``.
+ If verbose, returns a couple ``(Y, log)``
+ where ``Y`` is the barycenter estimate,
+ and ``log`` is a ``dict`` that contains additional information:
+
+ - `"groupings"`, a list of list of pairs ``(i,j)``.
+ Namely, ``G[k] = [...(i, j)...]``, where ``(i,j)`` indicates
+ that ``pdiagset[k][i]`` is matched to ``Y[j]``
+ if ``i = -1`` or ``j = -1``, it means they
+ represent the diagonal.
+
+ - `"energy"`, ``float`` representing the Frechet energy value obtained.
+ It is the mean of squared distances of observations to the output.
+
+ - `"nb_iter"`, ``int`` number of iterations performed before convergence of the algorithm.
+ '''
+ X = pdiagset # to shorten notations, not a copy
+ m = len(X) # number of diagrams we are averaging
+ if m == 0:
+ print("Warning: computing barycenter of empty diag set. Returns None")
+ return None
+
+ # store the number of off-diagonal points for each of the X_i
+ nb_off_diag = np.array([len(X_i) for X_i in X])
+ # Initialisation of barycenter
+ if init is None:
+ i0 = np.random.randint(m) # Index of first state for the barycenter
+ Y = X[i0].copy()
+ else:
+ if type(init)==int:
+ Y = X[init].copy()
+ else:
+ Y = init.copy()
+
+ nb_iter = 0
+
+ converged = False # stopping criterion
+ while not converged:
+ nb_iter += 1
+ K = len(Y) # current nb of points in Y (some might be on diagonal)
+ G = np.full((K, m), -1, dtype=int) # will store for each j, the (index of the)
+ # point matched in each other diagram
+ # (might be the diagonal).
+ # That is, G[j, i] = k <=> y_j is matched to
+ # x_k in the i-th diagram X[i]
+ updated_points = np.zeros((K, 2)) # will store the new positions of
+ # the points of Y.
+ # If points disappear, they are thrown
+ # onto [0,0] by default.
+ new_created_points = [] # will store potential new points.
+
+ # Step 1 : compute optimal matching (Y, X_i) for each X_i
+ # and create new points in Y if needed
+ for i in range(m):
+ _, indices = wasserstein_distance(Y, X[i], matching=True, order=2., internal_p=2.)
+ for y_j, x_i_j in indices:
+ if y_j >= 0: # we matched an off diagonal point to x_i_j...
+ if x_i_j >= 0: # ...which is also an off-diagonal point.
+ G[y_j, i] = x_i_j
+ else: # ...which is a diagonal point
+ G[y_j, i] = -1 # -1 stands for the diagonal (mask)
+ else: # We matched a diagonal point to x_i_j...
+ if x_i_j >= 0: # ...which is an off-diagonal point!
+ # need to create new point in Y
+ new_y = _mean(np.array([X[i][x_i_j]]), m)
+ # Average this point with (m-1) copies of Delta
+ new_created_points.append(new_y)
+
+ # Step 2 : update current point positions using the groupings computed above
+ to_delete = []
+ for j in range(K):
+ matched_points = [X[i][G[j, i]] for i in range(m) if G[j, i] > -1]
+ new_y_j = _mean(matched_points, m)
+ if not np.array_equal(new_y_j, np.array([0,0])):
+ updated_points[j] = new_y_j
+ else: # this point is no longer of any use.
+ to_delete.append(j)
+ # we now remove the points to be deleted.
+ updated_points = np.delete(updated_points, to_delete, axis=0)
+
+ # we cannot converge if new points have been created.
+ if new_created_points:
+ Y = np.concatenate((updated_points, new_created_points))
+ else:
+ # Step 3 : we check convergence
+ if np.array_equal(updated_points, Y):
+ converged = True
+ Y = updated_points
+
+
+ if verbose:
+ groupings = []
+ energy = 0
+ log = {}
+ n_y = len(Y)
+ for i in range(m):
+ cost, edges = wasserstein_distance(Y, X[i], matching=True, order=2., internal_p=2.)
+ groupings.append(edges)
+ energy += cost
+ log["groupings"] = groupings
+ energy = energy/m
+ log["energy"] = energy
+ log["nb_iter"] = nb_iter
+
+ return Y, log
+ else:
+ return Y
+
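A short usage sketch for the new barycenter module (not part of the diff), mirroring the lagrangian_barycenter docstring above; the input diagrams are arbitrary toy values:

    import numpy as np
    from gudhi.wasserstein.barycenter import lagrangian_barycenter

    dg1 = np.array([[0.2, 0.5]])
    dg2 = np.array([[0.2, 0.7]])

    # Barycenter estimate only (init=0 starts from dg1 instead of a random diagram).
    Y = lagrangian_barycenter(pdiagset=[dg1, dg2], init=0)

    # With verbose=True, also retrieve the groupings, the Frechet energy
    # and the number of iterations.
    Y, log = lagrangian_barycenter(pdiagset=[dg1, dg2], init=0, verbose=True)
    print(Y, log["energy"], log["nb_iter"])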
diff --git a/src/python/gudhi/wasserstein.py b/src/python/gudhi/wasserstein/wasserstein.py
index 3dd993f9..efc851a0 100644
--- a/src/python/gudhi/wasserstein.py
+++ b/src/python/gudhi/wasserstein/wasserstein.py
@@ -9,11 +9,14 @@
import numpy as np
import scipy.spatial.distance as sc
+
try:
import ot
except ImportError:
print("POT (Python Optimal Transport) package is not installed. Try to run $ conda install -c conda-forge pot ; or $ pip install POT")
+
+# Currently unused, but Théo says it is likely to be used again.
def _proj_on_diag(X):
'''
:param X: (n x 2) array encoding the points of a persistent diagram.
@@ -23,28 +26,36 @@ def _proj_on_diag(X):
return np.array([Z , Z]).T
-def _build_dist_matrix(X, Y, order=2., internal_p=2.):
+def _dist_to_diag(X, internal_p):
+ '''
+ :param X: (n x 2) array encoding the points of a persistent diagram.
+ :param internal_p: Ground metric (i.e. norm L^p).
+ :returns: (n) array encoding the (respective orthogonal) distances of the points to the diagonal
+
+ .. note::
+ Assumes that the points are above the diagonal.
+ '''
+ return (X[:, 1] - X[:, 0]) * 2 ** (1.0 / internal_p - 1)
+
+
+def _build_dist_matrix(X, Y, order, internal_p):
'''
:param X: (n x 2) numpy.array encoding the (points of the) first diagram.
:param Y: (m x 2) numpy.array encoding the second diagram.
:param order: exponent for the Wasserstein metric.
:param internal_p: Ground metric (i.e. norm L^p).
- :returns: (n+1) x (m+1) np.array encoding the cost matrix C.
- For 0 <= i < n, 0 <= j < m, C[i,j] encodes the distance between X[i] and Y[j],
- while C[i, m] (resp. C[n, j]) encodes the distance (to the p) between X[i] (resp Y[j])
+ :returns: (n+1) x (m+1) np.array encoding the cost matrix C.
+ For 0 <= i < n, 0 <= j < m, C[i,j] encodes the distance between X[i] and Y[j],
+ while C[i, m] (resp. C[n, j]) encodes the distance (to the p) between X[i] (resp Y[j])
and its orthogonal projection onto the diagonal.
note also that C[n, m] = 0 (it costs nothing to move from the diagonal to the diagonal).
'''
- Xdiag = _proj_on_diag(X)
- Ydiag = _proj_on_diag(Y)
+ Cxd = _dist_to_diag(X, internal_p)**order
+ Cdy = _dist_to_diag(Y, internal_p)**order
if np.isinf(internal_p):
C = sc.cdist(X,Y, metric='chebyshev')**order
- Cxd = np.linalg.norm(X - Xdiag, ord=internal_p, axis=1)**order
- Cdy = np.linalg.norm(Y - Ydiag, ord=internal_p, axis=1)**order
else:
C = sc.cdist(X,Y, metric='minkowski', p=internal_p)**order
- Cxd = np.linalg.norm(X - Xdiag, ord=internal_p, axis=1)**order
- Cdy = np.linalg.norm(Y - Ydiag, ord=internal_p, axis=1)**order
Cf = np.hstack((C, Cxd[:,None]))
Cdy = np.append(Cdy, 0)
@@ -58,24 +69,23 @@ def _perstot(X, order, internal_p):
:param X: (n x 2) numpy.array (points of a given diagram).
:param order: exponent for Wasserstein. Default value is 2.
:param internal_p: Ground metric on the (upper-half) plane (i.e. norm L^p in R^2); Default value is 2 (Euclidean norm).
- :returns: float, the total persistence of the diagram (that is, its distance to the empty diagram).
+ :returns: float, the total persistence of the diagram (that is, its distance to the empty diagram).
'''
- Xdiag = _proj_on_diag(X)
- return (np.sum(np.linalg.norm(X - Xdiag, ord=internal_p, axis=1)**order))**(1./order)
+ return np.linalg.norm(_dist_to_diag(X, internal_p), ord=order)
def wasserstein_distance(X, Y, matching=False, order=2., internal_p=2.):
'''
- :param X: (n x 2) numpy.array encoding the (finite points of the) first diagram. Must not contain essential points
+ :param X: (n x 2) numpy.array encoding the (finite points of the) first diagram. Must not contain essential points
(i.e. with infinite coordinate).
:param Y: (m x 2) numpy.array encoding the second diagram.
:param matching: if True, computes and returns the optimal matching between X and Y, encoded as
a (n x 2) np.array [...[i,j]...], meaning the i-th point in X is matched to
the j-th point in Y, with the convention (-1) represents the diagonal.
:param order: exponent for Wasserstein; Default value is 2.
- :param internal_p: Ground metric on the (upper-half) plane (i.e. norm L^p in R^2);
+ :param internal_p: Ground metric on the (upper-half) plane (i.e. norm L^p in R^2);
Default value is 2 (Euclidean norm).
- :returns: the Wasserstein distance of order q (1 <= q < infinity) between persistence diagrams with
+ :returns: the Wasserstein distance of order q (1 <= q < infinity) between persistence diagrams with
respect to the internal_p-norm as ground metric.
If matching is set to True, also returns the optimal matching between X and Y.
'''
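A usage sketch for the relocated wasserstein module (not part of the diff); the input diagrams are toy values:

    import numpy as np
    from gudhi.wasserstein import wasserstein_distance

    diag1 = np.array([[2.7, 3.7], [9.6, 14.0]])
    diag2 = np.array([[2.8, 4.45], [9.5, 14.1]])

    # Distance only.
    d = wasserstein_distance(diag1, diag2, order=2.0, internal_p=2.0)

    # Distance plus optimal matching; -1 stands for the diagonal.
    d, matching = wasserstein_distance(diag1, diag2, matching=True, order=2.0, internal_p=2.0)

    # The refactored helper _dist_to_diag computes (d - b) * 2 ** (1/internal_p - 1),
    # i.e. the orthogonal L^p distance of a point (b, d) to the diagonal;
    # for internal_p = 2 this is (d - b) / sqrt(2).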
diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h
index 8614eee3..40de88f3 100644
--- a/src/python/include/Alpha_complex_interface.h
+++ b/src/python/include/Alpha_complex_interface.h
@@ -58,7 +58,6 @@ class Alpha_complex_interface {
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square) {
alpha_complex_->create_complex(*simplex_tree, max_alpha_square);
- simplex_tree->initialize_filtration();
}
private:
diff --git a/src/python/include/Euclidean_strong_witness_complex_interface.h b/src/python/include/Euclidean_strong_witness_complex_interface.h
index c1c72737..f94c51ef 100644
--- a/src/python/include/Euclidean_strong_witness_complex_interface.h
+++ b/src/python/include/Euclidean_strong_witness_complex_interface.h
@@ -50,12 +50,10 @@ class Euclidean_strong_witness_complex_interface {
void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square,
std::size_t limit_dimension) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
- simplex_tree->initialize_filtration();
}
void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square);
- simplex_tree->initialize_filtration();
}
std::vector<double> get_point(unsigned vh) {
diff --git a/src/python/include/Euclidean_witness_complex_interface.h b/src/python/include/Euclidean_witness_complex_interface.h
index 5d7dbdc2..4411ae79 100644
--- a/src/python/include/Euclidean_witness_complex_interface.h
+++ b/src/python/include/Euclidean_witness_complex_interface.h
@@ -49,12 +49,10 @@ class Euclidean_witness_complex_interface {
void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, std::size_t limit_dimension) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
- simplex_tree->initialize_filtration();
}
void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square);
- simplex_tree->initialize_filtration();
}
std::vector<double> get_point(unsigned vh) {
diff --git a/src/python/include/Nerve_gic_interface.h b/src/python/include/Nerve_gic_interface.h
index 5e7f8ae6..ab14c318 100644
--- a/src/python/include/Nerve_gic_interface.h
+++ b/src/python/include/Nerve_gic_interface.h
@@ -29,7 +29,6 @@ class Nerve_gic_interface : public Cover_complex<std::vector<double>> {
public:
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree) {
create_complex(*simplex_tree);
- simplex_tree->initialize_filtration();
}
void set_cover_from_Euclidean_Voronoi(int m) {
set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m);
diff --git a/src/python/include/Persistent_cohomology_interface.h b/src/python/include/Persistent_cohomology_interface.h
index 8c79e6f3..e2b69a52 100644
--- a/src/python/include/Persistent_cohomology_interface.h
+++ b/src/python/include/Persistent_cohomology_interface.h
@@ -23,6 +23,7 @@ template<class FilteredComplex>
class Persistent_cohomology_interface : public
persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp> {
private:
+ typedef persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp> Base;
/*
* Compare two intervals by dimension, then by length.
*/
@@ -42,26 +43,20 @@ persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomol
};
public:
- Persistent_cohomology_interface(FilteredComplex* stptr)
- : persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp>(*stptr),
- stptr_(stptr) { }
-
- Persistent_cohomology_interface(FilteredComplex* stptr, bool persistence_dim_max)
- : persistent_cohomology::Persistent_cohomology<FilteredComplex,
- persistent_cohomology::Field_Zp>(*stptr, persistence_dim_max),
+ Persistent_cohomology_interface(FilteredComplex* stptr, bool persistence_dim_max=false)
+ : Base(*stptr, persistence_dim_max),
stptr_(stptr) { }
- std::vector<std::pair<int, std::pair<double, double>>> get_persistence(int homology_coeff_field,
- double min_persistence) {
- persistent_cohomology::Persistent_cohomology<FilteredComplex,
- persistent_cohomology::Field_Zp>::init_coefficients(homology_coeff_field);
- persistent_cohomology::Persistent_cohomology<FilteredComplex,
- persistent_cohomology::Field_Zp>::compute_persistent_cohomology(min_persistence);
+ // TODO: move to the constructors?
+ void compute_persistence(int homology_coeff_field, double min_persistence) {
+ Base::init_coefficients(homology_coeff_field);
+ Base::compute_persistent_cohomology(min_persistence);
+ }
+ std::vector<std::pair<int, std::pair<double, double>>> get_persistence() {
// Custom sort and output persistence
cmp_intervals_by_dim_then_length cmp(stptr_);
- auto persistent_pairs = persistent_cohomology::Persistent_cohomology<FilteredComplex,
- persistent_cohomology::Field_Zp>::get_persistent_pairs();
+ auto persistent_pairs = Base::get_persistent_pairs();
std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp);
std::vector<std::pair<int, std::pair<double, double>>> persistence;
@@ -74,8 +69,7 @@ persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomol
}
std::vector<std::pair<std::vector<int>, std::vector<int>>> persistence_pairs() {
- auto pairs = persistent_cohomology::Persistent_cohomology<FilteredComplex,
- persistent_cohomology::Field_Zp>::get_persistent_pairs();
+ auto pairs = Base::get_persistent_pairs();
std::vector<std::pair<std::vector<int>, std::vector<int>>> persistence_pairs;
persistence_pairs.reserve(pairs.size());
diff --git a/src/python/include/Rips_complex_interface.h b/src/python/include/Rips_complex_interface.h
index a66b0e5b..d98b0226 100644
--- a/src/python/include/Rips_complex_interface.h
+++ b/src/python/include/Rips_complex_interface.h
@@ -53,7 +53,6 @@ class Rips_complex_interface {
rips_complex_->create_complex(*simplex_tree, dim_max);
else
sparse_rips_complex_->create_complex(*simplex_tree, dim_max);
- simplex_tree->initialize_filtration();
}
private:
diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h
index 4a7062d6..56d7c41d 100644
--- a/src/python/include/Simplex_tree_interface.h
+++ b/src/python/include/Simplex_tree_interface.h
@@ -16,8 +16,6 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Points_off_io.h>
-#include "Persistent_cohomology_interface.h"
-
#include <iostream>
#include <vector>
#include <utility> // std::pair
@@ -37,18 +35,25 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
using Filtered_simplices = std::vector<Simplex_and_filtration>;
using Skeleton_simplex_iterator = typename Base::Skeleton_simplex_iterator;
using Complex_simplex_iterator = typename Base::Complex_simplex_iterator;
+ using Extended_filtration_data = typename Base::Extended_filtration_data;
public:
- bool find_simplex(const Simplex& vh) {
- return (Base::find(vh) != Base::null_simplex());
+
+ Extended_filtration_data efd;
+
+ bool find_simplex(const Simplex& simplex) {
+ return (Base::find(simplex) != Base::null_simplex());
}
- void assign_simplex_filtration(const Simplex& vh, Filtration_value filtration) {
- Base::assign_filtration(Base::find(vh), filtration);
+ void assign_simplex_filtration(const Simplex& simplex, Filtration_value filtration) {
+ Base::assign_filtration(Base::find(simplex), filtration);
+ Base::clear_filtration();
}
bool insert(const Simplex& simplex, Filtration_value filtration = 0) {
Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
+ if (result.first != Base::null_simplex())
+ Base::clear_filtration();
return (result.second);
}
@@ -82,7 +87,7 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
void remove_maximal_simplex(const Simplex& simplex) {
Base::remove_maximal_simplex(Base::find(simplex));
- Base::initialize_filtration();
+ Base::clear_filtration();
}
Simplex_and_filtration get_simplex_and_filtration(Simplex_handle f_simplex) {
@@ -117,9 +122,39 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
return cofaces;
}
- void create_persistence(Gudhi::Persistent_cohomology_interface<Base>* pcoh) {
- Base::initialize_filtration();
- pcoh = new Gudhi::Persistent_cohomology_interface<Base>(*this);
+ void compute_extended_filtration() {
+ this->efd = this->extend_filtration();
+ return;
+ }
+
+ std::vector<std::vector<std::pair<int, std::pair<Filtration_value, Filtration_value>>>> compute_extended_persistence_subdiagrams(const std::vector<std::pair<int, std::pair<Filtration_value, Filtration_value>>>& dgm, Filtration_value min_persistence){
+ std::vector<std::vector<std::pair<int, std::pair<Filtration_value, Filtration_value>>>> new_dgm(4);
+ for (unsigned int i = 0; i < dgm.size(); i++){
+ std::pair<Filtration_value, Extended_simplex_type> px = this->decode_extended_filtration(dgm[i].second.first, this->efd);
+ std::pair<Filtration_value, Extended_simplex_type> py = this->decode_extended_filtration(dgm[i].second.second, this->efd);
+ std::pair<int, std::pair<Filtration_value, Filtration_value>> pd_point = std::make_pair(dgm[i].first, std::make_pair(px.first, py.first));
+ if(std::abs(px.first - py.first) > min_persistence){
+ //Ordinary
+ if (px.second == Extended_simplex_type::UP && py.second == Extended_simplex_type::UP){
+ new_dgm[0].push_back(pd_point);
+ }
+ // Relative
+ else if (px.second == Extended_simplex_type::DOWN && py.second == Extended_simplex_type::DOWN){
+ new_dgm[1].push_back(pd_point);
+ }
+ else{
+ // Extended+
+ if (px.first < py.first){
+ new_dgm[2].push_back(pd_point);
+ }
+ //Extended-
+ else{
+ new_dgm[3].push_back(pd_point);
+ }
+ }
+ }
+ }
+ return new_dgm;
}
// Iterator over the simplex tree
diff --git a/src/python/include/Strong_witness_complex_interface.h b/src/python/include/Strong_witness_complex_interface.h
index cda5b514..e9ab0c7b 100644
--- a/src/python/include/Strong_witness_complex_interface.h
+++ b/src/python/include/Strong_witness_complex_interface.h
@@ -41,13 +41,11 @@ class Strong_witness_complex_interface {
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
std::size_t limit_dimension) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
- simplex_tree->initialize_filtration();
}
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree,
double max_alpha_square) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square);
- simplex_tree->initialize_filtration();
}
private:
diff --git a/src/python/include/Tangential_complex_interface.h b/src/python/include/Tangential_complex_interface.h
index 698226cc..b1afce94 100644
--- a/src/python/include/Tangential_complex_interface.h
+++ b/src/python/include/Tangential_complex_interface.h
@@ -90,7 +90,6 @@ class Tangential_complex_interface {
void create_simplex_tree(Simplex_tree<>* simplex_tree) {
tangential_complex_->create_complex<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>(*simplex_tree);
- simplex_tree->initialize_filtration();
}
void set_max_squared_edge_length(double max_squared_edge_length) {
diff --git a/src/python/include/Witness_complex_interface.h b/src/python/include/Witness_complex_interface.h
index 45e14253..76947e53 100644
--- a/src/python/include/Witness_complex_interface.h
+++ b/src/python/include/Witness_complex_interface.h
@@ -41,13 +41,11 @@ class Witness_complex_interface {
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
std::size_t limit_dimension) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
- simplex_tree->initialize_filtration();
}
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree,
double max_alpha_square) {
witness_complex_->create_complex(*simplex_tree, max_alpha_square);
- simplex_tree->initialize_filtration();
}
private:
diff --git a/src/python/test/test_cover_complex.py b/src/python/test/test_cover_complex.py
index 32bc5a26..260f6a5c 100755
--- a/src/python/test/test_cover_complex.py
+++ b/src/python/test/test_cover_complex.py
@@ -9,6 +9,7 @@
"""
from gudhi import CoverComplex
+import pytest
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2018 Inria"
@@ -24,7 +25,8 @@ def test_empty_constructor():
def test_non_existing_file_read():
# Try to open a non existing file
cover = CoverComplex()
- assert cover.read_point_cloud("pouetpouettralala.toubiloubabdou") == False
+ with pytest.raises(FileNotFoundError):
+ cover.read_point_cloud("pouetpouettralala.toubiloubabdou")
def test_files_creation():
diff --git a/src/python/test/test_cubical_complex.py b/src/python/test/test_cubical_complex.py
index 8c1b2600..fce4875c 100755
--- a/src/python/test/test_cubical_complex.py
+++ b/src/python/test/test_cubical_complex.py
@@ -10,6 +10,7 @@
from gudhi import CubicalComplex, PeriodicCubicalComplex
import numpy as np
+import pytest
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
@@ -25,9 +26,8 @@ def test_empty_constructor():
def test_non_existing_perseus_file_constructor():
# Try to open a non existing file
- cub = CubicalComplex(perseus_file="pouetpouettralala.toubiloubabdou")
- assert cub.__is_defined() == False
- assert cub.__is_persistence_defined() == False
+ with pytest.raises(FileNotFoundError):
+ cub = CubicalComplex(perseus_file="pouetpouettralala.toubiloubabdou")
def test_dimension_or_perseus_file_constructor():
diff --git a/src/python/test/test_dtm.py b/src/python/test/test_dtm.py
new file mode 100755
index 00000000..bff4c267
--- /dev/null
+++ b/src/python/test/test_dtm.py
@@ -0,0 +1,68 @@
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Marc Glisse
+
+ Copyright (C) 2020 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+from gudhi.point_cloud.dtm import DistanceToMeasure
+import numpy
+import pytest
+import torch
+
+
+def test_dtm_compare_euclidean():
+ pts = numpy.random.rand(1000, 4)
+ k = 6
+ dtm = DistanceToMeasure(k, implementation="ckdtree")
+ r0 = dtm.fit_transform(pts)
+ dtm = DistanceToMeasure(k, implementation="sklearn")
+ r1 = dtm.fit_transform(pts)
+ assert r1 == pytest.approx(r0)
+ dtm = DistanceToMeasure(k, implementation="sklearn", algorithm="brute")
+ r2 = dtm.fit_transform(pts)
+ assert r2 == pytest.approx(r0)
+ dtm = DistanceToMeasure(k, implementation="hnsw")
+ r3 = dtm.fit_transform(pts)
+ assert r3 == pytest.approx(r0, rel=0.1)
+ from scipy.spatial.distance import cdist
+
+ d = cdist(pts, pts)
+ dtm = DistanceToMeasure(k, metric="precomputed")
+ r4 = dtm.fit_transform(d)
+ assert r4 == pytest.approx(r0)
+ dtm = DistanceToMeasure(k, metric="precomputed", n_jobs=2)
+ r4b = dtm.fit_transform(d)
+ assert r4b == pytest.approx(r0)
+ dtm = DistanceToMeasure(k, implementation="keops")
+ r5 = dtm.fit_transform(pts)
+ assert r5 == pytest.approx(r0)
+ pts2 = torch.tensor(pts, requires_grad=True)
+ assert pts2.grad is None
+ dtm = DistanceToMeasure(k, implementation="keops", enable_autodiff=True)
+ r6 = dtm.fit_transform(pts2)
+ assert r6.detach().numpy() == pytest.approx(r0)
+ r6.sum().backward()
+ assert not torch.isnan(pts2.grad).any()
+ pts2 = torch.tensor(pts, requires_grad=True)
+ assert pts2.grad is None
+ dtm = DistanceToMeasure(k, implementation="ckdtree", enable_autodiff=True)
+ r7 = dtm.fit_transform(pts2)
+ assert r7.detach().numpy() == pytest.approx(r0)
+ r7.sum().backward()
+ assert not torch.isnan(pts2.grad).any()
+
+
+def test_dtm_precomputed():
+ dist = numpy.array([[1.0, 3, 8], [1, 5, 5], [0, 2, 3]])
+ dtm = DistanceToMeasure(2, q=1, metric="neighbors")
+ r = dtm.fit_transform(dist)
+ assert r == pytest.approx([2.0, 3, 1])
+
+ dist = numpy.array([[2.0, 2], [0, 1], [3, 4]])
+ dtm = DistanceToMeasure(2, q=2, metric="neighbors")
+ r = dtm.fit_transform(dist)
+ assert r == pytest.approx([2.0, 0.707, 3.5355], rel=0.01)
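The expected values in test_dtm_precomputed above are consistent with the usual distance-to-measure formula, DTM_q(x) = (mean of the q-th powers of the k nearest-neighbor distances)**(1/q); with metric="neighbors" the rows are read as those distances directly. A small check in plain NumPy (an illustration, not the DistanceToMeasure implementation; it assumes the first k columns are used):

    import numpy as np

    def dtm_from_neighbor_distances(dist, q):
        # dist: (n x k) array, row i holds the distances from point i
        # to its k nearest neighbors
        return np.mean(dist ** q, axis=-1) ** (1.0 / q)

    dist = np.array([[1.0, 3.0], [1.0, 5.0], [0.0, 2.0]])
    print(dtm_from_neighbor_distances(dist, q=1))   # ~ [2.0, 3.0, 1.0]

    dist = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]])
    print(dtm_from_neighbor_distances(dist, q=2))   # ~ [2.0, 0.707, 3.536]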
diff --git a/src/python/test/test_knn.py b/src/python/test/test_knn.py
new file mode 100755
index 00000000..a87ec212
--- /dev/null
+++ b/src/python/test/test_knn.py
@@ -0,0 +1,130 @@
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Marc Glisse
+
+ Copyright (C) 2020 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+from gudhi.point_cloud.knn import KNearestNeighbors
+import numpy as np
+import pytest
+
+
+def test_knn_explicit():
+ base = np.array([[1.0, 1], [1, 2], [4, 2], [4, 3]])
+ query = np.array([[1.0, 1], [2, 2], [4, 4]])
+ knn = KNearestNeighbors(2, metric="manhattan", return_distance=True, return_index=True)
+ knn.fit(base)
+ r = knn.transform(query)
+ assert r[0] == pytest.approx(np.array([[0, 1], [1, 0], [3, 2]]))
+ assert r[1] == pytest.approx(np.array([[0.0, 1], [1, 2], [1, 2]]))
+
+ knn = KNearestNeighbors(2, metric="chebyshev", return_distance=True, return_index=False)
+ knn.fit(base)
+ r = knn.transform(query)
+ assert r == pytest.approx(np.array([[0.0, 1], [1, 1], [1, 2]]))
+ r = (
+ KNearestNeighbors(2, metric="chebyshev", return_distance=True, return_index=False, implementation="keops")
+ .fit(base)
+ .transform(query)
+ )
+ assert r == pytest.approx(np.array([[0.0, 1], [1, 1], [1, 2]]))
+ r = (
+ KNearestNeighbors(2, metric="chebyshev", return_distance=True, return_index=False, implementation="keops", enable_autodiff=True)
+ .fit(base)
+ .transform(query)
+ )
+ assert r == pytest.approx(np.array([[0.0, 1], [1, 1], [1, 2]]))
+
+ knn = KNearestNeighbors(2, metric="minkowski", p=3, return_distance=False, return_index=True)
+ knn.fit(base)
+ r = knn.transform(query)
+ assert np.array_equal(r, [[0, 1], [1, 0], [3, 2]])
+ r = (
+ KNearestNeighbors(2, metric="minkowski", p=3, return_distance=False, return_index=True, implementation="keops")
+ .fit(base)
+ .transform(query)
+ )
+ assert np.array_equal(r, [[0, 1], [1, 0], [3, 2]])
+
+ dist = np.array([[0.0, 3, 8], [1, 0, 5], [1, 2, 0]])
+ knn = KNearestNeighbors(2, metric="precomputed", return_index=True, return_distance=False)
+ r = knn.fit_transform(dist)
+ assert np.array_equal(r, [[0, 1], [1, 0], [2, 0]])
+ knn = KNearestNeighbors(2, metric="precomputed", return_index=True, return_distance=True, sort_results=True)
+ r = knn.fit_transform(dist)
+ assert np.array_equal(r[0], [[0, 1], [1, 0], [2, 0]])
+ assert np.array_equal(r[1], [[0, 3], [0, 1], [0, 1]])
+ # Second time in parallel
+ knn = KNearestNeighbors(2, metric="precomputed", return_index=True, return_distance=False, n_jobs=2, sort_results=True)
+ r = knn.fit_transform(dist)
+ assert np.array_equal(r, [[0, 1], [1, 0], [2, 0]])
+ knn = KNearestNeighbors(2, metric="precomputed", return_index=True, return_distance=True, n_jobs=2)
+ r = knn.fit_transform(dist)
+ assert np.array_equal(r[0], [[0, 1], [1, 0], [2, 0]])
+ assert np.array_equal(r[1], [[0, 3], [0, 1], [0, 1]])
+
+
+def test_knn_compare():
+ base = np.array([[1.0, 1], [1, 2], [4, 2], [4, 3]])
+ query = np.array([[1.0, 1], [2, 2], [4, 4]])
+ r0 = (
+ KNearestNeighbors(2, implementation="ckdtree", return_index=True, return_distance=False)
+ .fit(base)
+ .transform(query)
+ )
+ r1 = (
+ KNearestNeighbors(2, implementation="sklearn", return_index=True, return_distance=False)
+ .fit(base)
+ .transform(query)
+ )
+ r2 = (
+ KNearestNeighbors(2, implementation="hnsw", return_index=True, return_distance=False).fit(base).transform(query)
+ )
+ r3 = (
+ KNearestNeighbors(2, implementation="keops", return_index=True, return_distance=False)
+ .fit(base)
+ .transform(query)
+ )
+ assert np.array_equal(r0, r1) and np.array_equal(r0, r2) and np.array_equal(r0, r3)
+
+ r0 = (
+ KNearestNeighbors(2, implementation="ckdtree", return_index=True, return_distance=True)
+ .fit(base)
+ .transform(query)
+ )
+ r1 = (
+ KNearestNeighbors(2, implementation="sklearn", return_index=True, return_distance=True)
+ .fit(base)
+ .transform(query)
+ )
+ r2 = KNearestNeighbors(2, implementation="hnsw", return_index=True, return_distance=True).fit(base).transform(query)
+ r3 = (
+ KNearestNeighbors(2, implementation="keops", return_index=True, return_distance=True).fit(base).transform(query)
+ )
+ assert np.array_equal(r0[0], r1[0]) and np.array_equal(r0[0], r2[0]) and np.array_equal(r0[0], r3[0])
+ d0 = pytest.approx(r0[1])
+ assert r1[1] == d0 and r2[1] == d0 and r3[1] == d0
+
+
+def test_knn_nop():
+ # This doesn't look super useful...
+ p = np.array([[0.0]])
+ assert None is KNearestNeighbors(
+ k=1, return_index=False, return_distance=False, implementation="sklearn"
+ ).fit_transform(p)
+ assert None is KNearestNeighbors(
+ k=1, return_index=False, return_distance=False, implementation="ckdtree"
+ ).fit_transform(p)
+ assert None is KNearestNeighbors(
+ k=1, return_index=False, return_distance=False, implementation="hnsw", ef=5
+ ).fit_transform(p)
+ assert None is KNearestNeighbors(
+ k=1, return_index=False, return_distance=False, implementation="keops"
+ ).fit_transform(p)
+ assert None is KNearestNeighbors(
+ k=1, return_index=False, return_distance=False, metric="precomputed"
+ ).fit_transform(p)
diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py
index f7848379..2137d822 100755
--- a/src/python/test/test_simplex_tree.py
+++ b/src/python/test/test_simplex_tree.py
@@ -9,6 +9,7 @@
"""
from gudhi import SimplexTree
+import pytest
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
@@ -45,7 +46,6 @@ def test_insertion():
assert st.find([2, 3]) == False
# filtration test
- st.initialize_filtration()
assert st.filtration([0, 1, 2]) == 4.0
assert st.filtration([0, 2]) == 4.0
assert st.filtration([1, 2]) == 4.0
@@ -92,7 +92,6 @@ def test_insertion():
assert st.find([1]) == True
assert st.find([2]) == True
- st.initialize_filtration()
assert st.persistence(persistence_dim_max=True) == [
(1, (4.0, float("inf"))),
(0, (0.0, float("inf"))),
@@ -150,7 +149,6 @@ def test_expansion():
st.expansion(3)
assert st.num_vertices() == 7
assert st.num_simplices() == 22
- st.initialize_filtration()
assert list(st.get_filtration()) == [
([2], 0.1),
@@ -250,6 +248,87 @@ def test_make_filtration_non_decreasing():
assert st.filtration([3, 4]) == 2.0
assert st.filtration([4, 5]) == 2.0
+def test_extend_filtration():
+
+ # Inserted simplex:
+ # 5 4
+ # o o
+ # / \ /
+ # o o
+ # /2\ /3
+ # o o
+ # 1 0
+
+ st = SimplexTree()
+ st.insert([0,2])
+ st.insert([1,2])
+ st.insert([0,3])
+ st.insert([2,5])
+ st.insert([3,4])
+ st.insert([3,5])
+ st.assign_filtration([0], 1.)
+ st.assign_filtration([1], 2.)
+ st.assign_filtration([2], 3.)
+ st.assign_filtration([3], 4.)
+ st.assign_filtration([4], 5.)
+ st.assign_filtration([5], 6.)
+
+ assert list(st.get_filtration()) == [
+ ([0, 2], 0.0),
+ ([1, 2], 0.0),
+ ([0, 3], 0.0),
+ ([3, 4], 0.0),
+ ([2, 5], 0.0),
+ ([3, 5], 0.0),
+ ([0], 1.0),
+ ([1], 2.0),
+ ([2], 3.0),
+ ([3], 4.0),
+ ([4], 5.0),
+ ([5], 6.0)
+ ]
+
+ st.extend_filtration()
+
+ assert list(st.get_filtration()) == [
+ ([6], -3.0),
+ ([0], -2.0),
+ ([1], -1.8),
+ ([2], -1.6),
+ ([0, 2], -1.6),
+ ([1, 2], -1.6),
+ ([3], -1.4),
+ ([0, 3], -1.4),
+ ([4], -1.2),
+ ([3, 4], -1.2),
+ ([5], -1.0),
+ ([2, 5], -1.0),
+ ([3, 5], -1.0),
+ ([5, 6], 1.0),
+ ([4, 6], 1.2),
+ ([3, 6], 1.4),
+ ([3, 4, 6], 1.4),
+ ([3, 5, 6], 1.4),
+ ([2, 6], 1.6),
+ ([2, 5, 6], 1.6),
+ ([1, 6], 1.8),
+ ([1, 2, 6], 1.8),
+ ([0, 6], 2.0),
+ ([0, 2, 6], 2.0),
+ ([0, 3, 6], 2.0)
+ ]
+
+ dgms = st.extended_persistence(min_persistence=-1.)
+
+ assert dgms[0][0][1][0] == pytest.approx(2.)
+ assert dgms[0][0][1][1] == pytest.approx(3.)
+ assert dgms[1][0][1][0] == pytest.approx(5.)
+ assert dgms[1][0][1][1] == pytest.approx(4.)
+ assert dgms[2][0][1][0] == pytest.approx(1.)
+ assert dgms[2][0][1][1] == pytest.approx(6.)
+ assert dgms[3][0][1][0] == pytest.approx(6.)
+ assert dgms[3][0][1][1] == pytest.approx(1.)
+
def test_simplices_iterator():
st = SimplexTree()
diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py
index fe0985fa..31f64e32 100755
--- a/src/python/test/test_subsampling.py
+++ b/src/python/test/test_subsampling.py
@@ -120,7 +120,6 @@ def test_simple_pick_n_random_points():
# Go furter than point set on purpose
for iter in range(1, 10):
sub_set = gudhi.pick_n_random_points(points=point_set, nb_points=iter)
- print(5)
for sub in sub_set:
found = False
for point in point_set:
diff --git a/src/python/test/test_wasserstein_barycenter.py b/src/python/test/test_wasserstein_barycenter.py
new file mode 100755
index 00000000..f68c748e
--- /dev/null
+++ b/src/python/test/test_wasserstein_barycenter.py
@@ -0,0 +1,46 @@
+from gudhi.wasserstein.barycenter import lagrangian_barycenter
+import numpy as np
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Theo Lacombe
+
+ Copyright (C) 2019 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Theo Lacombe"
+__copyright__ = "Copyright (C) 2019 Inria"
+__license__ = "MIT"
+
+
+def test_lagrangian_barycenter():
+
+ dg1 = np.array([[0.2, 0.5]])
+ dg2 = np.array([[0.2, 0.7]])
+ dg3 = np.array([[0.3, 0.6], [0.7, 0.8], [0.2, 0.3]])
+ dg4 = np.array([])
+ dg5 = np.array([])
+ dg6 = np.array([])
+ res = np.array([[0.27916667, 0.55416667], [0.7375, 0.7625], [0.2375, 0.2625]])
+
+ dg7 = np.array([[0.1, 0.15], [0.1, 0.7], [0.2, 0.22], [0.55, 0.84], [0.11, 0.91], [0.61, 0.75], [0.33, 0.46], [0.12, 0.41], [0.32, 0.48]])
+ dg8 = np.array([[0., 4.], [4, 8]])
+
+ # error crit.
+ eps = 1e-7
+
+
+ assert np.linalg.norm(lagrangian_barycenter(pdiagset=[dg1, dg2, dg3, dg4],init=3, verbose=False) - res) < eps
+ assert np.array_equal(lagrangian_barycenter(pdiagset=[dg4, dg5, dg6], verbose=False), np.empty(shape=(0,2)))
+ assert np.linalg.norm(lagrangian_barycenter(pdiagset=[dg7], verbose=False) - dg7) < eps
+ Y, log = lagrangian_barycenter(pdiagset=[dg4, dg8], verbose=True)
+ assert np.linalg.norm(Y - np.array([[1,3], [5, 7]])) < eps
+ assert np.abs(log["energy"] - 2) < eps
+ assert np.array_equal(log["groupings"][0] , np.array([[0, -1], [1, -1]]))
+ assert np.array_equal(log["groupings"][1] , np.array([[0, 0], [1, 1]]))
+ assert np.linalg.norm(lagrangian_barycenter(pdiagset=[dg8, dg4], init=np.array([[0.2, 0.6], [0.5, 0.7]]), verbose=False) - np.array([[1, 3], [5, 7]])) < eps
+ assert lagrangian_barycenter(pdiagset = []) is None
+
diff --git a/src/python/test/test_wasserstein_distance.py b/src/python/test/test_wasserstein_distance.py
index 0d70e11a..1a4acc1d 100755
--- a/src/python/test/test_wasserstein_distance.py
+++ b/src/python/test/test_wasserstein_distance.py
@@ -8,6 +8,7 @@
- YYYY/MM Author: Description of the modification
"""
+from gudhi.wasserstein.wasserstein import _proj_on_diag
from gudhi.wasserstein import wasserstein_distance as pot
from gudhi.hera import wasserstein_distance as hera
import numpy as np
@@ -17,6 +18,12 @@ __author__ = "Theo Lacombe"
__copyright__ = "Copyright (C) 2019 Inria"
__license__ = "MIT"
+def test_proj_on_diag():
+ dgm = np.array([[1., 1.], [1., 2.], [3., 5.]])
+ assert np.array_equal(_proj_on_diag(dgm), [[1., 1.], [1.5, 1.5], [4., 4.]])
+ empty = np.empty((0, 2))
+ assert np.array_equal(_proj_on_diag(empty), empty)
+
def _basic_wasserstein(wasserstein_distance, delta, test_infinity=True, test_matching=True):
diag1 = np.array([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]])
diag2 = np.array([[2.8, 4.45], [9.5, 14.1]])
@@ -70,7 +77,7 @@ def _basic_wasserstein(wasserstein_distance, delta, test_infinity=True, test_mat
assert np.array_equal(match , [[0, -1], [1, -1]])
match = wasserstein_distance(diag1, diag2, matching=True, internal_p=2., order=2.)[1]
assert np.array_equal(match, [[0, 0], [1, 1], [2, -1]])
-
+
def hera_wrap(delta):