From 0c47b28201093851140ab499331017ef42312ce7 Mon Sep 17 00:00:00 2001 From: yuichi-ike Date: Thu, 21 May 2020 11:02:00 +0900 Subject: DTM Rips added (straightforward way) --- src/python/CMakeLists.txt | 10 ++++- src/python/doc/rips_complex_ref.rst | 13 ++++++ src/python/doc/rips_complex_sum.inc | 3 ++ src/python/doc/rips_complex_user.rst | 20 +++++++++ src/python/gudhi/dtm_rips_complex.py | 46 +++++++++++++++++++ src/python/test/test_dtm_rips_complex.py | 40 +++++++++++++++++ src/python/test/test_weighted_rips.py | 63 --------------------------- src/python/test/test_weighted_rips_complex.py | 63 +++++++++++++++++++++++++++ 8 files changed, 194 insertions(+), 64 deletions(-) create mode 100644 src/python/gudhi/dtm_rips_complex.py create mode 100644 src/python/test/test_dtm_rips_complex.py delete mode 100644 src/python/test/test_weighted_rips.py create mode 100644 src/python/test/test_weighted_rips_complex.py diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index ab08cd6d..96dd3f6f 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -58,6 +58,7 @@ if(PYTHONINTERP_FOUND) set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'wasserstein', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'point_cloud', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'weighted_rips_complex', ") + set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'dtm_rips_complex', ") add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}") add_gudhi_debug_info("Cython version ${CYTHON_VERSION}") @@ -234,6 +235,7 @@ if(PYTHONINTERP_FOUND) file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + file(COPY "gudhi/dtm_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") add_custom_command( OUTPUT gudhi.so @@ -492,9 +494,15 @@ if(PYTHONINTERP_FOUND) # Weighted Rips if(SCIPY_FOUND) - add_gudhi_py_test(test_weighted_rips) + add_gudhi_py_test(test_weighted_rips_complex) endif() + # DTM Rips + if(SCIPY_FOUND) + add_gudhi_py_test(test_dtm_rips_complex) + endif() + + # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") else(CYTHON_FOUND) diff --git a/src/python/doc/rips_complex_ref.rst b/src/python/doc/rips_complex_ref.rst index 5f3e46c1..f781fd92 100644 --- a/src/python/doc/rips_complex_ref.rst +++ b/src/python/doc/rips_complex_ref.rst @@ -25,3 +25,16 @@ Weighted Rips complex reference manual :show-inheritance: .. automethod:: gudhi.weighted_rips_complex.WeightedRipsComplex.__init__ + +.. _dtm-rips-complex-reference-manual: + +================================= +DTM Rips complex reference manual +================================= + +.. autoclass:: gudhi.dtm_rips_complex.DtmRipsComplex + :members: + :undoc-members: + :show-inheritance: + + .. automethod:: gudhi.dtm_rips_complex.DtmRipsComplex.__init__ \ No newline at end of file diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc index f7580714..9cd8074b 100644 --- a/src/python/doc/rips_complex_sum.inc +++ b/src/python/doc/rips_complex_sum.inc @@ -14,6 +14,9 @@ | | | | | | Weighted Rips complex constructs a simplicial complex from a distance | | | | matrix and weights on vertices. | | + | | | | + | | DTM Rips complex builds a simplicial complex from a point set or | | + | | a distance matrix. 
| | +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` | +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst index 819568be..eb2657df 100644 --- a/src/python/doc/rips_complex_user.rst +++ b/src/python/doc/rips_complex_user.rst @@ -398,3 +398,23 @@ The output is: .. testoutput:: [(0, (3.1622776601683795, inf)), (0, (3.1622776601683795, 5.39834563766817)), (0, (3.1622776601683795, 5.39834563766817))] + +DTM Rips Complex +---------------- + +`DtmdRipsComplex `_ builds a simplicial complex from a point set or a full distence matrix (in the form of ndarray), as described in the above example. + +.. testcode:: + + import numpy as np + from dtm_rips_complex import DtmRipsComplex + pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) + dtm_rips = DtmRipsComplex(points=pts, k=2) + st = dtm_rips.create_simplex_tree(max_dimension=2) + print(st.persistence()) + +The output is: + +.. testoutput:: + + [(0, (3.1622776601683795, inf)), (0, (3.1622776601683795, 5.39834563766817)), (0, (3.1622776601683795, 5.39834563766817))] diff --git a/src/python/gudhi/dtm_rips_complex.py b/src/python/gudhi/dtm_rips_complex.py new file mode 100644 index 00000000..6d2f9f31 --- /dev/null +++ b/src/python/gudhi/dtm_rips_complex.py @@ -0,0 +1,46 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. +# Author(s): Yuichi Ike, Raphaël Tinarrage +# +# Copyright (C) 2020 Inria, Copyright (C) 2020 FUjitsu Laboratories Ltd. +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + + +from gudhi.weighted_rips_complex import WeightedRipsComplex +from gudhi.point_cloud.dtm import DistanceToMeasure +from scipy.spatial.distance import cdist + +class DtmRipsComplex(WeightedRipsComplex): + """ + Class to generate a DTM Rips complex from a distance matrix or a point set, + in the way described in :cite:`dtmfiltrations`. + Remark that all the filtration values are doubled compared to the definition in the paper + for the consistency with RipsComplex. + """ + def __init__(self, + points=None, + distance_matrix=None, + k=1, + q=2, + max_filtration=float('inf')): + """ + Args: + points (Sequence[Sequence[float]]): list of points. + distance_matrix (ndarray): full distance matrix. + k (int): number of neighbors for the computation of DTM. Defaults to 1, which is equivalent to the usual Rips complex. + q (float): order used to compute the distance to measure. Defaults to 2. + max_filtration (float): specifies the maximal filtration value to be considered. 
+ """ + if distance_matrix is None: + if points is None: + # Empty Rips construction + points=[] + distance_matrix = cdist(points,points) + self.distance_matrix = distance_matrix + dtm = DistanceToMeasure(k, q=q, metric="precomputed") + # TODO: address the error when k is too large + self.weights = dtm.fit_transform(distance_matrix) + self.max_filtration = max_filtration + diff --git a/src/python/test/test_dtm_rips_complex.py b/src/python/test/test_dtm_rips_complex.py new file mode 100644 index 00000000..bc6e5a59 --- /dev/null +++ b/src/python/test/test_dtm_rips_complex.py @@ -0,0 +1,40 @@ +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + Author(s): Yuichi Ike + + Copyright (C) 2020 Inria, Copyright (C) 2020 FUjitsu Laboratories Ltd. + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +from gudhi.dtm_rips_complex import DtmRipsComplex +import numpy as np +from math import sqrt +import pytest + +def test_dtm_rips_complex(): + pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) + dtm_rips = DtmRipsComplex(points=pts, k=2) + st = dtm_rips.create_simplex_tree(max_dimension=2) + st.persistence() + persistence_intervals0 = st.persistence_intervals_in_dimension(0) + assert persistence_intervals0 == pytest.approx(np.array([[3.16227766, 5.39834564],[3.16227766, 5.39834564], [3.16227766, float("inf")]])) + +def test_compatibility_with_rips(): + distance_matrix = np.array([[0, 1, 1, sqrt(2)], [1, 0, sqrt(2), 1], [1, sqrt(2), 0, 1], [sqrt(2), 1, 1, 0]]) + dtm_rips = DtmRipsComplex(distance_matrix=distance_matrix, max_filtration=42) + st = dtm_rips.create_simplex_tree(max_dimension=1) + assert list(st.get_filtration()) == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([1, 3], 1.0), + ([2, 3], 1.0), + ([1, 2], sqrt(2)), + ([0, 3], sqrt(2)), + ] + diff --git a/src/python/test/test_weighted_rips.py b/src/python/test/test_weighted_rips.py deleted file mode 100644 index 7ef48333..00000000 --- a/src/python/test/test_weighted_rips.py +++ /dev/null @@ -1,63 +0,0 @@ -""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- Author(s): Yuichi Ike and Masatoshi Takenouchi - - Copyright (C) 2020 Inria - - Modification(s): - - YYYY/MM Author: Description of the modification -""" - -from gudhi.weighted_rips_complex import WeightedRipsComplex -from gudhi.point_cloud.dtm import DistanceToMeasure -import numpy as np -from math import sqrt -from scipy.spatial.distance import cdist -import pytest - -def test_non_dtm_rips_complex(): - dist = [[], [1]] - weights = [1, 100] - w_rips = WeightedRipsComplex(distance_matrix=dist, weights=weights) - st = w_rips.create_simplex_tree(max_dimension=2) - assert st.filtration([0,1]) == pytest.approx(200.0) - -def test_compatibility_with_rips(): - distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] - w_rips = WeightedRipsComplex(distance_matrix=distance_matrix,max_filtration=42) - st = w_rips.create_simplex_tree(max_dimension=1) - assert list(st.get_filtration()) == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([3], 0.0), - ([0, 1], 1.0), - ([0, 2], 1.0), - ([1, 3], 1.0), - ([2, 3], 1.0), - ([1, 2], sqrt(2)), - ([0, 3], sqrt(2)), - ] - -def test_compatibility_with_filtered_rips(): - distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] - w_rips = WeightedRipsComplex(distance_matrix=distance_matrix,max_filtration=1.0) - st = w_rips.create_simplex_tree(max_dimension=1) - - assert st.__is_defined() == True - assert st.__is_persistence_defined() == False - - assert st.num_simplices() == 8 - assert st.num_vertices() == 4 - -def test_dtm_rips_complex(): - pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) - dist = cdist(pts,pts) - dtm = DistanceToMeasure(2, q=2, metric="precomputed") - r = dtm.fit_transform(dist) - w_rips = WeightedRipsComplex(distance_matrix=dist, weights=r) - st = w_rips.create_simplex_tree(max_dimension=2) - st.persistence() - persistence_intervals0 = st.persistence_intervals_in_dimension(0) - assert persistence_intervals0 == pytest.approx(np.array([[3.16227766, 5.39834564],[3.16227766, 5.39834564], [3.16227766, float("inf")]])) - diff --git a/src/python/test/test_weighted_rips_complex.py b/src/python/test/test_weighted_rips_complex.py new file mode 100644 index 00000000..7ef48333 --- /dev/null +++ b/src/python/test/test_weighted_rips_complex.py @@ -0,0 +1,63 @@ +""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ Author(s): Yuichi Ike and Masatoshi Takenouchi + + Copyright (C) 2020 Inria + + Modification(s): + - YYYY/MM Author: Description of the modification +""" + +from gudhi.weighted_rips_complex import WeightedRipsComplex +from gudhi.point_cloud.dtm import DistanceToMeasure +import numpy as np +from math import sqrt +from scipy.spatial.distance import cdist +import pytest + +def test_non_dtm_rips_complex(): + dist = [[], [1]] + weights = [1, 100] + w_rips = WeightedRipsComplex(distance_matrix=dist, weights=weights) + st = w_rips.create_simplex_tree(max_dimension=2) + assert st.filtration([0,1]) == pytest.approx(200.0) + +def test_compatibility_with_rips(): + distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] + w_rips = WeightedRipsComplex(distance_matrix=distance_matrix,max_filtration=42) + st = w_rips.create_simplex_tree(max_dimension=1) + assert list(st.get_filtration()) == [ + ([0], 0.0), + ([1], 0.0), + ([2], 0.0), + ([3], 0.0), + ([0, 1], 1.0), + ([0, 2], 1.0), + ([1, 3], 1.0), + ([2, 3], 1.0), + ([1, 2], sqrt(2)), + ([0, 3], sqrt(2)), + ] + +def test_compatibility_with_filtered_rips(): + distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]] + w_rips = WeightedRipsComplex(distance_matrix=distance_matrix,max_filtration=1.0) + st = w_rips.create_simplex_tree(max_dimension=1) + + assert st.__is_defined() == True + assert st.__is_persistence_defined() == False + + assert st.num_simplices() == 8 + assert st.num_vertices() == 4 + +def test_dtm_rips_complex(): + pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) + dist = cdist(pts,pts) + dtm = DistanceToMeasure(2, q=2, metric="precomputed") + r = dtm.fit_transform(dist) + w_rips = WeightedRipsComplex(distance_matrix=dist, weights=r) + st = w_rips.create_simplex_tree(max_dimension=2) + st.persistence() + persistence_intervals0 = st.persistence_intervals_in_dimension(0) + assert persistence_intervals0 == pytest.approx(np.array([[3.16227766, 5.39834564],[3.16227766, 5.39834564], [3.16227766, float("inf")]])) + -- cgit v1.2.3 From c4e93ba5f1d003c442e3d56d6a0b3e80651dd6ec Mon Sep 17 00:00:00 2001 From: yuichi-ike Date: Thu, 21 May 2020 11:28:51 +0900 Subject: bug fixed --- src/python/doc/rips_complex_user.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst index eb2657df..ac11a4b6 100644 --- a/src/python/doc/rips_complex_user.rst +++ b/src/python/doc/rips_complex_user.rst @@ -407,7 +407,7 @@ DTM Rips Complex .. 
testcode:: import numpy as np - from dtm_rips_complex import DtmRipsComplex + from gudhi.dtm_rips_complex import DtmRipsComplex pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) dtm_rips = DtmRipsComplex(points=pts, k=2) st = dtm_rips.create_simplex_tree(max_dimension=2) -- cgit v1.2.3 From c7b82f49f01075519189f1fdb56cc485e4ad9f46 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 21 May 2020 11:08:25 +0200 Subject: Use unique_pointer and template alpha complex interface for python interface --- .../utilities/alpha_complex_persistence.cpp | 1 + src/python/gudhi/alpha_complex.pyx | 2 +- src/python/include/Alpha_complex_interface.h | 18 +++++++++--------- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp index 7c898dfd..e17831d9 100644 --- a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp +++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp @@ -11,6 +11,7 @@ #include #include +#include #include #include diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index d75e374a..2b7ce00d 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -26,7 +26,7 @@ __copyright__ = "Copyright (C) 2016 Inria" __license__ = "GPL v3" cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": - cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": + cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": Alpha_complex_interface(vector[vector[double]] points) nogil except + # bool from_file is a workaround for cython to find the correct signature Alpha_complex_interface(string off_file, bool from_file) nogil except + diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h index 40de88f3..5fb694cd 100644 --- a/src/python/include/Alpha_complex_interface.h +++ b/src/python/include/Alpha_complex_interface.h @@ -23,29 +23,29 @@ #include #include #include +#include // for std::unique_ptr namespace Gudhi { namespace alpha_complex { +using Exact_kernel = CGAL::Epeck_d< CGAL::Dynamic_dimension_tag >; +using Inexact_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + +template class Alpha_complex_interface { - using Dynamic_kernel = CGAL::Epeck_d< CGAL::Dynamic_dimension_tag >; - using Point_d = Dynamic_kernel::Point_d; + using Point_d = typename Kernel::Point_d; public: Alpha_complex_interface(const std::vector>& points) { auto mkpt = [](std::vector const& vec){ return Point_d(vec.size(), vec.begin(), vec.end()); }; - alpha_complex_ = new Alpha_complex(boost::adaptors::transform(points, mkpt)); + alpha_complex_ = std::make_unique>(boost::adaptors::transform(points, mkpt)); } Alpha_complex_interface(const std::string& off_file_name, bool from_file = true) { - alpha_complex_ = new Alpha_complex(off_file_name); - } - - ~Alpha_complex_interface() { - delete alpha_complex_; + alpha_complex_ = std::make_unique>(off_file_name); } std::vector get_point(int vh) { @@ -61,7 +61,7 @@ class Alpha_complex_interface { } private: - Alpha_complex* alpha_complex_; + std::unique_ptr> alpha_complex_; }; } // namespace alpha_complex -- cgit v1.2.3 From 2ccc5ea97a5979f80fec93863da5549e4e6f2eea Mon Sep 17 00:00:00 2001 From: yuichi-ike Date: Fri, 22 May 2020 10:22:31 +0900 Subject: class name changed, documents modified --- src/python/doc/rips_complex_ref.rst | 4 ++-- 
src/python/doc/rips_complex_user.rst | 8 +++++--- src/python/gudhi/dtm_rips_complex.py | 12 ++++++++---- src/python/test/test_dtm_rips_complex.py | 6 +++--- 4 files changed, 18 insertions(+), 12 deletions(-) diff --git a/src/python/doc/rips_complex_ref.rst b/src/python/doc/rips_complex_ref.rst index f781fd92..2aa6b268 100644 --- a/src/python/doc/rips_complex_ref.rst +++ b/src/python/doc/rips_complex_ref.rst @@ -32,9 +32,9 @@ Weighted Rips complex reference manual DTM Rips complex reference manual ================================= -.. autoclass:: gudhi.dtm_rips_complex.DtmRipsComplex +.. autoclass:: gudhi.dtm_rips_complex.DTMRipsComplex :members: :undoc-members: :show-inheritance: - .. automethod:: gudhi.dtm_rips_complex.DtmRipsComplex.__init__ \ No newline at end of file + .. automethod:: gudhi.dtm_rips_complex.DTMRipsComplex.__init__ \ No newline at end of file diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst index ac11a4b6..450e6c1a 100644 --- a/src/python/doc/rips_complex_user.rst +++ b/src/python/doc/rips_complex_user.rst @@ -378,6 +378,7 @@ Example from a point cloud combined with DistanceToMeasure ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Combining with DistanceToMeasure, one can compute the DTM-filtration of a point set, as in `this notebook `_. +Remark that DTMRipsComplex class provides exactly this function. .. testcode:: @@ -402,14 +403,15 @@ The output is: DTM Rips Complex ---------------- -`DtmdRipsComplex `_ builds a simplicial complex from a point set or a full distence matrix (in the form of ndarray), as described in the above example. +`DTMRipsComplex `_ builds a simplicial complex from a point set or a full distence matrix (in the form of ndarray), as described in the above example. +This class constructs a weighted Rips complex giving larger weights to outliers, which reduces their impact on the persistence diagram. See `this notebook `_ for some experiments. .. testcode:: import numpy as np - from gudhi.dtm_rips_complex import DtmRipsComplex + from gudhi.dtm_rips_complex import DTMRipsComplex pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) - dtm_rips = DtmRipsComplex(points=pts, k=2) + dtm_rips = DTMRipsComplex(points=pts, k=2) st = dtm_rips.create_simplex_tree(max_dimension=2) print(st.persistence()) diff --git a/src/python/gudhi/dtm_rips_complex.py b/src/python/gudhi/dtm_rips_complex.py index 6d2f9f31..70c8e5dd 100644 --- a/src/python/gudhi/dtm_rips_complex.py +++ b/src/python/gudhi/dtm_rips_complex.py @@ -12,7 +12,7 @@ from gudhi.weighted_rips_complex import WeightedRipsComplex from gudhi.point_cloud.dtm import DistanceToMeasure from scipy.spatial.distance import cdist -class DtmRipsComplex(WeightedRipsComplex): +class DTMRipsComplex(WeightedRipsComplex): """ Class to generate a DTM Rips complex from a distance matrix or a point set, in the way described in :cite:`dtmfiltrations`. @@ -28,7 +28,7 @@ class DtmRipsComplex(WeightedRipsComplex): """ Args: points (Sequence[Sequence[float]]): list of points. - distance_matrix (ndarray): full distance matrix. + distance_matrix (numpy.ndarray): full distance matrix. k (int): number of neighbors for the computation of DTM. Defaults to 1, which is equivalent to the usual Rips complex. q (float): order used to compute the distance to measure. Defaults to 2. max_filtration (float): specifies the maximal filtration value to be considered. 
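(A minimal usage sketch, not part of the patch: the constructor arguments documented in the hunk above also accept a precomputed distance matrix instead of a point array, as exercised by test_dtm_rips_complex.py; the point coordinates below are the ones used in those tests.)

import numpy as np
from scipy.spatial.distance import cdist
from gudhi.dtm_rips_complex import DTMRipsComplex

pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]])
# k is the number of neighbors used for the DTM weights; k=1 reduces to the usual Rips filtration
dtm_rips = DTMRipsComplex(distance_matrix=cdist(pts, pts), k=2)
st = dtm_rips.create_simplex_tree(max_dimension=2)
print(st.persistence())
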
@@ -39,8 +39,12 @@ class DtmRipsComplex(WeightedRipsComplex): points=[] distance_matrix = cdist(points,points) self.distance_matrix = distance_matrix - dtm = DistanceToMeasure(k, q=q, metric="precomputed") + # TODO: address the error when k is too large - self.weights = dtm.fit_transform(distance_matrix) + if k <= 1: + self.weights = [0] * len(distance_matrix) + else: + dtm = DistanceToMeasure(k, q=q, metric="precomputed") + self.weights = dtm.fit_transform(distance_matrix) self.max_filtration = max_filtration diff --git a/src/python/test/test_dtm_rips_complex.py b/src/python/test/test_dtm_rips_complex.py index bc6e5a59..7cd2ad90 100644 --- a/src/python/test/test_dtm_rips_complex.py +++ b/src/python/test/test_dtm_rips_complex.py @@ -8,14 +8,14 @@ - YYYY/MM Author: Description of the modification """ -from gudhi.dtm_rips_complex import DtmRipsComplex +from gudhi.dtm_rips_complex import DTMRipsComplex import numpy as np from math import sqrt import pytest def test_dtm_rips_complex(): pts = np.array([[2.0, 2.0], [0.0, 1.0], [3.0, 4.0]]) - dtm_rips = DtmRipsComplex(points=pts, k=2) + dtm_rips = DTMRipsComplex(points=pts, k=2) st = dtm_rips.create_simplex_tree(max_dimension=2) st.persistence() persistence_intervals0 = st.persistence_intervals_in_dimension(0) @@ -23,7 +23,7 @@ def test_dtm_rips_complex(): def test_compatibility_with_rips(): distance_matrix = np.array([[0, 1, 1, sqrt(2)], [1, 0, sqrt(2), 1], [1, sqrt(2), 0, 1], [sqrt(2), 1, 1, 0]]) - dtm_rips = DtmRipsComplex(distance_matrix=distance_matrix, max_filtration=42) + dtm_rips = DTMRipsComplex(distance_matrix=distance_matrix, max_filtration=42) st = dtm_rips.create_simplex_tree(max_dimension=1) assert list(st.get_filtration()) == [ ([0], 0.0), -- cgit v1.2.3 From 4826b8bf49aafb23c57eb6983c886235e8f7c0b2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 22 May 2020 18:04:33 +0200 Subject: brute force version - complexity is read only --- src/python/gudhi/alpha_complex.pyx | 69 ++++++++++++++++++++++++++++---------- 1 file changed, 52 insertions(+), 17 deletions(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index 2b7ce00d..dc2fcb01 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -26,10 +26,18 @@ __copyright__ = "Copyright (C) 2016 Inria" __license__ = "GPL v3" cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": - cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": - Alpha_complex_interface(vector[vector[double]] points) nogil except + + cdef cppclass Alpha_complex_exact_interface "Gudhi::alpha_complex::Alpha_complex_interface": + Alpha_complex_exact_interface(vector[vector[double]] points) nogil except + # bool from_file is a workaround for cython to find the correct signature - Alpha_complex_interface(string off_file, bool from_file) nogil except + + Alpha_complex_exact_interface(string off_file, bool from_file) nogil except + + vector[double] get_point(int vertex) nogil except + + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) nogil except + + +cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": + cdef cppclass Alpha_complex_inexact_interface "Gudhi::alpha_complex::Alpha_complex_interface": + Alpha_complex_inexact_interface(vector[vector[double]] points) nogil except + + # bool from_file is a workaround for cython to find the correct signature + Alpha_complex_inexact_interface(string off_file, bool from_file) nogil except + 
vector[double] get_point(int vertex) nogil except + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) nogil except + @@ -53,10 +61,12 @@ cdef class AlphaComplex: """ - cdef Alpha_complex_interface * thisptr + cdef Alpha_complex_exact_interface * exact_ptr + cdef Alpha_complex_inexact_interface * inexact_ptr + complexity = 'safe' # Fake constructor that does nothing but documenting the constructor - def __init__(self, points=None, off_file=''): + def __init__(self, points=None, off_file='', complexity='safe'): """AlphaComplex constructor. :param points: A list of points in d-Dimension. @@ -66,15 +76,23 @@ cdef class AlphaComplex: :param off_file: An OFF file style name. :type off_file: string + + :param complexity: Alpha complex complexity can be 'fast', 'safe' or 'exact'. Default is 'safe'. + :type complexity: string """ # The real cython constructor - def __cinit__(self, points = None, off_file = ''): + def __cinit__(self, points = None, off_file = '', complexity = 'safe'): + assert complexity == 'fast' or complexity == 'safe' or complexity == 'exact', "Alpha complex complexity can be 'fast', 'safe' or 'exact'" + self.complexity = complexity + cdef vector[vector[double]] pts if off_file: if os.path.isfile(off_file): - self.thisptr = new Alpha_complex_interface( - off_file.encode('utf-8'), True) + if complexity == 'fast': + self.inexact_ptr = new Alpha_complex_inexact_interface(off_file.encode('utf-8'), True) + else: + self.exact_ptr = new Alpha_complex_exact_interface(off_file.encode('utf-8'), True) else: print("file " + off_file + " not found.") else: @@ -82,18 +100,28 @@ cdef class AlphaComplex: # Empty Alpha construction points=[] pts = points - with nogil: - self.thisptr = new Alpha_complex_interface(pts) - + if complexity == 'fast': + with nogil: + self.inexact_ptr = new Alpha_complex_inexact_interface(pts) + else: + with nogil: + self.exact_ptr = new Alpha_complex_exact_interface(pts) def __dealloc__(self): - if self.thisptr != NULL: - del self.thisptr + if self.complexity == 'fast': + if self.inexact_ptr != NULL: + del self.inexact_ptr + else: + if self.exact_ptr != NULL: + del self.exact_ptr def __is_defined(self): """Returns true if AlphaComplex pointer is not NULL. """ - return self.thisptr != NULL + if self.complexity == 'fast': + return self.inexact_ptr != NULL + else: + return self.exact_ptr != NULL def get_point(self, vertex): """This function returns the point corresponding to a given vertex. @@ -103,7 +131,10 @@ cdef class AlphaComplex: :rtype: list of float :returns: the point. 
""" - return self.thisptr.get_point(vertex) + if self.complexity == 'fast': + return self.inexact_ptr.get_point(vertex) + else: + return self.exact_ptr.get_point(vertex) def create_simplex_tree(self, max_alpha_square = float('inf')): """ @@ -118,6 +149,10 @@ cdef class AlphaComplex: stree = SimplexTree() cdef double mas = max_alpha_square cdef intptr_t stree_int_ptr=stree.thisptr - with nogil: - self.thisptr.create_simplex_tree(stree_int_ptr, mas) + if self.complexity == 'fast': + with nogil: + self.inexact_ptr.create_simplex_tree(stree_int_ptr, mas) + else: + with nogil: + self.exact_ptr.create_simplex_tree(stree_int_ptr, mas) return stree -- cgit v1.2.3 From 28a6889dc9865c75acc3d3be4edce01b3942e56f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 23 May 2020 08:45:13 +0200 Subject: First working version --- src/python/gudhi/alpha_complex.pyx | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index dc2fcb01..e2d3db9c 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -63,7 +63,8 @@ cdef class AlphaComplex: cdef Alpha_complex_exact_interface * exact_ptr cdef Alpha_complex_inexact_interface * inexact_ptr - complexity = 'safe' + cdef bool fast + cdef bool exact # Fake constructor that does nothing but documenting the constructor def __init__(self, points=None, off_file='', complexity='safe'): @@ -84,12 +85,13 @@ cdef class AlphaComplex: # The real cython constructor def __cinit__(self, points = None, off_file = '', complexity = 'safe'): assert complexity == 'fast' or complexity == 'safe' or complexity == 'exact', "Alpha complex complexity can be 'fast', 'safe' or 'exact'" - self.complexity = complexity + self.fast = complexity == 'fast' + self.exact = complexity == 'safe' cdef vector[vector[double]] pts if off_file: if os.path.isfile(off_file): - if complexity == 'fast': + if self.fast: self.inexact_ptr = new Alpha_complex_inexact_interface(off_file.encode('utf-8'), True) else: self.exact_ptr = new Alpha_complex_exact_interface(off_file.encode('utf-8'), True) @@ -100,7 +102,7 @@ cdef class AlphaComplex: # Empty Alpha construction points=[] pts = points - if complexity == 'fast': + if self.fast: with nogil: self.inexact_ptr = new Alpha_complex_inexact_interface(pts) else: @@ -108,7 +110,7 @@ cdef class AlphaComplex: self.exact_ptr = new Alpha_complex_exact_interface(pts) def __dealloc__(self): - if self.complexity == 'fast': + if self.fast: if self.inexact_ptr != NULL: del self.inexact_ptr else: @@ -118,7 +120,7 @@ cdef class AlphaComplex: def __is_defined(self): """Returns true if AlphaComplex pointer is not NULL. """ - if self.complexity == 'fast': + if self.fast: return self.inexact_ptr != NULL else: return self.exact_ptr != NULL @@ -131,7 +133,7 @@ cdef class AlphaComplex: :rtype: list of float :returns: the point. 
""" - if self.complexity == 'fast': + if self.fast: return self.inexact_ptr.get_point(vertex) else: return self.exact_ptr.get_point(vertex) @@ -149,7 +151,7 @@ cdef class AlphaComplex: stree = SimplexTree() cdef double mas = max_alpha_square cdef intptr_t stree_int_ptr=stree.thisptr - if self.complexity == 'fast': + if self.fast: with nogil: self.inexact_ptr.create_simplex_tree(stree_int_ptr, mas) else: -- cgit v1.2.3 From 50b460f867b5801ce3459d60fb86b02051eb4a7d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 23 May 2020 09:52:24 +0200 Subject: Delaunay complex and tests all possibilities --- src/python/gudhi/alpha_complex.pyx | 22 ++++---- src/python/include/Alpha_complex_interface.h | 5 +- src/python/test/test_alpha_complex.py | 75 ++++++++++++++++++++++++---- 3 files changed, 82 insertions(+), 20 deletions(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index e2d3db9c..700fa738 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -31,7 +31,7 @@ cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": # bool from_file is a workaround for cython to find the correct signature Alpha_complex_exact_interface(string off_file, bool from_file) nogil except + vector[double] get_point(int vertex) nogil except + - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) nogil except + + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, bool exact_version, bool default_filtration_value) nogil except + cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": cdef cppclass Alpha_complex_inexact_interface "Gudhi::alpha_complex::Alpha_complex_interface": @@ -39,7 +39,7 @@ cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": # bool from_file is a workaround for cython to find the correct signature Alpha_complex_inexact_interface(string off_file, bool from_file) nogil except + vector[double] get_point(int vertex) nogil except + - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) nogil except + + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, bool exact_version, bool default_filtration_value) nogil except + # AlphaComplex python interface cdef class AlphaComplex: @@ -138,23 +138,27 @@ cdef class AlphaComplex: else: return self.exact_ptr.get_point(vertex) - def create_simplex_tree(self, max_alpha_square = float('inf')): + def create_simplex_tree(self, max_alpha_square = float('inf'), default_filtration_value = False): """ - :param max_alpha_square: The maximum alpha square threshold the - simplices shall not exceed. Default is set to infinity, and - there is very little point using anything else since it does - not save time. + :param max_alpha_square: The maximum alpha square threshold the simplices shall not exceed. Default is set to + infinity, and there is very little point using anything else since it does not save time. :type max_alpha_square: float + :param default_filtration_value: Set this value to `True` if filtration values are not needed to be computed + (will be set to `NaN`). Default value is `False` (which means compute the filtration values). + :type default_filtration_value: bool :returns: A simplex tree created from the Delaunay Triangulation. 
:rtype: SimplexTree """ stree = SimplexTree() cdef double mas = max_alpha_square cdef intptr_t stree_int_ptr=stree.thisptr + cdef bool compute_filtration = default_filtration_value == True if self.fast: with nogil: - self.inexact_ptr.create_simplex_tree(stree_int_ptr, mas) + self.inexact_ptr.create_simplex_tree(stree_int_ptr, + mas, False, compute_filtration) else: with nogil: - self.exact_ptr.create_simplex_tree(stree_int_ptr, mas) + self.exact_ptr.create_simplex_tree(stree_int_ptr, + mas, self.exact, compute_filtration) return stree diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h index 5fb694cd..3dd01345 100644 --- a/src/python/include/Alpha_complex_interface.h +++ b/src/python/include/Alpha_complex_interface.h @@ -56,8 +56,9 @@ class Alpha_complex_interface { return vd; } - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square) { - alpha_complex_->create_complex(*simplex_tree, max_alpha_square); + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, + bool exact_version, bool default_filtration_value) { + alpha_complex_->create_complex(*simplex_tree, max_alpha_square, exact_version, default_filtration_value); } private: diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py index 77121302..913397dd 100755 --- a/src/python/test/test_alpha_complex.py +++ b/src/python/test/test_alpha_complex.py @@ -24,14 +24,18 @@ __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -def test_empty_alpha(): - alpha_complex = AlphaComplex(points=[[0, 0]]) +def _empty_alpha(complexity): + alpha_complex = AlphaComplex(points=[[0, 0]], complexity = complexity) assert alpha_complex.__is_defined() == True +def test_empty_alpha(): + _empty_alpha('fast') + _empty_alpha('safe') + _empty_alpha('exact') -def test_infinite_alpha(): +def _infinite_alpha(complexity): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - alpha_complex = AlphaComplex(points=point_list) + alpha_complex = AlphaComplex(points=point_list, complexity = complexity) assert alpha_complex.__is_defined() == True simplex_tree = alpha_complex.create_simplex_tree() @@ -79,10 +83,14 @@ def test_infinite_alpha(): else: assert False +def test_infinite_alpha(): + _infinite_alpha('fast') + _infinite_alpha('safe') + _infinite_alpha('exact') -def test_filtered_alpha(): +def _filtered_alpha(complexity): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_alpha = AlphaComplex(points=point_list) + filtered_alpha = AlphaComplex(points=point_list, complexity = complexity) simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25) @@ -119,7 +127,12 @@ def test_filtered_alpha(): assert simplex_tree.get_star([0]) == [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)] assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)] -def test_safe_alpha_persistence_comparison(): +def test_filtered_alpha(): + _filtered_alpha('fast') + _filtered_alpha('safe') + _filtered_alpha('exact') + +def _safe_alpha_persistence_comparison(complexity): #generate periodic signal time = np.arange(0, 10, 1) signal = [math.sin(x) for x in time] @@ -131,10 +144,10 @@ def test_safe_alpha_persistence_comparison(): embedding2 = [[signal[i], delayed[i]] for i in range(len(time))] #build alpha complex and simplex tree - alpha_complex1 = AlphaComplex(points=embedding1) + alpha_complex1 = AlphaComplex(points=embedding1, complexity = complexity) simplex_tree1 = alpha_complex1.create_simplex_tree() - 
alpha_complex2 = AlphaComplex(points=embedding2) + alpha_complex2 = AlphaComplex(points=embedding2, complexity = complexity) simplex_tree2 = alpha_complex2.create_simplex_tree() diag1 = simplex_tree1.persistence() @@ -143,3 +156,47 @@ def test_safe_alpha_persistence_comparison(): for (first_p, second_p) in zip_longest(diag1, diag2): assert first_p[0] == pytest.approx(second_p[0]) assert first_p[1] == pytest.approx(second_p[1]) + + +def test_safe_alpha_persistence_comparison(): + # Won't work for 'fast' version + _safe_alpha_persistence_comparison('safe') + _safe_alpha_persistence_comparison('exact') + +def _delaunay_complex(complexity): + point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] + filtered_alpha = AlphaComplex(points=point_list, complexity = complexity) + + simplex_tree = filtered_alpha.create_simplex_tree(default_filtration_value = True) + + assert simplex_tree.num_simplices() == 11 + assert simplex_tree.num_vertices() == 4 + + assert point_list[0] == filtered_alpha.get_point(0) + assert point_list[1] == filtered_alpha.get_point(1) + assert point_list[2] == filtered_alpha.get_point(2) + assert point_list[3] == filtered_alpha.get_point(3) + try: + filtered_alpha.get_point(4) == [] + except IndexError: + pass + else: + assert False + try: + filtered_alpha.get_point(125) == [] + except IndexError: + pass + else: + assert False + + for filtered_value in simplex_tree.get_filtration(): + assert math.isnan(filtered_value[1]) + for filtered_value in simplex_tree.get_star([0]): + assert math.isnan(filtered_value[1]) + for filtered_value in simplex_tree.get_cofaces([0], 1): + assert math.isnan(filtered_value[1]) + +def test_delaunay_complex(): + _delaunay_complex('fast') + _delaunay_complex('safe') + _delaunay_complex('exact') -- cgit v1.2.3 From 78fb7ccd413ca655bdbe4adc9b4b256f20e11fe5 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 24 May 2020 10:16:58 +0200 Subject: c++ version to trigger exact/inexact kernel --- src/python/gudhi/alpha_complex.pyx | 59 ++++++---------------- src/python/include/Alpha_complex_interface.h | 73 ++++++++++++++++++++-------- 2 files changed, 68 insertions(+), 64 deletions(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index 700fa738..5bc9ebc4 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -26,18 +26,10 @@ __copyright__ = "Copyright (C) 2016 Inria" __license__ = "GPL v3" cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": - cdef cppclass Alpha_complex_exact_interface "Gudhi::alpha_complex::Alpha_complex_interface": - Alpha_complex_exact_interface(vector[vector[double]] points) nogil except + + cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface": + Alpha_complex_interface(vector[vector[double]] points, bool fast_version) nogil except + # bool from_file is a workaround for cython to find the correct signature - Alpha_complex_exact_interface(string off_file, bool from_file) nogil except + - vector[double] get_point(int vertex) nogil except + - void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, bool exact_version, bool default_filtration_value) nogil except + - -cdef extern from "Alpha_complex_interface.h" namespace "Gudhi": - cdef cppclass Alpha_complex_inexact_interface "Gudhi::alpha_complex::Alpha_complex_interface": - Alpha_complex_inexact_interface(vector[vector[double]] points) nogil except + - # bool from_file is a workaround for cython to find the correct signature - 
Alpha_complex_inexact_interface(string off_file, bool from_file) nogil except + + Alpha_complex_interface(string off_file, bool fast_version, bool from_file) nogil except + vector[double] get_point(int vertex) nogil except + void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, bool exact_version, bool default_filtration_value) nogil except + @@ -61,8 +53,7 @@ cdef class AlphaComplex: """ - cdef Alpha_complex_exact_interface * exact_ptr - cdef Alpha_complex_inexact_interface * inexact_ptr + cdef Alpha_complex_interface * this_ptr cdef bool fast cdef bool exact @@ -91,10 +82,7 @@ cdef class AlphaComplex: cdef vector[vector[double]] pts if off_file: if os.path.isfile(off_file): - if self.fast: - self.inexact_ptr = new Alpha_complex_inexact_interface(off_file.encode('utf-8'), True) - else: - self.exact_ptr = new Alpha_complex_exact_interface(off_file.encode('utf-8'), True) + self.this_ptr = new Alpha_complex_interface(off_file.encode('utf-8'), self.fast, True) else: print("file " + off_file + " not found.") else: @@ -102,28 +90,17 @@ cdef class AlphaComplex: # Empty Alpha construction points=[] pts = points - if self.fast: - with nogil: - self.inexact_ptr = new Alpha_complex_inexact_interface(pts) - else: - with nogil: - self.exact_ptr = new Alpha_complex_exact_interface(pts) + with nogil: + self.this_ptr = new Alpha_complex_interface(pts, self.fast) def __dealloc__(self): - if self.fast: - if self.inexact_ptr != NULL: - del self.inexact_ptr - else: - if self.exact_ptr != NULL: - del self.exact_ptr + if self.this_ptr != NULL: + del self.this_ptr def __is_defined(self): """Returns true if AlphaComplex pointer is not NULL. """ - if self.fast: - return self.inexact_ptr != NULL - else: - return self.exact_ptr != NULL + return self.this_ptr != NULL def get_point(self, vertex): """This function returns the point corresponding to a given vertex. @@ -133,10 +110,7 @@ cdef class AlphaComplex: :rtype: list of float :returns: the point. 
""" - if self.fast: - return self.inexact_ptr.get_point(vertex) - else: - return self.exact_ptr.get_point(vertex) + return self.this_ptr.get_point(vertex) def create_simplex_tree(self, max_alpha_square = float('inf'), default_filtration_value = False): """ @@ -153,12 +127,7 @@ cdef class AlphaComplex: cdef double mas = max_alpha_square cdef intptr_t stree_int_ptr=stree.thisptr cdef bool compute_filtration = default_filtration_value == True - if self.fast: - with nogil: - self.inexact_ptr.create_simplex_tree(stree_int_ptr, - mas, False, compute_filtration) - else: - with nogil: - self.exact_ptr.create_simplex_tree(stree_int_ptr, - mas, self.exact, compute_filtration) + with nogil: + self.this_ptr.create_simplex_tree(stree_int_ptr, + mas, self.exact, compute_filtration) return stree diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h index 3dd01345..46f2ba03 100644 --- a/src/python/include/Alpha_complex_interface.h +++ b/src/python/include/Alpha_complex_interface.h @@ -29,40 +29,75 @@ namespace Gudhi { namespace alpha_complex { -using Exact_kernel = CGAL::Epeck_d< CGAL::Dynamic_dimension_tag >; -using Inexact_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; - -template class Alpha_complex_interface { - using Point_d = typename Kernel::Point_d; + private: + using Exact_kernel = CGAL::Epeck_d< CGAL::Dynamic_dimension_tag >; + using Inexact_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + using Point_exact_kernel = typename Exact_kernel::Point_d; + using Point_inexact_kernel = typename Inexact_kernel::Point_d; + + template + std::vector pt_cgal_to_cython(CgalPointType& ph) { + std::vector vd; + for (auto coord = ph.cartesian_begin(); coord != ph.cartesian_end(); coord++) + vd.push_back(CGAL::to_double(*coord)); + return vd; + } + + template + CgalPointType pt_cython_to_cgal(std::vector const& vec) { + return CgalPointType(vec.size(), vec.begin(), vec.end()); + } public: - Alpha_complex_interface(const std::vector>& points) { - auto mkpt = [](std::vector const& vec){ - return Point_d(vec.size(), vec.begin(), vec.end()); - }; - alpha_complex_ = std::make_unique>(boost::adaptors::transform(points, mkpt)); + Alpha_complex_interface(const std::vector>& points, bool fast_version) + : fast_version_(fast_version) { + auto pt = pt_cython_to_cgal(points[0]); + if (fast_version_) { + auto mkpt = [](std::vector const& vec) { + return Point_inexact_kernel(vec.size(), vec.begin(), vec.end()); + }; + ac_inexact_ptr_ = std::make_unique>(boost::adaptors::transform(points, mkpt)); + //ac_inexact_ptr_ = std::make_unique>(boost::adaptors::transform(points, pt_cython_to_cgal)); + } else { + auto mkpt = [](std::vector const& vec) { + return Point_exact_kernel(vec.size(), vec.begin(), vec.end()); + }; + ac_exact_ptr_ = std::make_unique>(boost::adaptors::transform(points, mkpt)); + //ac_exact_ptr_ = std::make_unique>(boost::adaptors::transform(points, pt_cython_to_cgal)); + } } - Alpha_complex_interface(const std::string& off_file_name, bool from_file = true) { - alpha_complex_ = std::make_unique>(off_file_name); + Alpha_complex_interface(const std::string& off_file_name, bool fast_version, bool from_file = true) + : fast_version_(fast_version) { + if (fast_version_) + ac_inexact_ptr_ = std::make_unique>(off_file_name); + else + ac_exact_ptr_ = std::make_unique>(off_file_name); } std::vector get_point(int vh) { - std::vector vd; - Point_d const& ph = alpha_complex_->get_point(vh); - for (auto coord = ph.cartesian_begin(); coord != ph.cartesian_end(); 
coord++) - vd.push_back(CGAL::to_double(*coord)); - return vd; + if (fast_version_) { + Point_inexact_kernel const& ph = ac_inexact_ptr_->get_point(vh); + return pt_cgal_to_cython(ph); + } else { + Point_exact_kernel const& ph = ac_exact_ptr_->get_point(vh); + return pt_cgal_to_cython(ph); + } } void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, bool exact_version, bool default_filtration_value) { - alpha_complex_->create_complex(*simplex_tree, max_alpha_square, exact_version, default_filtration_value); + if (fast_version_) + ac_inexact_ptr_->create_complex(*simplex_tree, max_alpha_square, exact_version, default_filtration_value); + else + ac_exact_ptr_->create_complex(*simplex_tree, max_alpha_square, exact_version, default_filtration_value); } private: - std::unique_ptr> alpha_complex_; + bool fast_version_; + std::unique_ptr> ac_exact_ptr_; + std::unique_ptr> ac_inexact_ptr_; }; } // namespace alpha_complex -- cgit v1.2.3 From a7decae3cdf47441cbd72c31e794176dbd3739c4 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 25 May 2020 08:21:47 +0200 Subject: C++ version and documentation --- src/python/doc/alpha_complex_user.rst | 21 ++++++++++++++++++-- src/python/include/Alpha_complex_interface.h | 29 +++++++++++----------------- 2 files changed, 30 insertions(+), 20 deletions(-) diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst index d49f45b4..c1ed0eaa 100644 --- a/src/python/doc/alpha_complex_user.rst +++ b/src/python/doc/alpha_complex_user.rst @@ -16,8 +16,25 @@ Definition Remarks ^^^^^^^ -When an :math:`\alpha`-complex is constructed with an infinite value of :math:`\alpha^2`, -the complex is a Delaunay complex (with special filtration values). +* When an :math:`\alpha`-complex is constructed with an infinite value of :math:`\alpha^2`, the complex is a Delaunay + complex (with special filtration values). The Delaunay complex without filtration values is also available by + passing :code:`default_filtration_value = True` to :func:`~gudhi.AlphaComplex.create_simplex_tree`. +* For people only interested in the topology of the Alpha complex (for instance persistence), Alpha complex is + equivalent to the `Čech complex `_ and much smaller if + you do not bound the radii. `Čech complex `_ can still + make sense in higher dimension precisely because you can bound the radii. +* Using the default :code:`complexity = 'safe'` makes the construction safe. + If you pass :code:`complexity = 'exact'` to :func:`~gudhi.AlphaComplex.__init__`, the filtration values are the exact + ones converted to the filtration value type of the simplicial complex. This can be very slow. + If you pass :code:`complexity = 'safe'` (the default) or :code:`complexity = 'fast'`, the filtration values are only + guaranteed to have a small multiplicative error compared to the exact value, see + `CGAL::Lazy_exact_nt::set_relative_precision_of_to_double `_ + for details. A drawback, when computing persistence, is that an empty exact interval [10^12,10^12] may become a + non-empty approximate interval [10^12,10^12+10^6]. + Using :code:`complexity = 'fast'` makes the computations slightly faster, and the combinatorics are still exact, but + the computation of filtration values can exceptionally be arbitrarily bad. In all cases, we still guarantee that the + output is a valid filtration (faces have a filtration value no larger than their cofaces). +* For performances reasons, it is advised to use Alpha_complex with `CGAL `_ :math:`\geq` 5.0.0. 
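(A minimal usage sketch of the options described in the remarks above, not an excerpt from the patch; it mirrors the calls added to test_alpha_complex.py earlier in this series.)

from gudhi import AlphaComplex

points = [[0, 0], [1, 0], [0, 1], [1, 1]]
# 'fast' favours speed over exact filtration values; 'safe' (default) and 'exact' use the exact kernel
alpha_complex = AlphaComplex(points=points, complexity='fast')
# Keep only the Delaunay complex: filtration values are not computed and stay NaN
simplex_tree = alpha_complex.create_simplex_tree(default_filtration_value=True)
print(simplex_tree.num_simplices())
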
Example from points ------------------- diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h index 46f2ba03..dce9c8e9 100644 --- a/src/python/include/Alpha_complex_interface.h +++ b/src/python/include/Alpha_complex_interface.h @@ -31,8 +31,8 @@ namespace alpha_complex { class Alpha_complex_interface { private: - using Exact_kernel = CGAL::Epeck_d< CGAL::Dynamic_dimension_tag >; - using Inexact_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; + using Exact_kernel = CGAL::Epeck_d; + using Inexact_kernel = CGAL::Epick_d; using Point_exact_kernel = typename Exact_kernel::Point_d; using Point_inexact_kernel = typename Inexact_kernel::Point_d; @@ -45,31 +45,24 @@ class Alpha_complex_interface { } template - CgalPointType pt_cython_to_cgal(std::vector const& vec) { + static CgalPointType pt_cython_to_cgal(std::vector const& vec) { return CgalPointType(vec.size(), vec.begin(), vec.end()); } public: Alpha_complex_interface(const std::vector>& points, bool fast_version) - : fast_version_(fast_version) { - auto pt = pt_cython_to_cgal(points[0]); + : fast_version_(fast_version) { if (fast_version_) { - auto mkpt = [](std::vector const& vec) { - return Point_inexact_kernel(vec.size(), vec.begin(), vec.end()); - }; - ac_inexact_ptr_ = std::make_unique>(boost::adaptors::transform(points, mkpt)); - //ac_inexact_ptr_ = std::make_unique>(boost::adaptors::transform(points, pt_cython_to_cgal)); + ac_inexact_ptr_ = std::make_unique>( + boost::adaptors::transform(points, pt_cython_to_cgal)); } else { - auto mkpt = [](std::vector const& vec) { - return Point_exact_kernel(vec.size(), vec.begin(), vec.end()); - }; - ac_exact_ptr_ = std::make_unique>(boost::adaptors::transform(points, mkpt)); - //ac_exact_ptr_ = std::make_unique>(boost::adaptors::transform(points, pt_cython_to_cgal)); + ac_exact_ptr_ = std::make_unique>( + boost::adaptors::transform(points, pt_cython_to_cgal)); } } Alpha_complex_interface(const std::string& off_file_name, bool fast_version, bool from_file = true) - : fast_version_(fast_version) { + : fast_version_(fast_version) { if (fast_version_) ac_inexact_ptr_ = std::make_unique>(off_file_name); else @@ -86,8 +79,8 @@ class Alpha_complex_interface { } } - void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, - bool exact_version, bool default_filtration_value) { + void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square, bool exact_version, + bool default_filtration_value) { if (fast_version_) ac_inexact_ptr_->create_complex(*simplex_tree, max_alpha_square, exact_version, default_filtration_value); else -- cgit v1.2.3 From 55316205b2c7c2e3d7e3fe3ea92e20f3f8b29b11 Mon Sep 17 00:00:00 2001 From: yuichi-ike Date: Tue, 26 May 2020 18:22:14 +0900 Subject: test fixed, documents modified --- src/python/doc/rips_complex_user.rst | 6 ++++-- src/python/gudhi/dtm_rips_complex.py | 1 + src/python/test/test_dtm_rips_complex.py | 16 ++++------------ 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst index 450e6c1a..dd2f2cc0 100644 --- a/src/python/doc/rips_complex_user.rst +++ b/src/python/doc/rips_complex_user.rst @@ -378,7 +378,7 @@ Example from a point cloud combined with DistanceToMeasure ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Combining with DistanceToMeasure, one can compute the DTM-filtration of a point set, as in `this notebook `_. 
-Remark that DTMRipsComplex class provides exactly this function. +Remark that `DTMRipsComplex `_ class provides exactly this function. .. testcode:: @@ -400,10 +400,12 @@ The output is: [(0, (3.1622776601683795, inf)), (0, (3.1622776601683795, 5.39834563766817)), (0, (3.1622776601683795, 5.39834563766817))] +.. _dtm-rips-complex: + DTM Rips Complex ---------------- -`DTMRipsComplex `_ builds a simplicial complex from a point set or a full distence matrix (in the form of ndarray), as described in the above example. +`DTMRipsComplex `_ builds a simplicial complex from a point set or a full distance matrix (in the form of ndarray), as described in the above example. This class constructs a weighted Rips complex giving larger weights to outliers, which reduces their impact on the persistence diagram. See `this notebook `_ for some experiments. .. testcode:: diff --git a/src/python/gudhi/dtm_rips_complex.py b/src/python/gudhi/dtm_rips_complex.py index 70c8e5dd..d77ad36e 100644 --- a/src/python/gudhi/dtm_rips_complex.py +++ b/src/python/gudhi/dtm_rips_complex.py @@ -18,6 +18,7 @@ class DTMRipsComplex(WeightedRipsComplex): in the way described in :cite:`dtmfiltrations`. Remark that all the filtration values are doubled compared to the definition in the paper for the consistency with RipsComplex. + :Requires: `SciPy `_ """ def __init__(self, points=None, diff --git a/src/python/test/test_dtm_rips_complex.py b/src/python/test/test_dtm_rips_complex.py index 7cd2ad90..e1c0ee44 100644 --- a/src/python/test/test_dtm_rips_complex.py +++ b/src/python/test/test_dtm_rips_complex.py @@ -9,6 +9,7 @@ """ from gudhi.dtm_rips_complex import DTMRipsComplex +from gudhi import RipsComplex import numpy as np from math import sqrt import pytest @@ -25,16 +26,7 @@ def test_compatibility_with_rips(): distance_matrix = np.array([[0, 1, 1, sqrt(2)], [1, 0, sqrt(2), 1], [1, sqrt(2), 0, 1], [sqrt(2), 1, 1, 0]]) dtm_rips = DTMRipsComplex(distance_matrix=distance_matrix, max_filtration=42) st = dtm_rips.create_simplex_tree(max_dimension=1) - assert list(st.get_filtration()) == [ - ([0], 0.0), - ([1], 0.0), - ([2], 0.0), - ([3], 0.0), - ([0, 1], 1.0), - ([0, 2], 1.0), - ([1, 3], 1.0), - ([2, 3], 1.0), - ([1, 2], sqrt(2)), - ([0, 3], sqrt(2)), - ] + rips_complex = RipsComplex(distance_matrix=distance_matrix, max_edge_length=42) + st_from_rips = rips_complex.create_simplex_tree(max_dimension=1) + assert list(st.get_filtration()) == list(st_from_rips.get_filtration()) -- cgit v1.2.3 From 206f7002c8290cf826ca4a8b1905715f5d4670ae Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 26 May 2020 22:37:08 +0200 Subject: Fix #206 --- src/cmake/modules/GUDHI_user_version_target.cmake | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index 9cf648e3..c436f237 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -49,8 +49,18 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_SOURCE_DIR}/CMakeGUDHIVersion.txt ${GUDHI_USER_VERSION_DIR}/CMakeGUDHIVersion.txt) -add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy_directory ${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH} ${GUDHI_USER_VERSION_DIR}/python) +# As cython generates .cpp files in source, we have to copy all except cpp files from python 
directory +file(GLOB_RECURSE PYTHON_FILES ${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/*) +foreach(PYTHON_FILE ${PYTHON_FILES}) + get_filename_component(PYTHON_FILE_EXT ${PYTHON_FILE} EXT) + if (NOT "${PYTHON_FILE_EXT}" STREQUAL ".cpp") + string(REPLACE "${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/" "" RELATIVE_PYTHON_FILE ${PYTHON_FILE}) + message("# # # ${RELATIVE_PYTHON_FILE}") + add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E + copy ${PYTHON_FILE} ${GUDHI_USER_VERSION_DIR}/python/${RELATIVE_PYTHON_FILE}) + endif() +endforeach() + add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E -- cgit v1.2.3 From f612ebdc37ba2e707867806b7532ccda149b773c Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 26 May 2020 22:40:58 +0200 Subject: Remove debug msg --- src/cmake/modules/GUDHI_user_version_target.cmake | 1 - 1 file changed, 1 deletion(-) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index c436f237..e99bb42d 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -55,7 +55,6 @@ foreach(PYTHON_FILE ${PYTHON_FILES}) get_filename_component(PYTHON_FILE_EXT ${PYTHON_FILE} EXT) if (NOT "${PYTHON_FILE_EXT}" STREQUAL ".cpp") string(REPLACE "${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/" "" RELATIVE_PYTHON_FILE ${PYTHON_FILE}) - message("# # # ${RELATIVE_PYTHON_FILE}") add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy ${PYTHON_FILE} ${GUDHI_USER_VERSION_DIR}/python/${RELATIVE_PYTHON_FILE}) endif() -- cgit v1.2.3 From 10fc14b23d82ea8d18b55c624d5515ea38677056 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 27 May 2020 16:16:24 +0200 Subject: use requirements file for appveyor --- .appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 34f42dea..d310ace9 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -48,8 +48,8 @@ install: - python --version - pip --version - python -m pip install --upgrade pip - - pip install -U setuptools numpy matplotlib scipy Cython pytest - - pip install -U POT pybind11 + - python -m pip install --user -r .github/build-requirements.txt + - python -m pip install --user -r .github/test-requirements.txt build_script: - mkdir build -- cgit v1.2.3 From 9732a6ad61513a6f9af0571d64441a8d816a4081 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 27 May 2020 22:59:14 +0200 Subject: try to workaround no PyKeOps on Windows --- .appveyor.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index d310ace9..d072a366 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -49,7 +49,8 @@ install: - pip --version - python -m pip install --upgrade pip - python -m pip install --user -r .github/build-requirements.txt - - python -m pip install --user -r .github/test-requirements.txt + # No PyKeOps on windows, let's workaround this one. 
+ - for /F "tokens=*" %%A in (.github/test-requirements.txt) do python -m pip install --user %%A build_script: - mkdir build -- cgit v1.2.3 From a9e1f9bbb4b39bca0d59857f57f5182701532820 Mon Sep 17 00:00:00 2001 From: yuichi-ike Date: Thu, 28 May 2020 09:19:55 +0900 Subject: comment modified --- src/python/gudhi/dtm_rips_complex.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/gudhi/dtm_rips_complex.py b/src/python/gudhi/dtm_rips_complex.py index d77ad36e..63c9b138 100644 --- a/src/python/gudhi/dtm_rips_complex.py +++ b/src/python/gudhi/dtm_rips_complex.py @@ -28,7 +28,7 @@ class DTMRipsComplex(WeightedRipsComplex): max_filtration=float('inf')): """ Args: - points (Sequence[Sequence[float]]): list of points. + points (numpy.ndarray): array of points. distance_matrix (numpy.ndarray): full distance matrix. k (int): number of neighbors for the computation of DTM. Defaults to 1, which is equivalent to the usual Rips complex. q (float): order used to compute the distance to measure. Defaults to 2. -- cgit v1.2.3 From eae4534638cd0c152fbe42fa62ec3c638c05e214 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 28 May 2020 07:20:54 +0200 Subject: Let's try to fix sphinx compilationon Osx and Win --- azure-pipelines.yml | 2 +- src/cmake/modules/GUDHI_third_party_libraries.cmake | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 7b5334a7..29ec23d0 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -33,5 +33,5 @@ jobs: cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. make -j 4 make doxygen - ctest -j 4 --output-on-failure -E sphinx # remove sphinx build as it fails + ctest -j 4 --output-on-failure # -E sphinx remove sphinx build as it fails displayName: 'Build, test and documentation generation' diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 0abe66b7..49295a82 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -199,7 +199,7 @@ if(PYTHONINTERP_FOUND AND CYTHON_FOUND) if(NOT SPHINX_PATH) if(PYTHON_VERSION_MAJOR EQUAL 3) # In Python3, just hack sphinx-build if it does not exist - set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/doc/python3-sphinx-build.py") + set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m sphinx.cmd.build") endif(PYTHON_VERSION_MAJOR EQUAL 3) endif(NOT SPHINX_PATH) endif(SPHINX_FOUND) -- cgit v1.2.3 From 0dfe5025dcfb1ac1db4e74394f7d7aa917ec8721 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 28 May 2020 08:33:16 +0200 Subject: remove sphinx test on azure and apveyor --- .appveyor.yml | 2 +- azure-pipelines.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index d072a366..d48ec43e 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -59,7 +59,7 @@ build_script: - if [%target%]==[Python] ( cd src/python & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & - ctest -j 1 --output-on-failure -C Release + ctest -j 1 --output-on-failure -C Release -E sphinx ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 --output-on-failure -C Release -E diff_files diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 29ec23d0..7b5334a7 100644 --- a/azure-pipelines.yml +++ 
b/azure-pipelines.yml @@ -33,5 +33,5 @@ jobs: cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. make -j 4 make doxygen - ctest -j 4 --output-on-failure # -E sphinx remove sphinx build as it fails + ctest -j 4 --output-on-failure -E sphinx # remove sphinx build as it fails displayName: 'Build, test and documentation generation' -- cgit v1.2.3 From 851afaab00a47f1469cb5819b4a26ea52bad988b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 28 May 2020 08:34:33 +0200 Subject: sphinx-build workaround differently --- src/python/doc/python3-sphinx-build.py | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100755 src/python/doc/python3-sphinx-build.py diff --git a/src/python/doc/python3-sphinx-build.py b/src/python/doc/python3-sphinx-build.py deleted file mode 100755 index 84d158cf..00000000 --- a/src/python/doc/python3-sphinx-build.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python3 - -""" -Emulate sphinx-build for python3 -""" - -from sys import exit, argv -from sphinx import main - -if __name__ == '__main__': - exit(main(argv)) -- cgit v1.2.3 From 20e57ccc35ef216bec5b1252b41a4aa1da566610 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 28 May 2020 10:59:36 +0200 Subject: Let's try this --- .appveyor.yml | 2 +- azure-pipelines.yml | 2 +- src/cmake/modules/GUDHI_third_party_libraries.cmake | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index d48ec43e..d072a366 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -59,7 +59,7 @@ build_script: - if [%target%]==[Python] ( cd src/python & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & - ctest -j 1 --output-on-failure -C Release -E sphinx + ctest -j 1 --output-on-failure -C Release ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 --output-on-failure -C Release -E diff_files diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 7b5334a7..29ec23d0 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -33,5 +33,5 @@ jobs: cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. 
make -j 4 make doxygen - ctest -j 4 --output-on-failure -E sphinx # remove sphinx build as it fails + ctest -j 4 --output-on-failure # -E sphinx remove sphinx build as it fails displayName: 'Build, test and documentation generation' diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 49295a82..f92fe93e 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -199,7 +199,7 @@ if(PYTHONINTERP_FOUND AND CYTHON_FOUND) if(NOT SPHINX_PATH) if(PYTHON_VERSION_MAJOR EQUAL 3) # In Python3, just hack sphinx-build if it does not exist - set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m sphinx.cmd.build") + set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m" "sphinx.cmd.build") endif(PYTHON_VERSION_MAJOR EQUAL 3) endif(NOT SPHINX_PATH) endif(SPHINX_FOUND) -- cgit v1.2.3 From 194cacad8dabf69f41f105917b1afff62348b4ec Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 28 May 2020 21:45:31 +0200 Subject: Remove this test as it fails under windows --- src/python/doc/nerve_gic_complex_user.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/python/doc/nerve_gic_complex_user.rst b/src/python/doc/nerve_gic_complex_user.rst index 0e67fc78..abb7bf7a 100644 --- a/src/python/doc/nerve_gic_complex_user.rst +++ b/src/python/doc/nerve_gic_complex_user.rst @@ -99,9 +99,6 @@ the program output is: [-0.171433, 0.367393] [-0.909111, 0.745853] 0 interval(s) in dimension 1: - -.. testoutput:: - Nerve is of dimension 1 - 41 simplices - 21 vertices. [0] [1] -- cgit v1.2.3 From 47e5c110b6c647a8cb2069bd488fa45bb579cfeb Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 28 May 2020 22:01:35 +0200 Subject: just code, no test --- src/python/doc/nerve_gic_complex_user.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/doc/nerve_gic_complex_user.rst b/src/python/doc/nerve_gic_complex_user.rst index abb7bf7a..0b820abf 100644 --- a/src/python/doc/nerve_gic_complex_user.rst +++ b/src/python/doc/nerve_gic_complex_user.rst @@ -50,7 +50,7 @@ The cover C comes from the preimages of intervals (10 intervals with gain 0.3) covering the height function (coordinate 2), which are then refined into their connected components using the triangulation of the .OFF file. -.. testcode:: +.. code-block:: python import gudhi nerve_complex = gudhi.CoverComplex() -- cgit v1.2.3 From 14ee986e2d1802b7b40e3319bea787b5d1624b06 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Fri, 29 May 2020 21:35:02 +0200 Subject: Rewrite some summaries --- src/python/doc/alpha_complex_sum.inc | 14 ++++++-------- src/python/doc/alpha_complex_user.rst | 4 +++- src/python/doc/cubical_complex_sum.inc | 6 +++--- src/python/doc/index.rst | 4 ++-- src/python/doc/persistent_cohomology_sum.inc | 4 +--- src/python/doc/rips_complex_sum.inc | 18 +++++------------- src/python/doc/tangential_complex_sum.inc | 8 ++++---- 7 files changed, 24 insertions(+), 34 deletions(-) diff --git a/src/python/doc/alpha_complex_sum.inc b/src/python/doc/alpha_complex_sum.inc index 3aba0d71..aeab493f 100644 --- a/src/python/doc/alpha_complex_sum.inc +++ b/src/python/doc/alpha_complex_sum.inc @@ -3,15 +3,13 @@ +----------------------------------------------------------------+-------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------+ | .. 
figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau | - | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | | - | :alt: Alpha complex representation | | :Since: GUDHI 2.0.0 | - | :figclass: align-center | The filtration value of each simplex is computed as the **square** of | | - | | the circumradius of the simplex if the circumsphere is empty (the | :License: MIT (`GPL v3 `_) | - | | simplex is then said to be Gabriel), and as the minimum of the | | - | | filtration values of the codimension 1 cofaces that make it not | :Requires: `Eigen `_ :math:`\geq` 3.1.0 and `CGAL `_ :math:`\geq` 4.11.0 | - | | Gabriel otherwise. | | + | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. It has the same persistent homology | | + | :alt: Alpha complex representation | as the Čech complex and is significantly smaller. | :Since: GUDHI 2.0.0 | + | :figclass: align-center | | | + | | | :License: MIT (`GPL v3 `_) | + | | | | + | | | :Requires: `Eigen `_ :math:`\geq` 3.1.0 and `CGAL `_ :math:`\geq` 4.11.0 | | | | | - | | For performances reasons, it is advised to use CGAL :math:`\geq` 5.0.0. | | +----------------------------------------------------------------+-------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------+ | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` | +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst index d49f45b4..e31194a7 100644 --- a/src/python/doc/alpha_complex_user.rst +++ b/src/python/doc/alpha_complex_user.rst @@ -9,7 +9,7 @@ Definition .. include:: alpha_complex_sum.inc -`AlphaComplex` is constructing a :doc:`SimplexTree ` using +:doc:`AlphaComplex ` is constructing a :doc:`SimplexTree ` using `Delaunay Triangulation `_ :cite:`cgal:hdj-t-19b` from the `Computational Geometry Algorithms Library `_ :cite:`cgal:eb-19b`. @@ -19,6 +19,8 @@ Remarks When an :math:`\alpha`-complex is constructed with an infinite value of :math:`\alpha^2`, the complex is a Delaunay complex (with special filtration values). +For performances reasons, it is advised to use CGAL :math:`\geq` 5.0.0. + Example from points ------------------- diff --git a/src/python/doc/cubical_complex_sum.inc b/src/python/doc/cubical_complex_sum.inc index 28bf8e94..87db184d 100644 --- a/src/python/doc/cubical_complex_sum.inc +++ b/src/python/doc/cubical_complex_sum.inc @@ -2,9 +2,9 @@ :widths: 30 40 30 +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ - | .. figure:: | The cubical complex is an example of a structured complex useful in | :Author: Pawel Dlotko | - | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | | - | :alt: Cubical complex representation | analysis. | :Since: GUDHI 2.0.0 | + | .. 
figure:: | The cubical complex represents a grid as a cell complex with | :Author: Pawel Dlotko | + | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | cells of all dimensions. | | + | :alt: Cubical complex representation | | :Since: GUDHI 2.0.0 | | :figclass: align-center | | | | | | :License: MIT | | | | | diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst index 13e51047..05bc18b4 100644 --- a/src/python/doc/index.rst +++ b/src/python/doc/index.rst @@ -53,8 +53,8 @@ Tangential complex Topological descriptors computation *********************************** -Persistence cohomology -====================== +Persistent cohomology +===================== .. include:: persistent_cohomology_sum.inc diff --git a/src/python/doc/persistent_cohomology_sum.inc b/src/python/doc/persistent_cohomology_sum.inc index a1ff2eee..58e44b8a 100644 --- a/src/python/doc/persistent_cohomology_sum.inc +++ b/src/python/doc/persistent_cohomology_sum.inc @@ -6,9 +6,7 @@ | ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | | | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Since: GUDHI 2.0.0 | | | homology studies the evolution -- birth, life and death -- of these | | - | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :License: MIT | - | Torus | theory is essentially composed of three elements: topological spaces, | | - | | their homology groups and an evolution scheme. | | + | Rips Persistent Cohomology on a 3D Torus | features when the topological space is changing. | :License: MIT | | | | | | | Computation of persistent cohomology using the algorithm of | | | | :cite:`DBLP:journals/dcg/SilvaMV11` and | | diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc index 9cd8074b..c123ea2a 100644 --- a/src/python/doc/rips_complex_sum.inc +++ b/src/python/doc/rips_complex_sum.inc @@ -2,21 +2,13 @@ :widths: 30 40 30 +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ - | .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse | - | ../../doc/Rips_complex/rips_complex_representation.png | graph. | | + | .. figure:: | The Vietoris-Rips complex is a simplicial complex built as the | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse | + | ../../doc/Rips_complex/rips_complex_representation.png | clique-complex of a proximity graph. | | | :figclass: align-center | | :Since: GUDHI 2.0.0 | - | | The filtration value of each edge is computed from a user-given | | - | | distance function and is inserted until a user-given threshold | :License: MIT | - | | value. | | + | | We also provide sparse approximations, to speed-up the computation | | + | | of persistent homology, and weighted versions, which are more robust | :License: MIT | + | | to outliers. | | | | | | - | | This complex can be built from a point cloud and a distance function, | | - | | or from a distance matrix. | | - | | | | - | | Weighted Rips complex constructs a simplicial complex from a distance | | - | | matrix and weights on vertices. | | - | | | | - | | DTM Rips complex builds a simplicial complex from a point set or | | - | | a distance matrix. 
| | +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+ | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` | +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/tangential_complex_sum.inc b/src/python/doc/tangential_complex_sum.inc index 22314a2d..2f330a07 100644 --- a/src/python/doc/tangential_complex_sum.inc +++ b/src/python/doc/tangential_complex_sum.inc @@ -3,10 +3,10 @@ +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------+ | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin | - | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | | - | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Since: GUDHI 2.0.0 | - | | an unknown manifold. The running time depends only linearly on the | | - | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :License: MIT (`GPL v3 `_) | + | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in | | + | :figclass: align-center | :math:`d`-dimensional Euclidean space. The input is a point sample | :Since: GUDHI 2.0.0 | + | | coming from an unknown manifold. The running time depends only linearly| | + | | on the extrinsic dimension :math:`d` and exponentially on the intrinsic| :License: MIT (`GPL v3 `_) | | | dimension :math:`k`. | | | | | :Requires: `Eigen `_ :math:`\geq` 3.1.0 and `CGAL `_ :math:`\geq` 4.11.0 | +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------+ -- cgit v1.2.3 From 17ffc79e1db329fa4366666a15b373d200df90b1 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 29 May 2020 22:23:31 +0200 Subject: Try to build wheels --- Dockerfile_for_pip | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 Dockerfile_for_pip diff --git a/Dockerfile_for_pip b/Dockerfile_for_pip new file mode 100644 index 00000000..0f4fa8a0 --- /dev/null +++ b/Dockerfile_for_pip @@ -0,0 +1,46 @@ +FROM quay.io/pypa/manylinux2014_x86_64 + +RUN yum -y update && yum -y install \ + wget \ + zlib-devel \ + eigen3-devel \ + mpfr-devel \ + gmp-devel \ + && yum clean all + +RUN mkdir -p /opt/cmake \ + && wget https://github.com/Kitware/CMake/releases/download/v3.16.2/cmake-3.16.2-Linux-x86_64.sh \ + && sh cmake-3.16.2-Linux-x86_64.sh --skip-license --prefix=/opt/cmake \ + && rm -f cmake-3.16.2-Linux-x86_64.sh + +RUN wget https://dl.bintray.com/boostorg/release/1.64.0/source/boost_1_64_0.tar.gz \ + && tar xf boost_1_64_0.tar.gz \ + && cd boost_1_64_0 \ + && ./bootstrap.sh --with-libraries=filesystem,program_options,system,thread,test \ + && ./b2 install \ + && cd .. 
\ + && rm -rf boost_1_64_0 + +RUN wget https://github.com/CGAL/cgal/releases/download/releases%2FCGAL-5.0.2/CGAL-5.0.2.tar.xz \ + && tar xf CGAL-5.0.2.tar.xz \ + && mkdir build \ + && cd build \ + && /opt/cmake/bin/cmake -DCMAKE_BUILD_TYPE=Release ../CGAL-5.0.2/ \ + && make install \ + && cd .. \ + && rm -rf build CGAL-5.0.2 + +ADD .github/build-requirements.txt / + +RUN /opt/python/cp35-cp35m/bin/pip install -r build-requirements.txt \ + && /opt/python/cp36-cp36m/bin/pip install -r build-requirements.txt\ + && /opt/python/cp37-cp37m/bin/pip install -r build-requirements.txt\ + && /opt/python/cp38-cp38/bin/pip install -r build-requirements.txt\ + && /opt/python/cp38-cp38/bin/pip install twine + +ENV PYTHON35="/opt/python/cp35-cp35m/" +ENV PYTHON36="/opt/python/cp36-cp36m/" +ENV PYTHON37="/opt/python/cp37-cp37m/" +ENV PYTHON38="/opt/python/cp38-cp38/" + +ENV PATH="/opt/cmake/bin:${PATH}" -- cgit v1.2.3 From 5140e8571d15fb0026dd63a67a75d0ef770a1751 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 30 May 2020 07:45:15 +0200 Subject: Add github actions --- .github/workflows/pip-packaging-linux.yml | 59 +++++++++++++++++++++++++++++ .github/workflows/pip-packaging-osx.yml | 43 +++++++++++++++++++++ .github/workflows/pip-packaging-windows.yml | 43 +++++++++++++++++++++ 3 files changed, 145 insertions(+) create mode 100644 .github/workflows/pip-packaging-linux.yml create mode 100644 .github/workflows/pip-packaging-osx.yml create mode 100644 .github/workflows/pip-packaging-windows.yml diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml new file mode 100644 index 00000000..6ca28588 --- /dev/null +++ b/.github/workflows/pip-packaging-linux.yml @@ -0,0 +1,59 @@ +name: pip packaging linux + +on: + push: {} + pull_request: {} +# on: +# release: +# types: [published] + +jobs: + build: + name: build pip wheels + runs-on: ubuntu-latest + container: gudhi/pip_for_gudhi + steps: + - uses: actions/checkout@v1 + with: + submodules: true + - name: Build wheels for Python 3.5 + run: | + mkdir build_35 + cd build_35 + cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON35/bin/python .. + cd src/python + $PYTHON35/bin/python setup.py bdist_wheel + auditwheel repair dist/*.whl + - name: Build wheels for Python 3.6 + run: | + mkdir build_36 + cd build_36 + cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON36/bin/python .. + cd src/python + $PYTHON36/bin/python setup.py bdist_wheel + auditwheel repair dist/*.whl + - name: Build wheels for Python 3.7 + run: | + mkdir build_37 + cd build_37 + cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON37/bin/python .. + cd src/python + $PYTHON37/bin/python setup.py bdist_wheel + auditwheel repair dist/*.whl + - name: Build wheels for Python 3.8 + run: | + mkdir build_38 + cd build_38 + cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON38/bin/python .. 
+ cd src/python + $PYTHON38/bin/python setup.py bdist_wheel + auditwheel repair dist/*.whl + - name: Publish on PyPi + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + $PYTHON38/bin/twine upload build_35/src/python/wheelhouse/* + $PYTHON38/bin/twine upload build_36/src/python/wheelhouse/* + $PYTHON38/bin/twine upload build_37/src/python/wheelhouse/* + $PYTHON38/bin/twine upload build_38/src/python/wheelhouse/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml new file mode 100644 index 00000000..a571f8e0 --- /dev/null +++ b/.github/workflows/pip-packaging-osx.yml @@ -0,0 +1,43 @@ +name: pip packaging osx + +on: + push: {} + pull_request: {} +# on: +# release: +# types: [published] + +jobs: + build: + runs-on: macos-latest + strategy: + max-parallel: 4 + matrix: + python-version: ['3.5', '3.6', '3.7', '3.8'] + name: Build wheels for Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v1 + with: + submodules: true + - uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Install dependencies + run: | + brew update && brew install boost eigen gmp mpfr cgal + python -m pip install --user -r .github/build-requirements.txt + python -m pip install --user twine + - name: Build python wheel + run: | + python --version + mkdir build + cd build + cmake -DCMAKE_BUILD_TYPE=Release -DPython_ADDITIONAL_VERSIONS=3 .. + cd src/python + python setup.py bdist_wheel + - name: Publish on PyPi + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: twine upload dist/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml new file mode 100644 index 00000000..18c6aa94 --- /dev/null +++ b/.github/workflows/pip-packaging-windows.yml @@ -0,0 +1,43 @@ +name: pip packaging osx + +on: + push: {} + pull_request: {} +# on: +# release: +# types: [published] + +jobs: + build: + runs-on: windows-latest + strategy: + max-parallel: 4 + matrix: + python-version: ['3.5', '3.6', '3.7', '3.8'] + name: Build wheels for Python ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v1 + with: + submodules: true + - uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Install dependencies + run: | + vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows + python -m pip install --user -r .github/build-requirements.txt + python -m pip install --user twine + - name: Build python wheel + run: | + python --version + mkdir build + cd build + cmake -DCMAKE_BUILD_TYPE=Release -DPython_ADDITIONAL_VERSIONS=3 .. 
+ cd src/python + python setup.py bdist_wheel + - name: Publish on PyPi + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: twine upload dist/* \ No newline at end of file -- cgit v1.2.3 From 22132d53dd343f0d5f6515cfc2c692b3c3a9a93d Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 30 May 2020 08:01:19 +0200 Subject: name typo --- .github/workflows/pip-packaging-windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 18c6aa94..ccd1e647 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -1,4 +1,4 @@ -name: pip packaging osx +name: pip packaging windows on: push: {} -- cgit v1.2.3 From 93cd84c7421340ce66f80509856fb43e7cb72803 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 30 May 2020 08:09:44 +0200 Subject: help cmake to find vcpkg installations --- .github/workflows/pip-packaging-windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index ccd1e647..a718c8f9 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -33,7 +33,7 @@ jobs: python --version mkdir build cd build - cmake -DCMAKE_BUILD_TYPE=Release -DPython_ADDITIONAL_VERSIONS=3 .. + cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DPython_ADDITIONAL_VERSIONS=3 .. cd src/python python setup.py bdist_wheel - name: Publish on PyPi -- cgit v1.2.3 From 769bd322ace39ea712d0a21cd02f3598fd8ac149 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 30 May 2020 08:28:46 +0200 Subject: use python -m twine as recommended --- .github/workflows/pip-packaging-linux.yml | 8 ++++---- .github/workflows/pip-packaging-osx.yml | 2 +- .github/workflows/pip-packaging-windows.yml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml index 6ca28588..7f5a8df6 100644 --- a/.github/workflows/pip-packaging-linux.yml +++ b/.github/workflows/pip-packaging-linux.yml @@ -53,7 +53,7 @@ jobs: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - $PYTHON38/bin/twine upload build_35/src/python/wheelhouse/* - $PYTHON38/bin/twine upload build_36/src/python/wheelhouse/* - $PYTHON38/bin/twine upload build_37/src/python/wheelhouse/* - $PYTHON38/bin/twine upload build_38/src/python/wheelhouse/* \ No newline at end of file + $PYTHON38/bin/python -m twine upload build_35/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload build_36/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload build_37/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload build_38/src/python/wheelhouse/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index a571f8e0..97a3c7ea 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -40,4 +40,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: twine upload dist/* \ No newline at end of file + run: python -m twine upload dist/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index a718c8f9..5c890b68 100644 --- 
a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -40,4 +40,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: twine upload dist/* \ No newline at end of file + run: python -m twine upload dist/* \ No newline at end of file -- cgit v1.2.3 From 9813f905ac8dbc814d3ff4f078df4125779b467f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 08:22:02 +0200 Subject: Upload on testpypi --- .github/workflows/pip-packaging-linux.yml | 8 ++++---- .github/workflows/pip-packaging-osx.yml | 2 +- .github/workflows/pip-packaging-windows.yml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml index 7f5a8df6..485ef643 100644 --- a/.github/workflows/pip-packaging-linux.yml +++ b/.github/workflows/pip-packaging-linux.yml @@ -53,7 +53,7 @@ jobs: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - $PYTHON38/bin/python -m twine upload build_35/src/python/wheelhouse/* - $PYTHON38/bin/python -m twine upload build_36/src/python/wheelhouse/* - $PYTHON38/bin/python -m twine upload build_37/src/python/wheelhouse/* - $PYTHON38/bin/python -m twine upload build_38/src/python/wheelhouse/* \ No newline at end of file + $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_35/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_36/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_37/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_38/src/python/wheelhouse/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 97a3c7ea..c5b99ab6 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -40,4 +40,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: python -m twine upload dist/* \ No newline at end of file + run: python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 5c890b68..3a70a142 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -40,4 +40,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: python -m twine upload dist/* \ No newline at end of file + run: python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* \ No newline at end of file -- cgit v1.2.3 From 884b53725f76c85196e783d7853ea9cc6d207378 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 08:38:51 +0200 Subject: Debug traces on osx --- .github/workflows/pip-packaging-osx.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index c5b99ab6..28dd1d53 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -36,8 +36,11 @@ jobs: cmake -DCMAKE_BUILD_TYPE=Release -DPython_ADDITIONAL_VERSIONS=3 .. 
cd src/python python setup.py bdist_wheel + pwd - name: Publish on PyPi env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* \ No newline at end of file + run: | + pwd + python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* \ No newline at end of file -- cgit v1.2.3 From c4c2e870662528467ef130b3e6a9f76682a86fd1 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 08:50:33 +0200 Subject: no disjoint sets found --- .github/workflows/pip-packaging-windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 3a70a142..33ba0de9 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -25,7 +25,7 @@ jobs: architecture: x64 - name: Install dependencies run: | - vcpkg install tbb:x64-windows boost-disjoint-sets:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows + vcpkg install tbb:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows python -m pip install --user -r .github/build-requirements.txt python -m pip install --user twine - name: Build python wheel -- cgit v1.2.3 From 974a045acfdae6a9980d9473f9dc3ece716aa0e1 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 09:23:14 +0200 Subject: Add some fields for PyPi nice render --- .github/workflows/pip-packaging-osx.yml | 2 -- .github/workflows/pip-packaging-windows.yml | 4 ++-- src/python/setup.py.in | 12 +++++++++++- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 28dd1d53..f30b89d8 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -36,11 +36,9 @@ jobs: cmake -DCMAKE_BUILD_TYPE=Release -DPython_ADDITIONAL_VERSIONS=3 .. 
cd src/python python setup.py bdist_wheel - pwd - name: Publish on PyPi env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - pwd python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 33ba0de9..687d62a9 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -25,7 +25,7 @@ jobs: architecture: x64 - name: Install dependencies run: | - vcpkg install tbb:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows + vcpkg install tbb:x64-windows boost-graph:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows python -m pip install --user -r .github/build-requirements.txt python -m pip install --user twine - name: Build python wheel @@ -40,4 +40,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* \ No newline at end of file + run: python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* \ No newline at end of file diff --git a/src/python/setup.py.in b/src/python/setup.py.in index b9f4e3f0..55d2b604 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -68,7 +68,17 @@ setup( author='GUDHI Editorial Board', author_email='gudhi-contact@lists.gforge.inria.fr', version='@GUDHI_VERSION@', - url='http://gudhi.gforge.inria.fr/', + url='https://gudhi.inria.fr/', + project_urls={ + 'Bug Tracker': 'https://github.com/GUDHI/gudhi-devel/issues', + 'Documentation': 'https://gudhi.inria.fr/python/latest/', + 'Source Code': 'https://github.com/GUDHI/gudhi-devel', + 'License': 'https://gudhi.inria.fr/licensing/' + }, + description='The Gudhi library is an open source library for Computational Topology and Topological Data ' \ + 'Analysis (TDA). It offers state-of-the-art algorithms to construct various types of simplicial complexes, ' \ + 'data structures to represent them, and algorithms to compute geometric approximations of shapes and ' \ + 'persistent homology.', ext_modules = ext_modules, install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9','pybind11',], -- cgit v1.2.3 From 7bc7712b14501c212e5b162d7317f118c6586546 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 09:28:54 +0200 Subject: Try to find gmp --- .github/workflows/pip-packaging-windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 687d62a9..53abc036 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -33,7 +33,7 @@ jobs: python --version mkdir build cd build - cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DPython_ADDITIONAL_VERSIONS=3 .. 
+ cmake -DCMAKE_BUILD_TYPE=Release -DGMP_INCLUDE_DIR="c:/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/vcpkg/installed/x64-windows/lib" -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DPython_ADDITIONAL_VERSIONS=3 .. cd src/python python setup.py bdist_wheel - name: Publish on PyPi -- cgit v1.2.3 From cfee936a422733f995fd1f7ae2ea7baa0cc07deb Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 09:54:19 +0200 Subject: Re-build all from scratch --- .github/workflows/pip-packaging-linux.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml index 485ef643..6b099243 100644 --- a/.github/workflows/pip-packaging-linux.yml +++ b/.github/workflows/pip-packaging-linux.yml @@ -3,6 +3,7 @@ name: pip packaging linux on: push: {} pull_request: {} + # on: # release: # types: [published] -- cgit v1.2.3 From be25848cb28a4f7bde1d21afef1bbb6fe0ba4a5b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 10:04:58 +0200 Subject: Cannot use a version already used... Even on testpypi... --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index ac89fa4d..59226274 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0) +set (GUDHI_PATCH_VERSION 0a0) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 433a8cdaa111ebb777450fc1ee2e316480e4b46a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 10:36:47 +0200 Subject: Add introduction to pypi description and new release version --- CMakeGUDHIVersion.txt | 2 +- src/python/CMakeLists.txt | 3 +++ src/python/setup.py.in | 14 ++++++++++---- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 59226274..3d8be42d 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a0) +set (GUDHI_PATCH_VERSION 0a1) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index ab08cd6d..d4cb7477 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -235,6 +235,9 @@ if(PYTHONINTERP_FOUND) file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + # Other .py files + file(COPY "introduction.md" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") + add_custom_command( OUTPUT gudhi.so WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} diff --git a/src/python/setup.py.in b/src/python/setup.py.in index 55d2b604..ff000a2c 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -13,6 +13,7 @@ from Cython.Build import cythonize from numpy import get_include as numpy_get_include import sys import pybind11 +from os import path __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" @@ -62,6 +63,11 @@ for module in pybind11_modules: runtime_library_dirs=runtime_library_dirs, )) +# read the contents of 
introduction.md +this_directory = path.abspath(path.dirname(__file__)) +with open(path.join(this_directory, 'introduction.md'), encoding='utf-8') as f: + long_description = f.read() + setup( name = 'gudhi', packages=find_packages(), # find_namespace_packages(include=["gudhi*"]) @@ -75,10 +81,10 @@ setup( 'Source Code': 'https://github.com/GUDHI/gudhi-devel', 'License': 'https://gudhi.inria.fr/licensing/' }, - description='The Gudhi library is an open source library for Computational Topology and Topological Data ' \ - 'Analysis (TDA). It offers state-of-the-art algorithms to construct various types of simplicial complexes, ' \ - 'data structures to represent them, and algorithms to compute geometric approximations of shapes and ' \ - 'persistent homology.', + description='The Gudhi library is an open source library for ' \ + 'Computational Topology and Topological Data Analysis (TDA).', + long_description=long_description, + long_description_content_type='text/markdown', ext_modules = ext_modules, install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9','pybind11',], -- cgit v1.2.3 From 3b91ae863718c402a991c5807579f578d266a04b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 10:43:07 +0200 Subject: add md file --- src/python/introduction.md | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 src/python/introduction.md diff --git a/src/python/introduction.md b/src/python/introduction.md new file mode 100644 index 00000000..3277e0ac --- /dev/null +++ b/src/python/introduction.md @@ -0,0 +1,30 @@ +![](https://gudhi.inria.fr/images/gudhi_architecture.png) + +#Introduction + +The Gudhi library is an open source library for Computational Topology and +Topological Data Analysis (TDA). It offers state-of-the-art algorithms +to construct various types of simplicial complexes, data structures to +represent them, and algorithms to compute geometric approximations of shapes +and persistent homology. + +The GUDHI library offers the following interoperable modules: +* Complexes: + * Cubical + * Simplicial: Rips, Witness, Alpha and Čech complexes + * Cover: Nerve and Graph induced complexes +* Data structures and basic operations: + * Simplex tree, Skeleton blockers and Toplex map + * Construction, update, filtration and simplification +* Topological descriptors computation +* Manifold reconstruction +* Topological descriptors tools: + * Bottleneck distance + * Statistical tools + * Persistence diagram and barcode + +For more information about Topological Data Analysis and its workflow, please +refer to the [Wikipedia TDA dedicated page][1]. + + [1]: https://en.wikipedia.org/wiki/Topological_data_analysis + -- cgit v1.2.3 From 5141caeff03f1e8c8c4ccae1ee7ca43fbcb2925f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 11:01:24 +0200 Subject: It was not markdown but rst... 
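
PyPI renders the project long description as reStructuredText, not Markdown, so the introduction file becomes introduction.rst and setup.py.in reads that file instead. A minimal sketch of how the description reaches the package metadata (file name and field names follow the diff below; this is illustrative, not the full setup.py.in):

    from os import path
    from setuptools import setup

    # Read the reStructuredText introduction shipped next to setup.py.
    this_directory = path.abspath(path.dirname(__file__))
    with open(path.join(this_directory, 'introduction.rst'), encoding='utf-8') as f:
        long_description = f.read()

    setup(
        name='gudhi',
        long_description=long_description,
        # 'text/x-rst' is only switched on in a later commit of this series;
        # it is shown here to keep the sketch self-contained.
        long_description_content_type='text/x-rst',
    )
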
--- src/python/CMakeLists.txt | 2 +- src/python/introduction.md | 30 ------------------------------ src/python/introduction.rst | 33 +++++++++++++++++++++++++++++++++ src/python/setup.py.in | 2 +- 4 files changed, 35 insertions(+), 32 deletions(-) delete mode 100644 src/python/introduction.md create mode 100644 src/python/introduction.rst diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index d4cb7477..fee6b6f5 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -236,7 +236,7 @@ if(PYTHONINTERP_FOUND) file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") # Other .py files - file(COPY "introduction.md" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") + file(COPY "introduction.rst" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") add_custom_command( OUTPUT gudhi.so diff --git a/src/python/introduction.md b/src/python/introduction.md deleted file mode 100644 index 3277e0ac..00000000 --- a/src/python/introduction.md +++ /dev/null @@ -1,30 +0,0 @@ -![](https://gudhi.inria.fr/images/gudhi_architecture.png) - -#Introduction - -The Gudhi library is an open source library for Computational Topology and -Topological Data Analysis (TDA). It offers state-of-the-art algorithms -to construct various types of simplicial complexes, data structures to -represent them, and algorithms to compute geometric approximations of shapes -and persistent homology. - -The GUDHI library offers the following interoperable modules: -* Complexes: - * Cubical - * Simplicial: Rips, Witness, Alpha and Čech complexes - * Cover: Nerve and Graph induced complexes -* Data structures and basic operations: - * Simplex tree, Skeleton blockers and Toplex map - * Construction, update, filtration and simplification -* Topological descriptors computation -* Manifold reconstruction -* Topological descriptors tools: - * Bottleneck distance - * Statistical tools - * Persistence diagram and barcode - -For more information about Topological Data Analysis and its workflow, please -refer to the [Wikipedia TDA dedicated page][1]. - - [1]: https://en.wikipedia.org/wiki/Topological_data_analysis - diff --git a/src/python/introduction.rst b/src/python/introduction.rst new file mode 100644 index 00000000..2cc4642e --- /dev/null +++ b/src/python/introduction.rst @@ -0,0 +1,33 @@ +.. figure:: + https://gudhi.inria.fr/images/gudhi_architecture.png + :figclass: align-center + :width: 100 % + :alt: GUDHI + +Introduction +============ + +The Gudhi library is an open source library for Computational Topology and +Topological Data Analysis (TDA). It offers state-of-the-art algorithms +to construct various types of simplicial complexes, data structures to +represent them, and algorithms to compute geometric approximations of shapes +and persistent homology. + +The GUDHI library offers the following interoperable modules: + +* Complexes: + * Cubical + * Simplicial: Rips, Witness, Alpha and Čech complexes + * Cover: Nerve and Graph induced complexes +* Data structures and basic operations: + * Simplex tree, Skeleton blockers and Toplex map + * Construction, update, filtration and simplification +* Topological descriptors computation +* Manifold reconstruction +* Topological descriptors tools: + * Bottleneck distance + * Statistical tools + * Persistence diagram and barcode + +For more information about Topological Data Analysis and its workflow, please +refer to the `Wikipedia TDA dedicated page `_. 
diff --git a/src/python/setup.py.in b/src/python/setup.py.in index ff000a2c..4725bb44 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -65,7 +65,7 @@ for module in pybind11_modules: # read the contents of introduction.md this_directory = path.abspath(path.dirname(__file__)) -with open(path.join(this_directory, 'introduction.md'), encoding='utf-8') as f: +with open(path.join(this_directory, 'introduction.rst'), encoding='utf-8') as f: long_description = f.read() setup( -- cgit v1.2.3 From b5dd88baaad901a66e047caef9a4ea72af00b6e6 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sun, 31 May 2020 11:13:43 +0200 Subject: release increment --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 3d8be42d..3ebdf68c 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a1) +set (GUDHI_PATCH_VERSION 0a2) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 524c5f3e8c48c25ad96d2e43634e73c18d8805c5 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jun 2020 08:26:48 +0200 Subject: try to fix pypi doc --- CMakeGUDHIVersion.txt | 2 +- src/python/introduction.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 3ebdf68c..d344f04f 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a2) +set (GUDHI_PATCH_VERSION 0a3) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/introduction.rst b/src/python/introduction.rst index 2cc4642e..adfed36e 100644 --- a/src/python/introduction.rst +++ b/src/python/introduction.rst @@ -1,4 +1,4 @@ -.. figure:: +.. image:: https://gudhi.inria.fr/images/gudhi_architecture.png :figclass: align-center :width: 100 % -- cgit v1.2.3 From 999ca34a509f6f6222a343306b725ce0bbbce795 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jun 2020 10:12:45 +0200 Subject: Try to fix rst doc for Pypi --- CMakeGUDHIVersion.txt | 2 +- src/python/introduction.rst | 1 - src/python/setup.py.in | 4 ++-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index d344f04f..75aedb74 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a3) +set (GUDHI_PATCH_VERSION 0a4) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/introduction.rst b/src/python/introduction.rst index adfed36e..3e9faae9 100644 --- a/src/python/introduction.rst +++ b/src/python/introduction.rst @@ -1,6 +1,5 @@ .. 
image:: https://gudhi.inria.fr/images/gudhi_architecture.png - :figclass: align-center :width: 100 % :alt: GUDHI diff --git a/src/python/setup.py.in b/src/python/setup.py.in index 4725bb44..f8eeafc4 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -63,7 +63,7 @@ for module in pybind11_modules: runtime_library_dirs=runtime_library_dirs, )) -# read the contents of introduction.md +# read the contents of introduction.rst this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'introduction.rst'), encoding='utf-8') as f: long_description = f.read() @@ -83,8 +83,8 @@ setup( }, description='The Gudhi library is an open source library for ' \ 'Computational Topology and Topological Data Analysis (TDA).', + long_description_content_type='text/x-rst', long_description=long_description, - long_description_content_type='text/markdown', ext_modules = ext_modules, install_requires = ['cython','numpy >= 1.9',], setup_requires = ['numpy >= 1.9','pybind11',], -- cgit v1.2.3 From 6d113d120d7e07df4909a7de1ae014cfb3571f38 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jun 2020 10:30:02 +0200 Subject: Try to fix rst doc for Pypi --- CMakeGUDHIVersion.txt | 2 +- src/python/introduction.rst | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 75aedb74..b790b828 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a4) +set (GUDHI_PATCH_VERSION 0a5) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/introduction.rst b/src/python/introduction.rst index 3e9faae9..90d9a381 100644 --- a/src/python/introduction.rst +++ b/src/python/introduction.rst @@ -1,7 +1,4 @@ -.. image:: - https://gudhi.inria.fr/images/gudhi_architecture.png - :width: 100 % - :alt: GUDHI +.. image:: https://gudhi.inria.fr/images/gudhi_architecture.png Introduction ============ -- cgit v1.2.3 From 7ff9d4ceeb659cac3af10a9f9d80c7267c47363a Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jun 2020 14:46:40 +0200 Subject: Let's go like this --- CMakeGUDHIVersion.txt | 2 +- src/python/introduction.rst | 5 ----- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index b790b828..ffcbc53e 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a5) +set (GUDHI_PATCH_VERSION 0a6) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/introduction.rst b/src/python/introduction.rst index 90d9a381..11c06ac5 100644 --- a/src/python/introduction.rst +++ b/src/python/introduction.rst @@ -1,8 +1,3 @@ -.. image:: https://gudhi.inria.fr/images/gudhi_architecture.png - -Introduction -============ - The Gudhi library is an open source library for Computational Topology and Topological Data Analysis (TDA). 
It offers state-of-the-art algorithms to construct various types of simplicial complexes, data structures to -- cgit v1.2.3 From 4aff8dc700a0790373d82ae24076359c09ee04c8 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Mon, 1 Jun 2020 15:24:28 +0200 Subject: Interface for hera's bottleneck_distance --- ext/hera | 2 +- .../modules/GUDHI_third_party_libraries.cmake | 1 + src/cmake/modules/GUDHI_user_version_target.cmake | 2 +- src/python/CMakeLists.txt | 8 +++- src/python/doc/bottleneck_distance_user.rst | 18 +++++-- src/python/gudhi/hera.cc | 56 ---------------------- src/python/gudhi/hera/__init__.py | 2 + src/python/gudhi/hera/bottleneck.cc | 45 +++++++++++++++++ src/python/gudhi/hera/wasserstein.cc | 56 ++++++++++++++++++++++ src/python/setup.py.in | 6 ++- src/python/test/test_bottleneck_distance.py | 6 ++- 11 files changed, 134 insertions(+), 68 deletions(-) delete mode 100644 src/python/gudhi/hera.cc create mode 100644 src/python/gudhi/hera/__init__.py create mode 100644 src/python/gudhi/hera/bottleneck.cc create mode 100644 src/python/gudhi/hera/wasserstein.cc diff --git a/ext/hera b/ext/hera index 0019cae9..2c5e6c60 160000 --- a/ext/hera +++ b/ext/hera @@ -1 +1 @@ -Subproject commit 0019cae9dc1e9d11aa03bc59681435ba7f21eea8 +Subproject commit 2c5e6c606ee37cd68bbe9f9915dba99f7677dd87 diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index f92fe93e..d80283d2 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -69,6 +69,7 @@ endif() # For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera. set(HERA_WASSERSTEIN_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include CACHE PATH "Directory where one can find Hera's wasserstein.h") +set(HERA_BOTTLENECK_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include CACHE PATH "Directory where one can find Hera's bottleneck.h") option(WITH_GUDHI_USE_TBB "Build with Intel TBB parallelization" ON) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index e99bb42d..491fa459 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -68,7 +68,7 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy_directory ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include ${GUDHI_USER_VERSION_DIR}/ext/hera/wasserstein/include) + copy_directory ${CMAKE_SOURCE_DIR}/ext/hera ${GUDHI_USER_VERSION_DIR}/ext/hera) set(GUDHI_DIRECTORIES "doc;example;concept;utilities") diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 96dd3f6f..1e81cac8 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -130,7 +130,8 @@ if(PYTHONINTERP_FOUND) set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'reader_utils', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'witness_complex', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'strong_witness_complex', ") - set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'hera', ") + set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'hera/wasserstein', ") + set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'hera/bottleneck', ") if (NOT CGAL_VERSION VERSION_LESS 4.11.0) set(GUDHI_PYBIND11_MODULES 
"${GUDHI_PYBIND11_MODULES}'bottleneck', ") set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") @@ -236,6 +237,7 @@ if(PYTHONINTERP_FOUND) file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/dtm_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + file(COPY "gudhi/hera/__init__.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi/hera") add_custom_command( OUTPUT gudhi.so @@ -355,7 +357,9 @@ if(PYTHONINTERP_FOUND) COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}" ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py") - add_gudhi_py_test(test_bottleneck_distance) + if (PYBIND11_FOUND) + add_gudhi_py_test(test_bottleneck_distance) + endif() # Cover complex file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/python/doc/bottleneck_distance_user.rst b/src/python/doc/bottleneck_distance_user.rst index 89da89d3..49bd3706 100644 --- a/src/python/doc/bottleneck_distance_user.rst +++ b/src/python/doc/bottleneck_distance_user.rst @@ -9,14 +9,22 @@ Definition .. include:: bottleneck_distance_sum.inc -This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" -:cite:`DBLP:journals/algorithmica/EfratIK01`. Another relevant publication, although it was not used is -"Geometry Helps to Compare Persistence Diagrams" :cite:`Kerber:2017:GHC:3047249.3064175`. +This implementation by François Godi is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" +:cite:`DBLP:journals/algorithmica/EfratIK01` and requires `CGAL `_. -Function --------- .. autofunction:: gudhi.bottleneck_distance +This other implementation comes from `Hera +`_ (BSD-3-Clause) which is +based on "Geometry Helps to Compare Persistence Diagrams" +:cite:`Kerber:2017:GHC:3047249.3064175` by Michael Kerber, Dmitriy +Morozov, and Arnur Nigmetov. + +Beware that its approximation allows for a multiplicative error, while the function above uses an additive error. + +.. autofunction:: gudhi.hera.bottleneck_distance + + Distance computation -------------------- diff --git a/src/python/gudhi/hera.cc b/src/python/gudhi/hera.cc deleted file mode 100644 index ea80a9a8..00000000 --- a/src/python/gudhi/hera.cc +++ /dev/null @@ -1,56 +0,0 @@ -/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - * Author(s): Marc Glisse - * - * Copyright (C) 2020 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#include // Hera - -#include - -double wasserstein_distance( - Dgm d1, Dgm d2, - double wasserstein_power, double internal_p, - double delta) -{ - // I *think* the call to request() has to be before releasing the GIL. - auto diag1 = numpy_to_range_of_pairs(d1); - auto diag2 = numpy_to_range_of_pairs(d2); - - py::gil_scoped_release release; - - hera::AuctionParams params; - params.wasserstein_power = wasserstein_power; - // hera encodes infinity as -1... - if(std::isinf(internal_p)) internal_p = hera::get_infinity(); - params.internal_p = internal_p; - params.delta = delta; - // The extra parameters are purposedly not exposed for now. 
- return hera::wasserstein_dist(diag1, diag2, params); -} - -PYBIND11_MODULE(hera, m) { - m.def("wasserstein_distance", &wasserstein_distance, - py::arg("X"), py::arg("Y"), - py::arg("order") = 1, - py::arg("internal_p") = std::numeric_limits::infinity(), - py::arg("delta") = .01, - R"pbdoc( - Compute the Wasserstein distance between two diagrams. - Points at infinity are supported. - - Parameters: - X (n x 2 numpy array): First diagram - Y (n x 2 numpy array): Second diagram - order (float): Wasserstein exponent W_q - internal_p (float): Internal Minkowski norm L^p in R^2 - delta (float): Relative error 1+delta - - Returns: - float: Approximate Wasserstein distance W_q(X,Y) - )pbdoc"); -} diff --git a/src/python/gudhi/hera/__init__.py b/src/python/gudhi/hera/__init__.py new file mode 100644 index 00000000..044f81cd --- /dev/null +++ b/src/python/gudhi/hera/__init__.py @@ -0,0 +1,2 @@ +from .wasserstein import wasserstein_distance +from .bottleneck import bottleneck_distance diff --git a/src/python/gudhi/hera/bottleneck.cc b/src/python/gudhi/hera/bottleneck.cc new file mode 100644 index 00000000..e00b4682 --- /dev/null +++ b/src/python/gudhi/hera/bottleneck.cc @@ -0,0 +1,45 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Marc Glisse + * + * Copyright (C) 2020 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#include // Hera + +#include + +double bottleneck_distance(Dgm d1, Dgm d2, double delta) +{ + // I *think* the call to request() has to be before releasing the GIL. + auto diag1 = numpy_to_range_of_pairs(d1); + auto diag2 = numpy_to_range_of_pairs(d2); + + py::gil_scoped_release release; + + if (delta == 0) + return hera::bottleneckDistExact(diag1, diag2); + else + return hera::bottleneckDistApprox(diag1, diag2, delta); +} + +PYBIND11_MODULE(bottleneck, m) { + m.def("bottleneck_distance", &bottleneck_distance, + py::arg("X"), py::arg("Y"), + py::arg("delta") = .01, + R"pbdoc( + Compute the Bottleneck distance between two diagrams. + Points at infinity are supported. + + Parameters: + X (n x 2 numpy array): First diagram + Y (n x 2 numpy array): Second diagram + delta (float): Relative error 1+delta + + Returns: + float: (approximate) bottleneck distance d_B(X,Y) + )pbdoc"); +} diff --git a/src/python/gudhi/hera/wasserstein.cc b/src/python/gudhi/hera/wasserstein.cc new file mode 100644 index 00000000..1a21f02f --- /dev/null +++ b/src/python/gudhi/hera/wasserstein.cc @@ -0,0 +1,56 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Marc Glisse + * + * Copyright (C) 2020 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#include // Hera + +#include + +double wasserstein_distance( + Dgm d1, Dgm d2, + double wasserstein_power, double internal_p, + double delta) +{ + // I *think* the call to request() has to be before releasing the GIL. + auto diag1 = numpy_to_range_of_pairs(d1); + auto diag2 = numpy_to_range_of_pairs(d2); + + py::gil_scoped_release release; + + hera::AuctionParams params; + params.wasserstein_power = wasserstein_power; + // hera encodes infinity as -1... 
+ if(std::isinf(internal_p)) internal_p = hera::get_infinity(); + params.internal_p = internal_p; + params.delta = delta; + // The extra parameters are purposedly not exposed for now. + return hera::wasserstein_dist(diag1, diag2, params); +} + +PYBIND11_MODULE(wasserstein, m) { + m.def("wasserstein_distance", &wasserstein_distance, + py::arg("X"), py::arg("Y"), + py::arg("order") = 1, + py::arg("internal_p") = std::numeric_limits::infinity(), + py::arg("delta") = .01, + R"pbdoc( + Compute the Wasserstein distance between two diagrams. + Points at infinity are supported. + + Parameters: + X (n x 2 numpy array): First diagram + Y (n x 2 numpy array): Second diagram + order (float): Wasserstein exponent W_q + internal_p (float): Internal Minkowski norm L^p in R^2 + delta (float): Relative error 1+delta + + Returns: + float: Approximate Wasserstein distance W_q(X,Y) + )pbdoc"); +} diff --git a/src/python/setup.py.in b/src/python/setup.py.in index b9f4e3f0..9b3c7521 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -48,10 +48,12 @@ ext_modules = cythonize(ext_modules) for module in pybind11_modules: my_include_dirs = include_dirs + [pybind11.get_include(False), pybind11.get_include(True)] - if module == 'hera': + if module == 'hera/wasserstein': my_include_dirs = ['@HERA_WASSERSTEIN_INCLUDE_DIR@'] + my_include_dirs + elif module == 'hera/bottleneck': + my_include_dirs = ['@HERA_BOTTLENECK_INCLUDE_DIR@'] + my_include_dirs ext_modules.append(Extension( - 'gudhi.' + module, + 'gudhi.' + module.replace('/', '.'), sources = [source_dir + module + '.cc'], language = 'c++', include_dirs = my_include_dirs, diff --git a/src/python/test/test_bottleneck_distance.py b/src/python/test/test_bottleneck_distance.py index 70b2abad..6915bea8 100755 --- a/src/python/test/test_bottleneck_distance.py +++ b/src/python/test/test_bottleneck_distance.py @@ -9,6 +9,8 @@ """ import gudhi +import gudhi.hera +import pytest __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" @@ -19,5 +21,7 @@ def test_basic_bottleneck(): diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]] diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]] - assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569 assert gudhi.bottleneck_distance(diag1, diag2) == 0.75 + assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == pytest.approx(0.75, abs=0.1) + assert gudhi.hera.bottleneck_distance(diag1, diag2, 0) == 0.75 + assert gudhi.hera.bottleneck_distance(diag1, diag2, 0.1) == pytest.approx(0.75, rel=0.1) -- cgit v1.2.3 From 59d3fa0aa4ed6628f0dfbe3424c1d7a5c6f9a382 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 1 Jun 2020 16:05:32 +0200 Subject: Simplify setup.py to read introduction.rst --- CMakeGUDHIVersion.txt | 2 +- src/python/setup.py.in | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index ffcbc53e..95e67372 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a6) +set (GUDHI_PATCH_VERSION 0a7) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/setup.py.in b/src/python/setup.py.in index f8eeafc4..dc163a37 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -13,7 +13,6 @@ from Cython.Build import cythonize from numpy import get_include as numpy_get_include 
import sys import pybind11 -from os import path __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" @@ -64,9 +63,8 @@ for module in pybind11_modules: )) # read the contents of introduction.rst -this_directory = path.abspath(path.dirname(__file__)) -with open(path.join(this_directory, 'introduction.rst'), encoding='utf-8') as f: - long_description = f.read() +with open("introduction.rst", "r") as fh: + long_description = fh.read() setup( name = 'gudhi', -- cgit v1.2.3 From cc42bcdf3323f2eb6edeaca105d29b32b394ca66 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Tue, 2 Jun 2020 07:33:31 +0200 Subject: Parallelism in pairwise_distances --- src/python/gudhi/representations/kernel_methods.py | 14 +++++------ src/python/gudhi/representations/metrics.py | 27 +++++++++++++++++----- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/src/python/gudhi/representations/kernel_methods.py b/src/python/gudhi/representations/kernel_methods.py index 596f4f07..c9bd9d01 100644 --- a/src/python/gudhi/representations/kernel_methods.py +++ b/src/python/gudhi/representations/kernel_methods.py @@ -10,7 +10,7 @@ import numpy as np from sklearn.base import BaseEstimator, TransformerMixin from sklearn.metrics import pairwise_distances, pairwise_kernels -from .metrics import SlicedWassersteinDistance, PersistenceFisherDistance, _sklearn_wrapper, pairwise_persistence_diagram_distances, _sliced_wasserstein_distance, _persistence_fisher_distance +from .metrics import SlicedWassersteinDistance, PersistenceFisherDistance, _sklearn_wrapper, _pairwise, pairwise_persistence_diagram_distances, _sliced_wasserstein_distance, _persistence_fisher_distance from .preprocessing import Padding ############################################# @@ -60,7 +60,7 @@ def _persistence_scale_space_kernel(D1, D2, kernel_approx=None, bandwidth=1.): weight_pss = lambda x: 1 if x[1] >= x[0] else -1 return 0.5 * _persistence_weighted_gaussian_kernel(DD1, DD2, weight=weight_pss, kernel_approx=kernel_approx, bandwidth=bandwidth) -def pairwise_persistence_diagram_kernels(X, Y=None, kernel="sliced_wasserstein", **kwargs): +def pairwise_persistence_diagram_kernels(X, Y=None, kernel="sliced_wasserstein", n_jobs=None, **kwargs): """ This function computes the kernel matrix between two lists of persistence diagrams given as numpy arrays of shape (nx2). 
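A hedged usage sketch for the n_jobs keyword added to pairwise_persistence_diagram_kernels in the hunk above (the toy diagrams and parameter values are assumptions for illustration, not part of the patch):

    import numpy as np
    from gudhi.representations.kernel_methods import pairwise_persistence_diagram_kernels

    # Two small toy persistence diagrams, one (birth, death) pair per row.
    diags = [np.array([[0.0, 1.0], [0.5, 2.0]]), np.array([[0.1, 1.2]])]

    # Sliced Wasserstein kernel matrix computed with 2 joblib jobs (threads by default).
    K = pairwise_persistence_diagram_kernels(diags, kernel="sliced_wasserstein",
                                             num_directions=10, bandwidth=1.0, n_jobs=2)
    print(K.shape)  # (2, 2)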
@@ -76,15 +76,15 @@ def pairwise_persistence_diagram_kernels(X, Y=None, kernel="sliced_wasserstein", XX = np.reshape(np.arange(len(X)), [-1,1]) YY = None if Y is None else np.reshape(np.arange(len(Y)), [-1,1]) if kernel == "sliced_wasserstein": - return np.exp(-pairwise_persistence_diagram_distances(X, Y, metric="sliced_wasserstein", num_directions=kwargs["num_directions"]) / kwargs["bandwidth"]) + return np.exp(-pairwise_persistence_diagram_distances(X, Y, metric="sliced_wasserstein", num_directions=kwargs["num_directions"], n_jobs=n_jobs) / kwargs["bandwidth"]) elif kernel == "persistence_fisher": - return np.exp(-pairwise_persistence_diagram_distances(X, Y, metric="persistence_fisher", kernel_approx=kwargs["kernel_approx"], bandwidth=kwargs["bandwidth"]) / kwargs["bandwidth_fisher"]) + return np.exp(-pairwise_persistence_diagram_distances(X, Y, metric="persistence_fisher", kernel_approx=kwargs["kernel_approx"], bandwidth=kwargs["bandwidth"], n_jobs=n_jobs) / kwargs["bandwidth_fisher"]) elif kernel == "persistence_scale_space": - return pairwise_kernels(XX, YY, metric=_sklearn_wrapper(_persistence_scale_space_kernel, X, Y, **kwargs)) + return _pairwise(pairwise_kernels, False, XX, YY, metric=_sklearn_wrapper(_persistence_scale_space_kernel, X, Y, **kwargs), n_jobs=n_jobs) elif kernel == "persistence_weighted_gaussian": - return pairwise_kernels(XX, YY, metric=_sklearn_wrapper(_persistence_weighted_gaussian_kernel, X, Y, **kwargs)) + return _pairwise(pairwise_kernels, False, XX, YY, metric=_sklearn_wrapper(_persistence_weighted_gaussian_kernel, X, Y, **kwargs), n_jobs=n_jobs) else: - return pairwise_kernels(XX, YY, metric=_sklearn_wrapper(metric, **kwargs)) + return _pairwise(pairwise_kernels, False, XX, YY, metric=_sklearn_wrapper(metric, **kwargs), n_jobs=n_jobs) class SlicedWassersteinKernel(BaseEstimator, TransformerMixin): """ diff --git a/src/python/gudhi/representations/metrics.py b/src/python/gudhi/representations/metrics.py index 8a32f7e9..23bccd68 100644 --- a/src/python/gudhi/representations/metrics.py +++ b/src/python/gudhi/representations/metrics.py @@ -12,6 +12,7 @@ from sklearn.base import BaseEstimator, TransformerMixin from sklearn.metrics import pairwise_distances from gudhi.hera import wasserstein_distance as hera_wasserstein_distance from .preprocessing import Padding +from joblib import Parallel, delayed, effective_n_jobs ############################################# # Metrics ################################### @@ -116,6 +117,20 @@ def _persistence_fisher_distance(D1, D2, kernel_approx=None, bandwidth=1.): vectorj = vectorj/vectorj_sum return np.arccos( min(np.dot(np.sqrt(vectori), np.sqrt(vectorj)), 1.) ) +def _pairwise(fallback, skipdiag, X, Y, metric, n_jobs): + if Y is not None: + return fallback(X, Y, metric=metric, n_jobs=n_jobs) + triu = np.triu_indices(len(X), k=skipdiag) + tril = (triu[1], triu[0]) + par = Parallel(n_jobs=n_jobs, prefer="threads") + d = par(delayed(metric)([triu[0][i]], [triu[1][i]]) for i in range(len(triu[0]))) + m = np.empty((len(X), len(X))) + m[triu] = d + m[tril] = d + if skipdiag: + np.fill_diagonal(m, 0) + return m + def _sklearn_wrapper(metric, X, Y, **kwargs): """ This function is a wrapper for any metric between two persistence diagrams that takes two numpy arrays of shapes (nx2) and (mx2) as arguments. 
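Similarly, a hedged sketch of the parallel distance-matrix computation enabled by the _pairwise helper above (toy data; the n_jobs value is arbitrary):

    import numpy as np
    from gudhi.representations.metrics import pairwise_persistence_diagram_distances

    diags = [np.array([[0.0, 1.0], [0.5, 2.0]]),
             np.array([[0.1, 1.2]]),
             np.array([[0.2, 0.9]])]

    # With Y=None only the upper triangle is computed (in parallel via joblib threads)
    # and then mirrored, with zeros on the diagonal.
    D = pairwise_persistence_diagram_distances(diags, metric="sliced_wasserstein",
                                               num_directions=10, n_jobs=4)
    print(D.shape)  # (3, 3)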
@@ -134,7 +149,7 @@ PAIRWISE_DISTANCE_FUNCTIONS = { "persistence_fisher": _persistence_fisher_distance, } -def pairwise_persistence_diagram_distances(X, Y=None, metric="bottleneck", **kwargs): +def pairwise_persistence_diagram_distances(X, Y=None, metric="bottleneck", n_jobs=None, **kwargs): """ This function computes the distance matrix between two lists of persistence diagrams given as numpy arrays of shape (nx2). @@ -152,25 +167,25 @@ def pairwise_persistence_diagram_distances(X, Y=None, metric="bottleneck", **kwa if metric == "bottleneck": try: from .. import bottleneck_distance - return pairwise_distances(XX, YY, metric=_sklearn_wrapper(bottleneck_distance, X, Y, **kwargs)) + return _pairwise(pairwise_distances, True, XX, YY, metric=_sklearn_wrapper(bottleneck_distance, X, Y, **kwargs), n_jobs=n_jobs) except ImportError: print("Gudhi built without CGAL") raise elif metric == "pot_wasserstein": try: from gudhi.wasserstein import wasserstein_distance as pot_wasserstein_distance - return pairwise_distances(XX, YY, metric=_sklearn_wrapper(pot_wasserstein_distance, X, Y, **kwargs)) + return _pairwise(pairwise_distances, True, XX, YY, metric=_sklearn_wrapper(pot_wasserstein_distance, X, Y, **kwargs), n_jobs=n_jobs) except ImportError: print("POT (Python Optimal Transport) is not installed. Please install POT or use metric='wasserstein' or metric='hera_wasserstein'") raise elif metric == "sliced_wasserstein": Xproj = _compute_persistence_diagram_projections(X, **kwargs) Yproj = None if Y is None else _compute_persistence_diagram_projections(Y, **kwargs) - return pairwise_distances(XX, YY, metric=_sklearn_wrapper(_sliced_wasserstein_distance_on_projections, Xproj, Yproj)) + return _pairwise(pairwise_distances, True, XX, YY, metric=_sklearn_wrapper(_sliced_wasserstein_distance_on_projections, Xproj, Yproj), n_jobs=n_jobs) elif type(metric) == str: - return pairwise_distances(XX, YY, metric=_sklearn_wrapper(PAIRWISE_DISTANCE_FUNCTIONS[metric], X, Y, **kwargs)) + return _pairwise(pairwise_distances, True, XX, YY, metric=_sklearn_wrapper(PAIRWISE_DISTANCE_FUNCTIONS[metric], X, Y, **kwargs), n_jobs=n_jobs) else: - return pairwise_distances(XX, YY, metric=_sklearn_wrapper(metric, X, Y, **kwargs)) + return _pairwise(pairwise_distances, True, XX, YY, metric=_sklearn_wrapper(metric, X, Y, **kwargs), n_jobs=n_jobs) class SlicedWassersteinDistance(BaseEstimator, TransformerMixin): """ -- cgit v1.2.3 From aebfa07217f58943753967cfd3d7daa9f28d5650 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Tue, 2 Jun 2020 17:56:18 +0200 Subject: More n_jobs in metrics --- src/python/gudhi/representations/metrics.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/src/python/gudhi/representations/metrics.py b/src/python/gudhi/representations/metrics.py index 23bccd68..84907160 100644 --- a/src/python/gudhi/representations/metrics.py +++ b/src/python/gudhi/representations/metrics.py @@ -12,7 +12,7 @@ from sklearn.base import BaseEstimator, TransformerMixin from sklearn.metrics import pairwise_distances from gudhi.hera import wasserstein_distance as hera_wasserstein_distance from .preprocessing import Padding -from joblib import Parallel, delayed, effective_n_jobs +from joblib import Parallel, delayed ############################################# # Metrics ################################### @@ -157,6 +157,7 @@ def pairwise_persistence_diagram_distances(X, Y=None, metric="bottleneck", n_job X (list of n numpy arrays of shape (numx2)): first list of persistence diagrams. 
Y (list of m numpy arrays of shape (numx2)): second list of persistence diagrams (optional). If None, pairwise distances are computed from the first list only. metric: distance to use. It can be either a string ("sliced_wasserstein", "wasserstein", "hera_wasserstein" (Wasserstein distance computed with Hera---note that Hera is also used for the default option "wasserstein"), "pot_wasserstein" (Wasserstein distance computed with POT), "bottleneck", "persistence_fisher") or a function taking two numpy arrays of shape (nx2) and (mx2) as inputs. If it is a function, make sure that it is symmetric and that it outputs 0 if called on the same two arrays. + n_jobs (int): number of jobs to use for the computation. This uses joblib.Parallel(prefer="threads"), so metrics that do not release the GIL may not scale unless run inside a `joblib.parallel_backend `_ block. **kwargs: optional keyword parameters. Any further parameters are passed directly to the distance function. See the docs of the various distance classes in this module. Returns: @@ -191,14 +192,16 @@ class SlicedWassersteinDistance(BaseEstimator, TransformerMixin): """ This is a class for computing the sliced Wasserstein distance matrix from a list of persistence diagrams. The Sliced Wasserstein distance is computed by projecting the persistence diagrams onto lines, comparing the projections with the 1-norm, and finally integrating over all possible lines. See http://proceedings.mlr.press/v70/carriere17a.html for more details. """ - def __init__(self, num_directions=10): + def __init__(self, num_directions=10, n_jobs=None): """ Constructor for the SlicedWassersteinDistance class. Parameters: num_directions (int): number of lines evenly sampled from [-pi/2,pi/2] in order to approximate and speed up the distance computation (default 10). + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_distances` for details. """ self.num_directions = num_directions + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -221,7 +224,7 @@ class SlicedWassersteinDistance(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise sliced Wasserstein distances. """ - return pairwise_persistence_diagram_distances(X, self.diagrams_, metric="sliced_wasserstein", num_directions=self.num_directions) + return pairwise_persistence_diagram_distances(X, self.diagrams_, metric="sliced_wasserstein", num_directions=self.num_directions, n_jobs=self.n_jobs) def __call__(self, diag1, diag2): """ @@ -242,14 +245,16 @@ class BottleneckDistance(BaseEstimator, TransformerMixin): :Requires: `CGAL `_ :math:`\geq` 4.11.0 """ - def __init__(self, epsilon=None): + def __init__(self, epsilon=None, n_jobs=None): """ Constructor for the BottleneckDistance class. Parameters: epsilon (double): absolute (additive) error tolerated on the distance (default is the smallest positive float), see :func:`gudhi.bottleneck_distance`. + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_distances` for details. """ self.epsilon = epsilon + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -272,7 +277,7 @@ class BottleneckDistance(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise bottleneck distances. 
""" - Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric="bottleneck", e=self.epsilon) + Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric="bottleneck", e=self.epsilon, n_jobs=self.n_jobs) return Xfit def __call__(self, diag1, diag2): @@ -297,15 +302,17 @@ class PersistenceFisherDistance(BaseEstimator, TransformerMixin): """ This is a class for computing the persistence Fisher distance matrix from a list of persistence diagrams. The persistence Fisher distance is obtained by computing the original Fisher distance between the probability distributions associated to the persistence diagrams given by convolving them with a Gaussian kernel. See http://papers.nips.cc/paper/8205-persistence-fisher-kernel-a-riemannian-manifold-kernel-for-persistence-diagrams for more details. """ - def __init__(self, bandwidth=1., kernel_approx=None): + def __init__(self, bandwidth=1., kernel_approx=None, n_jobs=None): """ Constructor for the PersistenceFisherDistance class. Parameters: bandwidth (double): bandwidth of the Gaussian kernel used to turn persistence diagrams into probability distributions (default 1.). kernel_approx (class): kernel approximation class used to speed up computation (default None). Common kernel approximations classes can be found in the scikit-learn library (such as RBFSampler for instance). + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_distances` for details. """ self.bandwidth, self.kernel_approx = bandwidth, kernel_approx + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -328,7 +335,7 @@ class PersistenceFisherDistance(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise persistence Fisher distances. """ - return pairwise_persistence_diagram_distances(X, self.diagrams_, metric="persistence_fisher", bandwidth=self.bandwidth, kernel_approx=self.kernel_approx) + return pairwise_persistence_diagram_distances(X, self.diagrams_, metric="persistence_fisher", bandwidth=self.bandwidth, kernel_approx=self.kernel_approx, n_jobs=self.n_jobs) def __call__(self, diag1, diag2): """ @@ -347,7 +354,7 @@ class WassersteinDistance(BaseEstimator, TransformerMixin): """ This is a class for computing the Wasserstein distance matrix from a list of persistence diagrams. """ - def __init__(self, order=2, internal_p=2, mode="pot", delta=0.01): + def __init__(self, order=2, internal_p=2, mode="pot", delta=0.01, n_jobs=None): """ Constructor for the WassersteinDistance class. @@ -356,10 +363,12 @@ class WassersteinDistance(BaseEstimator, TransformerMixin): internal_p (int): ground metric on the (upper-half) plane (i.e. norm l_p in R^2), default value is 2 (euclidean norm), see :func:`gudhi.wasserstein.wasserstein_distance`. mode (str): method for computing Wasserstein distance. Either "pot" or "hera". delta (float): relative error 1+delta. Used only if mode == "hera". + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_distances` for details. """ self.order, self.internal_p, self.mode = order, internal_p, mode self.metric = "pot_wasserstein" if mode == "pot" else "hera_wasserstein" self.delta = delta + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -383,9 +392,9 @@ class WassersteinDistance(BaseEstimator, TransformerMixin): numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise Wasserstein distances. 
""" if self.metric == "hera_wasserstein": - Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p, delta=self.delta) + Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p, delta=self.delta, n_jobs=self.n_jobs) else: - Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p, matching=False) + Xfit = pairwise_persistence_diagram_distances(X, self.diagrams_, metric=self.metric, order=self.order, internal_p=self.internal_p, matching=False, n_jobs=self.n_jobs) return Xfit def __call__(self, diag1, diag2): -- cgit v1.2.3 From 7706056bb9c0396188201570f9399e636df63df7 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Tue, 2 Jun 2020 18:08:52 +0200 Subject: n_jobs for kernels --- src/python/gudhi/representations/kernel_methods.py | 25 +++++++++++++++------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/src/python/gudhi/representations/kernel_methods.py b/src/python/gudhi/representations/kernel_methods.py index c9bd9d01..6e4f0619 100644 --- a/src/python/gudhi/representations/kernel_methods.py +++ b/src/python/gudhi/representations/kernel_methods.py @@ -68,6 +68,7 @@ def pairwise_persistence_diagram_kernels(X, Y=None, kernel="sliced_wasserstein", X (list of n numpy arrays of shape (numx2)): first list of persistence diagrams. Y (list of m numpy arrays of shape (numx2)): second list of persistence diagrams (optional). If None, pairwise kernel values are computed from the first list only. kernel: kernel to use. It can be either a string ("sliced_wasserstein", "persistence_scale_space", "persistence_weighted_gaussian", "persistence_fisher") or a function taking two numpy arrays of shape (nx2) and (mx2) as inputs. If it is a function, make sure that it is symmetric. + n_jobs (int): number of jobs to use for the computation. This uses joblib.Parallel(prefer="threads"), so kernels that do not release the GIL may not scale unless run inside a `joblib.parallel_backend `_ block. **kwargs: optional keyword parameters. Any further parameters are passed directly to the kernel function. See the docs of the various kernel classes in this module. Returns: @@ -90,16 +91,18 @@ class SlicedWassersteinKernel(BaseEstimator, TransformerMixin): """ This is a class for computing the sliced Wasserstein kernel matrix from a list of persistence diagrams. The sliced Wasserstein kernel is computed by exponentiating the corresponding sliced Wasserstein distance with a Gaussian kernel. See http://proceedings.mlr.press/v70/carriere17a.html for more details. """ - def __init__(self, num_directions=10, bandwidth=1.0): + def __init__(self, num_directions=10, bandwidth=1.0, n_jobs=None): """ Constructor for the SlicedWassersteinKernel class. Parameters: bandwidth (double): bandwidth of the Gaussian kernel applied to the sliced Wasserstein distance (default 1.). num_directions (int): number of lines evenly sampled from [-pi/2,pi/2] in order to approximate and speed up the kernel computation (default 10). + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_kernels` for details. 
""" self.bandwidth = bandwidth self.num_directions = num_directions + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -122,7 +125,7 @@ class SlicedWassersteinKernel(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise sliced Wasserstein kernel values. """ - return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="sliced_wasserstein", bandwidth=self.bandwidth, num_directions=self.num_directions) + return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="sliced_wasserstein", bandwidth=self.bandwidth, num_directions=self.num_directions, n_jobs=self.n_jobs) def __call__(self, diag1, diag2): """ @@ -141,7 +144,7 @@ class PersistenceWeightedGaussianKernel(BaseEstimator, TransformerMixin): """ This is a class for computing the persistence weighted Gaussian kernel matrix from a list of persistence diagrams. The persistence weighted Gaussian kernel is computed by convolving the persistence diagram points with weighted Gaussian kernels. See http://proceedings.mlr.press/v48/kusano16.html for more details. """ - def __init__(self, bandwidth=1., weight=lambda x: 1, kernel_approx=None): + def __init__(self, bandwidth=1., weight=lambda x: 1, kernel_approx=None, n_jobs=None): """ Constructor for the PersistenceWeightedGaussianKernel class. @@ -149,9 +152,11 @@ class PersistenceWeightedGaussianKernel(BaseEstimator, TransformerMixin): bandwidth (double): bandwidth of the Gaussian kernel with which persistence diagrams will be convolved (default 1.) weight (function): weight function for the persistence diagram points (default constant function, ie lambda x: 1). This function must be defined on 2D points, ie lists or numpy arrays of the form [p_x,p_y]. kernel_approx (class): kernel approximation class used to speed up computation (default None). Common kernel approximations classes can be found in the scikit-learn library (such as RBFSampler for instance). + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_kernels` for details. """ self.bandwidth, self.weight = bandwidth, weight self.kernel_approx = kernel_approx + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -174,7 +179,7 @@ class PersistenceWeightedGaussianKernel(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise persistence weighted Gaussian kernel values. """ - return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="persistence_weighted_gaussian", bandwidth=self.bandwidth, weight=self.weight, kernel_approx=self.kernel_approx) + return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="persistence_weighted_gaussian", bandwidth=self.bandwidth, weight=self.weight, kernel_approx=self.kernel_approx, n_jobs=self.n_jobs) def __call__(self, diag1, diag2): """ @@ -193,15 +198,17 @@ class PersistenceScaleSpaceKernel(BaseEstimator, TransformerMixin): """ This is a class for computing the persistence scale space kernel matrix from a list of persistence diagrams. The persistence scale space kernel is computed by adding the symmetric to the diagonal of each point in each persistence diagram, with negative weight, and then convolving the points with a Gaussian kernel. See https://www.cv-foundation.org/openaccess/content_cvpr_2015/papers/Reininghaus_A_Stable_Multi-Scale_2015_CVPR_paper.pdf for more details. 
""" - def __init__(self, bandwidth=1., kernel_approx=None): + def __init__(self, bandwidth=1., kernel_approx=None, n_jobs=None): """ Constructor for the PersistenceScaleSpaceKernel class. Parameters: bandwidth (double): bandwidth of the Gaussian kernel with which persistence diagrams will be convolved (default 1.) kernel_approx (class): kernel approximation class used to speed up computation (default None). Common kernel approximations classes can be found in the scikit-learn library (such as RBFSampler for instance). + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_kernels` for details. """ self.bandwidth, self.kernel_approx = bandwidth, kernel_approx + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -224,7 +231,7 @@ class PersistenceScaleSpaceKernel(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise persistence scale space kernel values. """ - return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="persistence_scale_space", bandwidth=self.bandwidth, kernel_approx=self.kernel_approx) + return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="persistence_scale_space", bandwidth=self.bandwidth, kernel_approx=self.kernel_approx, n_jobs=self.n_jobs) def __call__(self, diag1, diag2): """ @@ -243,7 +250,7 @@ class PersistenceFisherKernel(BaseEstimator, TransformerMixin): """ This is a class for computing the persistence Fisher kernel matrix from a list of persistence diagrams. The persistence Fisher kernel is computed by exponentiating the corresponding persistence Fisher distance with a Gaussian kernel. See papers.nips.cc/paper/8205-persistence-fisher-kernel-a-riemannian-manifold-kernel-for-persistence-diagrams for more details. """ - def __init__(self, bandwidth_fisher=1., bandwidth=1., kernel_approx=None): + def __init__(self, bandwidth_fisher=1., bandwidth=1., kernel_approx=None, n_jobs=None): """ Constructor for the PersistenceFisherKernel class. @@ -251,9 +258,11 @@ class PersistenceFisherKernel(BaseEstimator, TransformerMixin): bandwidth (double): bandwidth of the Gaussian kernel applied to the persistence Fisher distance (default 1.). bandwidth_fisher (double): bandwidth of the Gaussian kernel used to turn persistence diagrams into probability distributions by PersistenceFisherDistance class (default 1.). kernel_approx (class): kernel approximation class used to speed up computation (default None). Common kernel approximations classes can be found in the scikit-learn library (such as RBFSampler for instance). + n_jobs (int): number of jobs to use for the computation. See :func:`pairwise_persistence_diagram_kernels` for details. """ self.bandwidth = bandwidth self.bandwidth_fisher, self.kernel_approx = bandwidth_fisher, kernel_approx + self.n_jobs = n_jobs def fit(self, X, y=None): """ @@ -276,7 +285,7 @@ class PersistenceFisherKernel(BaseEstimator, TransformerMixin): Returns: numpy array of shape (number of diagrams in **diagrams**) x (number of diagrams in X): matrix of pairwise persistence Fisher kernel values. 
""" - return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="persistence_fisher", bandwidth=self.bandwidth, bandwidth_fisher=self.bandwidth_fisher, kernel_approx=self.kernel_approx) + return pairwise_persistence_diagram_kernels(X, self.diagrams_, kernel="persistence_fisher", bandwidth=self.bandwidth, bandwidth_fisher=self.bandwidth_fisher, kernel_approx=self.kernel_approx, n_jobs=self.n_jobs) def __call__(self, diag1, diag2): """ -- cgit v1.2.3 From 69852030a6d1b68f3283b5727c6b944a9c7f5e73 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Tue, 2 Jun 2020 21:07:29 +0200 Subject: Some test --- src/python/gudhi/representations/kernel_methods.py | 2 +- src/python/gudhi/representations/metrics.py | 2 +- src/python/test/test_representations.py | 33 +++++++++++++++++++++- 3 files changed, 34 insertions(+), 3 deletions(-) diff --git a/src/python/gudhi/representations/kernel_methods.py b/src/python/gudhi/representations/kernel_methods.py index 6e4f0619..23fd23c7 100644 --- a/src/python/gudhi/representations/kernel_methods.py +++ b/src/python/gudhi/representations/kernel_methods.py @@ -75,7 +75,7 @@ def pairwise_persistence_diagram_kernels(X, Y=None, kernel="sliced_wasserstein", numpy array of shape (nxm): kernel matrix. """ XX = np.reshape(np.arange(len(X)), [-1,1]) - YY = None if Y is None else np.reshape(np.arange(len(Y)), [-1,1]) + YY = None if Y is None or Y is X else np.reshape(np.arange(len(Y)), [-1,1]) if kernel == "sliced_wasserstein": return np.exp(-pairwise_persistence_diagram_distances(X, Y, metric="sliced_wasserstein", num_directions=kwargs["num_directions"], n_jobs=n_jobs) / kwargs["bandwidth"]) elif kernel == "persistence_fisher": diff --git a/src/python/gudhi/representations/metrics.py b/src/python/gudhi/representations/metrics.py index 84907160..cf2e0879 100644 --- a/src/python/gudhi/representations/metrics.py +++ b/src/python/gudhi/representations/metrics.py @@ -164,7 +164,7 @@ def pairwise_persistence_diagram_distances(X, Y=None, metric="bottleneck", n_job numpy array of shape (nxm): distance matrix """ XX = np.reshape(np.arange(len(X)), [-1,1]) - YY = None if Y is None else np.reshape(np.arange(len(Y)), [-1,1]) + YY = None if Y is None or Y is X else np.reshape(np.arange(len(Y)), [-1,1]) if metric == "bottleneck": try: from .. 
import bottleneck_distance diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py index dba7f952..589cee00 100755 --- a/src/python/test/test_representations.py +++ b/src/python/test/test_representations.py @@ -1,12 +1,43 @@ import os import sys import matplotlib.pyplot as plt +import numpy as np +import pytest + def test_representations_examples(): # Disable graphics for testing purposes - plt.show = lambda:None + plt.show = lambda: None here = os.path.dirname(os.path.realpath(__file__)) sys.path.append(here + "/../example") import diagram_vectorizations_distances_kernels return None + + +from gudhi.representations.metrics import * +from gudhi.representations.kernel_methods import * + + +def _n_diags(n): + l = [] + for _ in range(n): + a = np.random.rand(50, 2) + a[:, 1] += a[:, 0] # So that y >= x + l.append(a) + return l + + +def test_multiple(): + l1 = _n_diags(9) + l2 = _n_diags(11) + l1b = l1.copy() + d1 = pairwise_persistence_diagram_distances(l1, e=0.00001, n_jobs=4) + d2 = BottleneckDistance(epsilon=0.00001).fit_transform(l1) + d3 = pairwise_persistence_diagram_distances(l1, l1b, e=0.00001, n_jobs=4) + assert d1 == pytest.approx(d2) + assert d3 == pytest.approx(d2, abs=1e-5) # Because of 0 entries (on the diagonal) + d1 = pairwise_persistence_diagram_distances(l1, l2, metric="wasserstein", order=2, internal_p=2) + d2 = WassersteinDistance(order=2, internal_p=2, n_jobs=4).fit(l2).transform(l1) + print(d1.shape, d2.shape) + assert d1 == pytest.approx(d2, rel=.02) -- cgit v1.2.3 From 25176e3ba75005f36cfe3ebc15e1fcba8cd3227b Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Tue, 2 Jun 2020 21:58:48 +0200 Subject: Support e=None for bottleneck_distance --- src/python/gudhi/bottleneck.cc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/python/gudhi/bottleneck.cc b/src/python/gudhi/bottleneck.cc index 732cb9a8..9337ce59 100644 --- a/src/python/gudhi/bottleneck.cc +++ b/src/python/gudhi/bottleneck.cc @@ -12,22 +12,26 @@ #include -double bottleneck(Dgm d1, Dgm d2, double epsilon) +// For compatibility with older versions, we want to support e=None. +// In C++17, the recommended way is std::optional. +double bottleneck(Dgm d1, Dgm d2, py::object epsilon) { + double e = (std::numeric_limits::min)(); + if (!epsilon.is_none()) e = epsilon.cast(); // I *think* the call to request() has to be before releasing the GIL. auto diag1 = numpy_to_range_of_pairs(d1); auto diag2 = numpy_to_range_of_pairs(d2); py::gil_scoped_release release; - return Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, epsilon); + return Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, e); } PYBIND11_MODULE(bottleneck, m) { m.attr("__license__") = "GPL v3"; m.def("bottleneck_distance", &bottleneck, py::arg("diagram_1"), py::arg("diagram_2"), - py::arg("e") = (std::numeric_limits::min)(), + py::arg("e") = py::none(), R"pbdoc( This function returns the point corresponding to a given vertex. 
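Taken together, a hedged sketch of how the two bottleneck implementations touched by these patches are called, mirroring the updated test_bottleneck_distance.py above (the diagrams are the same toy data used there):

    import gudhi
    import gudhi.hera

    diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("inf")]]
    diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("inf")]]

    # CGAL-based implementation: optional additive error e; e=None (the new default)
    # asks for the smallest representable tolerance, i.e. essentially the exact value.
    print(gudhi.bottleneck_distance(diag1, diag2))
    print(gudhi.bottleneck_distance(diag1, diag2, 0.1))  # additive error at most 0.1

    # Hera-based implementation: relative error delta; delta=0 requests the exact distance.
    print(gudhi.hera.bottleneck_distance(diag1, diag2, 0))
    print(gudhi.hera.bottleneck_distance(diag1, diag2, 0.1))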
-- cgit v1.2.3 From 720d6822a9436f46631ec6e07d33d4581849b6e8 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 2 Jun 2020 22:05:59 +0200 Subject: Fix sphinx warning and link inside the doc --- src/python/doc/rips_complex_ref.rst | 4 ---- src/python/doc/rips_complex_user.rst | 6 ++---- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/src/python/doc/rips_complex_ref.rst b/src/python/doc/rips_complex_ref.rst index 2aa6b268..f0582d5c 100644 --- a/src/python/doc/rips_complex_ref.rst +++ b/src/python/doc/rips_complex_ref.rst @@ -13,8 +13,6 @@ Rips complex reference manual .. automethod:: gudhi.RipsComplex.__init__ -.. _weighted-rips-complex-reference-manual: - ====================================== Weighted Rips complex reference manual ====================================== @@ -26,8 +24,6 @@ Weighted Rips complex reference manual .. automethod:: gudhi.weighted_rips_complex.WeightedRipsComplex.__init__ -.. _dtm-rips-complex-reference-manual: - ================================= DTM Rips complex reference manual ================================= diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst index dd2f2cc0..868ef0e7 100644 --- a/src/python/doc/rips_complex_user.rst +++ b/src/python/doc/rips_complex_user.rst @@ -378,7 +378,7 @@ Example from a point cloud combined with DistanceToMeasure ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Combining with DistanceToMeasure, one can compute the DTM-filtration of a point set, as in `this notebook `_. -Remark that `DTMRipsComplex `_ class provides exactly this function. +Remark that `DTM Rips Complex (user manual) `_ class provides exactly this function. .. testcode:: @@ -400,12 +400,10 @@ The output is: [(0, (3.1622776601683795, inf)), (0, (3.1622776601683795, 5.39834563766817)), (0, (3.1622776601683795, 5.39834563766817))] -.. _dtm-rips-complex: - DTM Rips Complex ---------------- -`DTMRipsComplex `_ builds a simplicial complex from a point set or a full distance matrix (in the form of ndarray), as described in the above example. +`DTMRipsComplex (reference manual) `_ builds a simplicial complex from a point set or a full distance matrix (in the form of ndarray), as described in the above example. This class constructs a weighted Rips complex giving larger weights to outliers, which reduces their impact on the persistence diagram. See `this notebook `_ for some experiments. .. testcode:: -- cgit v1.2.3 From 83db295e36db019172cdcf3a490c500a880c862f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 3 Jun 2020 07:36:27 +0200 Subject: doc review: use :class: to link to the ref manual, rollback link to the user manual --- src/python/doc/rips_complex_user.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst index 868ef0e7..6048cc4e 100644 --- a/src/python/doc/rips_complex_user.rst +++ b/src/python/doc/rips_complex_user.rst @@ -378,7 +378,7 @@ Example from a point cloud combined with DistanceToMeasure ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Combining with DistanceToMeasure, one can compute the DTM-filtration of a point set, as in `this notebook `_. -Remark that `DTM Rips Complex (user manual) `_ class provides exactly this function. +Remark that `DTMRipsComplex `_ class provides exactly this function. .. 
testcode:: @@ -403,7 +403,7 @@ The output is: DTM Rips Complex ---------------- -`DTMRipsComplex (reference manual) `_ builds a simplicial complex from a point set or a full distance matrix (in the form of ndarray), as described in the above example. +:class:`~gudhi.dtm_rips_complex.DTMRipsComplex` builds a simplicial complex from a point set or a full distance matrix (in the form of ndarray), as described in the above example. This class constructs a weighted Rips complex giving larger weights to outliers, which reduces their impact on the persistence diagram. See `this notebook `_ for some experiments. .. testcode:: -- cgit v1.2.3 From 96f14e32f9b2230a6204f300488b0aad04a676a1 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 3 Jun 2020 09:39:28 +0200 Subject: Mention Joblib in installation doc --- src/python/doc/installation.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index de09c5b3..a66e910e 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -260,6 +260,13 @@ a flag `enable_autodiff=True` is used). In order to reduce code duplication, we use `EagerPy `_ which wraps arrays from PyTorch, TensorFlow and JAX in a common interface. +Joblib +------ + +`Joblib `_ is used both as a dependency of `Scikit-learn`_, +and directly for parallelism in some modules (:class:`~gudhi.point_cloud.knn.KNearestNeighbors`, +:func:`~gudhi.representations.metrics.pairwise_persistence_diagram_distances`). + Hnswlib ------- -- cgit v1.2.3 From 18619efce47ef7fc44ba97e9b37b7f6162f5fe1b Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Thu, 4 Jun 2020 16:41:53 +0200 Subject: Workaround for ssize_t on windows --- src/python/gudhi/hera/bottleneck.cc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/python/gudhi/hera/bottleneck.cc b/src/python/gudhi/hera/bottleneck.cc index e00b4682..f543613e 100644 --- a/src/python/gudhi/hera/bottleneck.cc +++ b/src/python/gudhi/hera/bottleneck.cc @@ -8,6 +8,14 @@ * - YYYY/MM Author: Description of the modification */ +// https://github.com/grey-narn/hera/issues/3 +// ssize_t is a non-standard type (well, posix) +// BaseTsd.h provides SSIZE_T on windows, this one should be the same there. +#ifdef _MSC_VER +#include +typedef std::ptrdiff_t ssize_t; +#endif + #include // Hera #include -- cgit v1.2.3 From e84f5d1b3c753147ac1601537b2be15ec118144e Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Thu, 4 Jun 2020 17:01:33 +0200 Subject: user_version: don't copy hera if an external version is specified --- src/cmake/modules/GUDHI_third_party_libraries.cmake | 6 ++++-- src/cmake/modules/GUDHI_user_version_target.cmake | 7 +++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index d80283d2..a56a2756 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -68,8 +68,10 @@ if(CGAL_FOUND) endif() # For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera. 
-set(HERA_WASSERSTEIN_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include CACHE PATH "Directory where one can find Hera's wasserstein.h") -set(HERA_BOTTLENECK_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include CACHE PATH "Directory where one can find Hera's bottleneck.h") +set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include) +set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h") +set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include) +set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h") option(WITH_GUDHI_USE_TBB "Build with Intel TBB parallelization" ON) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index 491fa459..e4f39aae 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -67,8 +67,11 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI) -add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy_directory ${CMAKE_SOURCE_DIR}/ext/hera ${GUDHI_USER_VERSION_DIR}/ext/hera) +if(HERA_WASSERSTEIN_INCLUDE_DIR STREQUAL HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR OR + HERA_BOTTLENECK_INCLUDE_DIR STREQUAL HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR) + add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E + copy_directory ${CMAKE_SOURCE_DIR}/ext/hera ${GUDHI_USER_VERSION_DIR}/ext/hera) +endif() set(GUDHI_DIRECTORIES "doc;example;concept;utilities") -- cgit v1.2.3 From bea81f2d7bc53876a6f071c919663261314965ab Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Thu, 4 Jun 2020 17:21:05 +0200 Subject: Use ssize_t from pybind11 --- src/python/gudhi/hera/bottleneck.cc | 10 ++++------ src/python/include/pybind11_diagram_utils.h | 8 ++++---- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/src/python/gudhi/hera/bottleneck.cc b/src/python/gudhi/hera/bottleneck.cc index f543613e..846a3525 100644 --- a/src/python/gudhi/hera/bottleneck.cc +++ b/src/python/gudhi/hera/bottleneck.cc @@ -8,18 +8,16 @@ * - YYYY/MM Author: Description of the modification */ +#include + +#ifdef _MSC_VER // https://github.com/grey-narn/hera/issues/3 // ssize_t is a non-standard type (well, posix) -// BaseTsd.h provides SSIZE_T on windows, this one should be the same there. -#ifdef _MSC_VER -#include -typedef std::ptrdiff_t ssize_t; +using py::ssize_t; #endif #include // Hera -#include - double bottleneck_distance(Dgm d1, Dgm d2, double delta) { // I *think* the call to request() has to be before releasing the GIL. 
diff --git a/src/python/include/pybind11_diagram_utils.h b/src/python/include/pybind11_diagram_utils.h index d9627258..2d5194f4 100644 --- a/src/python/include/pybind11_diagram_utils.h +++ b/src/python/include/pybind11_diagram_utils.h @@ -18,8 +18,8 @@ namespace py = pybind11; typedef py::array_t Dgm; // Get m[i,0] and m[i,1] as a pair -static auto pairify(void* p, ssize_t h, ssize_t w) { - return [=](ssize_t i){ +static auto pairify(void* p, py::ssize_t h, py::ssize_t w) { + return [=](py::ssize_t i){ char* birth = (char*)p + i * h; char* death = birth + w; return std::make_pair(*(double*)birth, *(double*)death); @@ -32,8 +32,8 @@ inline auto numpy_to_range_of_pairs(py::array_t dgm) { if((buf.ndim!=2 || buf.shape[1]!=2) && (buf.ndim!=1 || buf.shape[0]!=0)) throw std::runtime_error("Diagram must be an array of size n x 2"); // In the case of shape (0), avoid reading non-existing strides[1] even if we won't use it. - ssize_t stride1 = buf.ndim == 2 ? buf.strides[1] : 0; - auto cnt = boost::counting_range(0, buf.shape[0]); + py::ssize_t stride1 = buf.ndim == 2 ? buf.strides[1] : 0; + auto cnt = boost::counting_range(0, buf.shape[0]); return boost::adaptors::transform(cnt, pairify(buf.ptr, buf.strides[0], stride1)); // Be careful that the returned range cannot contain references to dead temporaries. } -- cgit v1.2.3 From a4598f043824db493369d3e78048139988bde9aa Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Fri, 5 Jun 2020 17:59:37 +0200 Subject: Update src/python/gudhi/alpha_complex.pyx Co-authored-by: Marc Glisse --- src/python/gudhi/alpha_complex.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index 5bc9ebc4..80e54da0 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -75,7 +75,7 @@ cdef class AlphaComplex: # The real cython constructor def __cinit__(self, points = None, off_file = '', complexity = 'safe'): - assert complexity == 'fast' or complexity == 'safe' or complexity == 'exact', "Alpha complex complexity can be 'fast', 'safe' or 'exact'" + assert complexity in ['fast', 'safe', 'exact'], "Alpha complex complexity can only be 'fast', 'safe' or 'exact'" self.fast = complexity == 'fast' self.exact = complexity == 'safe' -- cgit v1.2.3 From 841dac596c9a2ce8e1882e382a9cc1d003edfbee Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jun 2020 18:03:04 +0200 Subject: Code review: type is float --- src/python/doc/alpha_complex_user.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst index c1ed0eaa..a75a0347 100644 --- a/src/python/doc/alpha_complex_user.rst +++ b/src/python/doc/alpha_complex_user.rst @@ -25,7 +25,7 @@ Remarks make sense in higher dimension precisely because you can bound the radii. * Using the default :code:`complexity = 'safe'` makes the construction safe. If you pass :code:`complexity = 'exact'` to :func:`~gudhi.AlphaComplex.__init__`, the filtration values are the exact - ones converted to the filtration value type of the simplicial complex. This can be very slow. + ones converted to float. This can be very slow. 
If you pass :code:`complexity = 'safe'` (the default) or :code:`complexity = 'fast'`, the filtration values are only guaranteed to have a small multiplicative error compared to the exact value, see `CGAL::Lazy_exact_nt::set_relative_precision_of_to_double `_ -- cgit v1.2.3 From c8f5c9fc0691c8539ca164805f34554227061ba7 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jun 2020 18:07:25 +0200 Subject: Code review: no link to CGAL::Lazy_exact_nt and no guarantee on fast precision --- src/python/doc/alpha_complex_user.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst index a75a0347..bc9fe513 100644 --- a/src/python/doc/alpha_complex_user.rst +++ b/src/python/doc/alpha_complex_user.rst @@ -26,10 +26,9 @@ Remarks * Using the default :code:`complexity = 'safe'` makes the construction safe. If you pass :code:`complexity = 'exact'` to :func:`~gudhi.AlphaComplex.__init__`, the filtration values are the exact ones converted to float. This can be very slow. - If you pass :code:`complexity = 'safe'` (the default) or :code:`complexity = 'fast'`, the filtration values are only - guaranteed to have a small multiplicative error compared to the exact value, see - `CGAL::Lazy_exact_nt::set_relative_precision_of_to_double `_ - for details. A drawback, when computing persistence, is that an empty exact interval [10^12,10^12] may become a + If you pass :code:`complexity = 'safe'` (the default), the filtration values are only + guaranteed to have a small multiplicative error compared to the exact value. + A drawback, when computing persistence, is that an empty exact interval [10^12,10^12] may become a non-empty approximate interval [10^12,10^12+10^6]. Using :code:`complexity = 'fast'` makes the computations slightly faster, and the combinatorics are still exact, but the computation of filtration values can exceptionally be arbitrarily bad. 
In all cases, we still guarantee that the -- cgit v1.2.3 From 58cb456a33b4eec8ecc926457668d7d77d6662c3 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jun 2020 18:23:02 +0200 Subject: Code review: 'fast' class attribute was not necessary and redundant with C++ class attribute --- src/python/gudhi/alpha_complex.pyx | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index 80e54da0..3855f1ac 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -54,7 +54,6 @@ cdef class AlphaComplex: """ cdef Alpha_complex_interface * this_ptr - cdef bool fast cdef bool exact # Fake constructor that does nothing but documenting the constructor @@ -76,13 +75,13 @@ cdef class AlphaComplex: # The real cython constructor def __cinit__(self, points = None, off_file = '', complexity = 'safe'): assert complexity in ['fast', 'safe', 'exact'], "Alpha complex complexity can only be 'fast', 'safe' or 'exact'" - self.fast = complexity == 'fast' + cdef bool fast = complexity == 'fast' self.exact = complexity == 'safe' cdef vector[vector[double]] pts if off_file: if os.path.isfile(off_file): - self.this_ptr = new Alpha_complex_interface(off_file.encode('utf-8'), self.fast, True) + self.this_ptr = new Alpha_complex_interface(off_file.encode('utf-8'), fast, True) else: print("file " + off_file + " not found.") else: @@ -91,7 +90,7 @@ cdef class AlphaComplex: points=[] pts = points with nogil: - self.this_ptr = new Alpha_complex_interface(pts, self.fast) + self.this_ptr = new Alpha_complex_interface(pts, fast) def __dealloc__(self): if self.this_ptr != NULL: -- cgit v1.2.3 From e7ccfabc395352823c0330ef876b9ac0ef72e840 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jun 2020 18:31:26 +0200 Subject: code review: rename ph as point --- src/python/include/Alpha_complex_interface.h | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h index dce9c8e9..3ac5db1f 100644 --- a/src/python/include/Alpha_complex_interface.h +++ b/src/python/include/Alpha_complex_interface.h @@ -37,9 +37,9 @@ class Alpha_complex_interface { using Point_inexact_kernel = typename Inexact_kernel::Point_d; template - std::vector pt_cgal_to_cython(CgalPointType& ph) { + std::vector pt_cgal_to_cython(CgalPointType& point) { std::vector vd; - for (auto coord = ph.cartesian_begin(); coord != ph.cartesian_end(); coord++) + for (auto coord = point.cartesian_begin(); coord != point.cartesian_end(); coord++) vd.push_back(CGAL::to_double(*coord)); return vd; } @@ -71,11 +71,11 @@ class Alpha_complex_interface { std::vector get_point(int vh) { if (fast_version_) { - Point_inexact_kernel const& ph = ac_inexact_ptr_->get_point(vh); - return pt_cgal_to_cython(ph); + Point_inexact_kernel const& point = ac_inexact_ptr_->get_point(vh); + return pt_cgal_to_cython(point); } else { - Point_exact_kernel const& ph = ac_exact_ptr_->get_point(vh); - return pt_cgal_to_cython(ph); + Point_exact_kernel const& point = ac_exact_ptr_->get_point(vh); + return pt_cgal_to_cython(point); } } -- cgit v1.2.3 From 4f02ba9c5a3233ff9d4554578fbe3ae456b9711f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jun 2020 18:44:17 +0200 Subject: code review: rename complexity with precision --- src/python/doc/alpha_complex_user.rst | 8 ++++---- src/python/gudhi/alpha_complex.pyx | 14 +++++++------- 
src/python/test/test_alpha_complex.py | 22 +++++++++++----------- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst index bc9fe513..e8b4f25e 100644 --- a/src/python/doc/alpha_complex_user.rst +++ b/src/python/doc/alpha_complex_user.rst @@ -23,14 +23,14 @@ Remarks equivalent to the `Čech complex `_ and much smaller if you do not bound the radii. `Čech complex `_ can still make sense in higher dimension precisely because you can bound the radii. -* Using the default :code:`complexity = 'safe'` makes the construction safe. - If you pass :code:`complexity = 'exact'` to :func:`~gudhi.AlphaComplex.__init__`, the filtration values are the exact +* Using the default :code:`precision = 'safe'` makes the construction safe. + If you pass :code:`precision = 'exact'` to :func:`~gudhi.AlphaComplex.__init__`, the filtration values are the exact ones converted to float. This can be very slow. - If you pass :code:`complexity = 'safe'` (the default), the filtration values are only + If you pass :code:`precision = 'safe'` (the default), the filtration values are only guaranteed to have a small multiplicative error compared to the exact value. A drawback, when computing persistence, is that an empty exact interval [10^12,10^12] may become a non-empty approximate interval [10^12,10^12+10^6]. - Using :code:`complexity = 'fast'` makes the computations slightly faster, and the combinatorics are still exact, but + Using :code:`precision = 'fast'` makes the computations slightly faster, and the combinatorics are still exact, but the computation of filtration values can exceptionally be arbitrarily bad. In all cases, we still guarantee that the output is a valid filtration (faces have a filtration value no larger than their cofaces). * For performances reasons, it is advised to use Alpha_complex with `CGAL `_ :math:`\geq` 5.0.0. diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index 3855f1ac..d9c2be81 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -57,7 +57,7 @@ cdef class AlphaComplex: cdef bool exact # Fake constructor that does nothing but documenting the constructor - def __init__(self, points=None, off_file='', complexity='safe'): + def __init__(self, points=None, off_file='', precision='safe'): """AlphaComplex constructor. :param points: A list of points in d-Dimension. @@ -68,15 +68,15 @@ cdef class AlphaComplex: :param off_file: An OFF file style name. :type off_file: string - :param complexity: Alpha complex complexity can be 'fast', 'safe' or 'exact'. Default is 'safe'. - :type complexity: string + :param precision: Alpha complex precision can be 'fast', 'safe' or 'exact'. Default is 'safe'. 
+ :type precision: string """ # The real cython constructor - def __cinit__(self, points = None, off_file = '', complexity = 'safe'): - assert complexity in ['fast', 'safe', 'exact'], "Alpha complex complexity can only be 'fast', 'safe' or 'exact'" - cdef bool fast = complexity == 'fast' - self.exact = complexity == 'safe' + def __cinit__(self, points = None, off_file = '', precision = 'safe'): + assert precision in ['fast', 'safe', 'exact'], "Alpha complex precision can only be 'fast', 'safe' or 'exact'" + cdef bool fast = precision == 'fast' + self.exact = precision == 'safe' cdef vector[vector[double]] pts if off_file: diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py index 913397dd..943ad2c4 100755 --- a/src/python/test/test_alpha_complex.py +++ b/src/python/test/test_alpha_complex.py @@ -24,8 +24,8 @@ __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" -def _empty_alpha(complexity): - alpha_complex = AlphaComplex(points=[[0, 0]], complexity = complexity) +def _empty_alpha(precision): + alpha_complex = AlphaComplex(points=[[0, 0]], precision = precision) assert alpha_complex.__is_defined() == True def test_empty_alpha(): @@ -33,9 +33,9 @@ def test_empty_alpha(): _empty_alpha('safe') _empty_alpha('exact') -def _infinite_alpha(complexity): +def _infinite_alpha(precision): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - alpha_complex = AlphaComplex(points=point_list, complexity = complexity) + alpha_complex = AlphaComplex(points=point_list, precision = precision) assert alpha_complex.__is_defined() == True simplex_tree = alpha_complex.create_simplex_tree() @@ -88,9 +88,9 @@ def test_infinite_alpha(): _infinite_alpha('safe') _infinite_alpha('exact') -def _filtered_alpha(complexity): +def _filtered_alpha(precision): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_alpha = AlphaComplex(points=point_list, complexity = complexity) + filtered_alpha = AlphaComplex(points=point_list, precision = precision) simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25) @@ -132,7 +132,7 @@ def test_filtered_alpha(): _filtered_alpha('safe') _filtered_alpha('exact') -def _safe_alpha_persistence_comparison(complexity): +def _safe_alpha_persistence_comparison(precision): #generate periodic signal time = np.arange(0, 10, 1) signal = [math.sin(x) for x in time] @@ -144,10 +144,10 @@ def _safe_alpha_persistence_comparison(complexity): embedding2 = [[signal[i], delayed[i]] for i in range(len(time))] #build alpha complex and simplex tree - alpha_complex1 = AlphaComplex(points=embedding1, complexity = complexity) + alpha_complex1 = AlphaComplex(points=embedding1, precision = precision) simplex_tree1 = alpha_complex1.create_simplex_tree() - alpha_complex2 = AlphaComplex(points=embedding2, complexity = complexity) + alpha_complex2 = AlphaComplex(points=embedding2, precision = precision) simplex_tree2 = alpha_complex2.create_simplex_tree() diag1 = simplex_tree1.persistence() @@ -163,9 +163,9 @@ def test_safe_alpha_persistence_comparison(): _safe_alpha_persistence_comparison('safe') _safe_alpha_persistence_comparison('exact') -def _delaunay_complex(complexity): +def _delaunay_complex(precision): point_list = [[0, 0], [1, 0], [0, 1], [1, 1]] - filtered_alpha = AlphaComplex(points=point_list, complexity = complexity) + filtered_alpha = AlphaComplex(points=point_list, precision = precision) simplex_tree = filtered_alpha.create_simplex_tree(default_filtration_value = True) -- cgit v1.2.3 From 4a437177cb2b10b6462380c39739a923c08dc121 Mon Sep 17 
00:00:00 2001 From: Marc Glisse Date: Fri, 5 Jun 2020 19:33:53 +0200 Subject: Doc tweak by Vincent --- src/python/gudhi/bottleneck.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/gudhi/bottleneck.cc b/src/python/gudhi/bottleneck.cc index 9337ce59..838bf9eb 100644 --- a/src/python/gudhi/bottleneck.cc +++ b/src/python/gudhi/bottleneck.cc @@ -46,7 +46,7 @@ PYBIND11_MODULE(bottleneck, m) { bits of the mantissa may be wrong). This version of the algorithm takes advantage of the limited precision of `double` and is usually a lot faster to compute, whatever the value of `e`. - Thus, by default, `e` is the smallest positive double. + Thus, by default (`e=None`), `e` is the smallest positive double. :type e: float :rtype: float :returns: the bottleneck distance. -- cgit v1.2.3 From 82dc35dd749a1f388be268d7a7e3bd22f18afcf7 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Fri, 5 Jun 2020 20:16:38 +0200 Subject: Doc changes after Vincent's review --- src/python/doc/bottleneck_distance_user.rst | 5 +++-- src/python/gudhi/bottleneck.cc | 3 ++- src/python/gudhi/hera/bottleneck.cc | 3 +++ 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/python/doc/bottleneck_distance_user.rst b/src/python/doc/bottleneck_distance_user.rst index 49bd3706..6c6e08d9 100644 --- a/src/python/doc/bottleneck_distance_user.rst +++ b/src/python/doc/bottleneck_distance_user.rst @@ -10,7 +10,7 @@ Definition .. include:: bottleneck_distance_sum.inc This implementation by François Godi is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems" -:cite:`DBLP:journals/algorithmica/EfratIK01` and requires `CGAL `_. +:cite:`DBLP:journals/algorithmica/EfratIK01` and requires `CGAL `_ (`GPL v3 `_). .. autofunction:: gudhi.bottleneck_distance @@ -20,7 +20,8 @@ based on "Geometry Helps to Compare Persistence Diagrams" :cite:`Kerber:2017:GHC:3047249.3064175` by Michael Kerber, Dmitriy Morozov, and Arnur Nigmetov. -Beware that its approximation allows for a multiplicative error, while the function above uses an additive error. +.. warning:: + Beware that its approximation allows for a multiplicative error, while the function above uses an additive error. .. autofunction:: gudhi.hera.bottleneck_distance diff --git a/src/python/gudhi/bottleneck.cc b/src/python/gudhi/bottleneck.cc index 732cb9a8..59be6088 100644 --- a/src/python/gudhi/bottleneck.cc +++ b/src/python/gudhi/bottleneck.cc @@ -29,7 +29,8 @@ PYBIND11_MODULE(bottleneck, m) { py::arg("diagram_1"), py::arg("diagram_2"), py::arg("e") = (std::numeric_limits::min)(), R"pbdoc( - This function returns the point corresponding to a given vertex. + Compute the Bottleneck distance between two diagrams. + Points at infinity and on the diagonal are supported. :param diagram_1: The first diagram. :type diagram_1: numpy array of shape (m,2) diff --git a/src/python/gudhi/hera/bottleneck.cc b/src/python/gudhi/hera/bottleneck.cc index 846a3525..0cb562ce 100644 --- a/src/python/gudhi/hera/bottleneck.cc +++ b/src/python/gudhi/hera/bottleneck.cc @@ -40,6 +40,9 @@ PYBIND11_MODULE(bottleneck, m) { Compute the Bottleneck distance between two diagrams. Points at infinity are supported. + .. note:: + Points on the diagonal are not supported and must be filtered out before calling this function. 
+ Parameters: X (n x 2 numpy array): First diagram Y (n x 2 numpy array): Second diagram -- cgit v1.2.3 From cd0287d0d64a1227a665ff54f80a3e8ad44a7041 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Fri, 5 Jun 2020 20:24:46 +0200 Subject: Update hera --- ext/hera | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/hera b/ext/hera index 2c5e6c60..b73ed1fa 160000 --- a/ext/hera +++ b/ext/hera @@ -1 +1 @@ -Subproject commit 2c5e6c606ee37cd68bbe9f9915dba99f7677dd87 +Subproject commit b73ed1face2c609958556e6f2b7704bbd8aaa263 -- cgit v1.2.3 From 19dc69e6edaac076da5138fe091be367a3652057 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 5 Jun 2020 22:28:03 +0200 Subject: code review: exact version was not correct --- src/python/gudhi/alpha_complex.pyx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx index d9c2be81..a356384d 100644 --- a/src/python/gudhi/alpha_complex.pyx +++ b/src/python/gudhi/alpha_complex.pyx @@ -76,7 +76,7 @@ cdef class AlphaComplex: def __cinit__(self, points = None, off_file = '', precision = 'safe'): assert precision in ['fast', 'safe', 'exact'], "Alpha complex precision can only be 'fast', 'safe' or 'exact'" cdef bool fast = precision == 'fast' - self.exact = precision == 'safe' + self.exact = precision == 'exact' cdef vector[vector[double]] pts if off_file: -- cgit v1.2.3 From dacc3e363ded8b68bb4b71c1602e2c52b10b36e5 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Fri, 5 Jun 2020 22:29:32 +0200 Subject: author, etc --- src/python/gudhi/hera/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/python/gudhi/hera/__init__.py b/src/python/gudhi/hera/__init__.py index 044f81cd..f70b92b9 100644 --- a/src/python/gudhi/hera/__init__.py +++ b/src/python/gudhi/hera/__init__.py @@ -1,2 +1,7 @@ from .wasserstein import wasserstein_distance from .bottleneck import bottleneck_distance + + +__author__ = "Marc Glisse" +__copyright__ = "Copyright (C) 2020 Inria" +__license__ = "MIT" -- cgit v1.2.3 From 4559eccd8b1c3610f7a82a709929013303368506 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 6 Jun 2020 12:06:33 +0200 Subject: Try with devtoolset-8 and boost clone from git --- .github/workflows/pip-packaging-osx.yml | 4 +++- CMakeGUDHIVersion.txt | 2 +- Dockerfile_for_pip | 11 +++++++---- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index f30b89d8..1f669a5e 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -41,4 +41,6 @@ jobs: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* \ No newline at end of file + mkdir wheelhouse + delocate-listdeps build/src/python/dist/* && delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* + python -m twine upload --repository-url https://test.pypi.org/legacy/ wheelhouse/* \ No newline at end of file diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 95e67372..d1490bd3 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a7) +set (GUDHI_PATCH_VERSION 0a8) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git 
a/Dockerfile_for_pip b/Dockerfile_for_pip index 0f4fa8a0..8f60e37c 100644 --- a/Dockerfile_for_pip +++ b/Dockerfile_for_pip @@ -6,6 +6,7 @@ RUN yum -y update && yum -y install \ eigen3-devel \ mpfr-devel \ gmp-devel \ + devtoolset-8 \ && yum clean all RUN mkdir -p /opt/cmake \ @@ -13,13 +14,14 @@ RUN mkdir -p /opt/cmake \ && sh cmake-3.16.2-Linux-x86_64.sh --skip-license --prefix=/opt/cmake \ && rm -f cmake-3.16.2-Linux-x86_64.sh -RUN wget https://dl.bintray.com/boostorg/release/1.64.0/source/boost_1_64_0.tar.gz \ - && tar xf boost_1_64_0.tar.gz \ - && cd boost_1_64_0 \ +RUN git clone -b boost-1.73.0 --depth 1 https://github.com/boostorg/boost.git \ + && cd boost \ + && git submodule update --init \ && ./bootstrap.sh --with-libraries=filesystem,program_options,system,thread,test \ + && ./b2 headers \ && ./b2 install \ && cd .. \ - && rm -rf boost_1_64_0 + && rm -rf boost RUN wget https://github.com/CGAL/cgal/releases/download/releases%2FCGAL-5.0.2/CGAL-5.0.2.tar.xz \ && tar xf CGAL-5.0.2.tar.xz \ @@ -44,3 +46,4 @@ ENV PYTHON37="/opt/python/cp37-cp37m/" ENV PYTHON38="/opt/python/cp38-cp38/" ENV PATH="/opt/cmake/bin:${PATH}" +ENV PATH="/opt/rh/devtoolset-8/root/usr/bin:${PATH}" -- cgit v1.2.3 From 1a853df5745576f5e18b7c820c6cf5e5e2885169 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 6 Jun 2020 14:42:04 +0200 Subject: Needs to install delocate --- .github/workflows/pip-packaging-osx.yml | 2 +- CMakeGUDHIVersion.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 1f669a5e..68fae138 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -27,7 +27,7 @@ jobs: run: | brew update && brew install boost eigen gmp mpfr cgal python -m pip install --user -r .github/build-requirements.txt - python -m pip install --user twine + python -m pip install --user twine delocate - name: Build python wheel run: | python --version diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index d1490bd3..3834c59c 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a8) +set (GUDHI_PATCH_VERSION 0a9) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 37991af75b7834a40dc014518d1943eea802a16b Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 6 Jun 2020 14:55:30 +0200 Subject: Add some debug traces for osx --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 3834c59c..537feeb9 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a9) +set (GUDHI_PATCH_VERSION 0a10) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From e822b2f1e68feebccf9dd069447c5df5574dc650 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Sat, 6 Jun 2020 14:55:58 +0200 Subject: Add some debug traces for osx --- .github/workflows/pip-packaging-osx.yml | 5 ++++- CMakeGUDHIVersion.txt | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 68fae138..d7bfc66b 100644 
--- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -42,5 +42,8 @@ jobs: TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | mkdir wheelhouse - delocate-listdeps build/src/python/dist/* && delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* + type delocate-listdeps + type delocate-wheel + delocate-listdeps build/src/python/dist/* + delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* python -m twine upload --repository-url https://test.pypi.org/legacy/ wheelhouse/* \ No newline at end of file diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 537feeb9..c2c68a2e 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a10) +set (GUDHI_PATCH_VERSION 0a11) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 8f8b0965ac79f4051fc321a6f4714082624d6561 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 8 Jun 2020 08:02:20 +0200 Subject: Use full path for delocate --- .github/workflows/pip-packaging-osx.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index d7bfc66b..199e6f01 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -42,8 +42,6 @@ jobs: TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | mkdir wheelhouse - type delocate-listdeps - type delocate-wheel - delocate-listdeps build/src/python/dist/* - delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* + /Users/runner/.local/bin/delocate-listdeps build/src/python/dist/* + /Users/runner/.local/bin/delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* python -m twine upload --repository-url https://test.pypi.org/legacy/ wheelhouse/* \ No newline at end of file -- cgit v1.2.3 From 31e7f07ce852644f287f81e727eff7eef3756d12 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Mon, 8 Jun 2020 08:15:05 +0200 Subject: Forgot to increase release number --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index c2c68a2e..857b0056 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a11) +set (GUDHI_PATCH_VERSION 0a12) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From d46bfa4c50cc3bb52eaee5dffc6b798b0ae236f6 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 9 Jun 2020 07:49:45 +0200 Subject: move cython from install_requires to setup_requires --- CMakeGUDHIVersion.txt | 2 +- src/python/setup.py.in | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 857b0056..d2d98dea 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a12) +set (GUDHI_PATCH_VERSION 0a13) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/setup.py.in b/src/python/setup.py.in 
index dc163a37..875f1577 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -84,6 +84,6 @@ setup( long_description_content_type='text/x-rst', long_description=long_description, ext_modules = ext_modules, - install_requires = ['cython','numpy >= 1.9',], - setup_requires = ['numpy >= 1.9','pybind11',], + install_requires = ['numpy >= 1.9',], + setup_requires = ['cython','numpy >= 1.9','pybind11',], ) -- cgit v1.2.3 From fff2aee667a674b7da2889abdafd8cec360ef621 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 10 Jun 2020 08:08:50 +0200 Subject: Remove tbb to see if module import works better --- .github/workflows/pip-packaging-windows.yml | 2 +- CMakeGUDHIVersion.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 53abc036..96ac05c0 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -25,7 +25,7 @@ jobs: architecture: x64 - name: Install dependencies run: | - vcpkg install tbb:x64-windows boost-graph:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows + vcpkg install boost-graph:x64-windows boost-serialization:x64-windows boost-date-time:x64-windows boost-system:x64-windows boost-filesystem:x64-windows boost-units:x64-windows boost-thread:x64-windows boost-program-options:x64-windows eigen3:x64-windows mpfr:x64-windows mpir:x64-windows cgal:x64-windows python -m pip install --user -r .github/build-requirements.txt python -m pip install --user twine - name: Build python wheel diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index d2d98dea..d93850cd 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a13) +set (GUDHI_PATCH_VERSION 0a14) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From d52a7dc4c5d0a1a3de3cdb1074182079c0827c2f Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 10 Jun 2020 15:56:00 +0200 Subject: Add dependencies as artifacts to be able to download them --- .github/workflows/pip-packaging-windows.yml | 7 ++++++- CMakeGUDHIVersion.txt | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 96ac05c0..5d9eafe7 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -40,4 +40,9 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* \ No newline at end of file + run: python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* + - name: Upload artifacts + uses: actions/upload-artifact@v1 + with: + name: dependencies + path: c:/vcpkg/installed/x64-windows/lib/ diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index d93850cd..b00d82d0 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a14) +set 
(GUDHI_PATCH_VERSION 0a15) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 70caa9be348c87526e2d41194a3618d14633b7dc Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Wed, 10 Jun 2020 17:00:40 +0200 Subject: Add dependencies as artifacts to be able to download them --- .github/workflows/pip-packaging-windows.yml | 2 +- CMakeGUDHIVersion.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 5d9eafe7..95205258 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -45,4 +45,4 @@ jobs: uses: actions/upload-artifact@v1 with: name: dependencies - path: c:/vcpkg/installed/x64-windows/lib/ + path: c:/vcpkg/installed/x64-windows/bin/ diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index b00d82d0..4ad3921d 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a15) +set (GUDHI_PATCH_VERSION 0a16) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From e4a59f9ec6685534b03474cbcfe9395d601516e2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 11 Jun 2020 07:23:23 +0200 Subject: Add distributor the right to add dll to the package --- .github/workflows/pip-packaging-windows.yml | 7 ++----- CMakeGUDHIVersion.txt | 2 +- src/python/CMakeLists.txt | 5 ++++- src/python/gudhi/__init__.py.in | 3 +++ src/python/gudhi/_distributor_init.py | 18 ++++++++++++++++++ 5 files changed, 28 insertions(+), 7 deletions(-) create mode 100644 src/python/gudhi/_distributor_init.py diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 95205258..fe4d0227 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -35,14 +35,11 @@ jobs: cd build cmake -DCMAKE_BUILD_TYPE=Release -DGMP_INCLUDE_DIR="c:/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/vcpkg/installed/x64-windows/lib" -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DPython_ADDITIONAL_VERSIONS=3 .. 
cd src/python + cp c:/vcpkg/installed/x64-windows/bin/mpfr.dll .libs/ + cp c:/vcpkg/installed/x64-windows/bin/mpir.dll .libs/ python setup.py bdist_wheel - name: Publish on PyPi env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* - - name: Upload artifacts - uses: actions/upload-artifact@v1 - with: - name: dependencies - path: c:/vcpkg/installed/x64-windows/bin/ diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 4ad3921d..794b6ebf 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a16) +set (GUDHI_PATCH_VERSION 0a17) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index fee6b6f5..b35c8de7 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -235,8 +235,11 @@ if(PYTHONINTERP_FOUND) file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") - # Other .py files + # Some files for pip package file(COPY "introduction.rst" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") + file(COPY "gudhi/_distributor_init.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi/.libs") + message(" o o o ${CMAKE_CURRENT_BINARY_DIR}/gudhi/.libs") add_custom_command( OUTPUT gudhi.so diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in index 79e12fbc..e4198dcf 100644 --- a/src/python/gudhi/__init__.py.in +++ b/src/python/gudhi/__init__.py.in @@ -10,6 +10,9 @@ from importlib import import_module from sys import exc_info +# Allow distributors to run custom init code +from . import _distributor_init + __author__ = "GUDHI Editorial Board" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "https://gudhi.inria.fr/licensing/" diff --git a/src/python/gudhi/_distributor_init.py b/src/python/gudhi/_distributor_init.py new file mode 100644 index 00000000..0ed451f9 --- /dev/null +++ b/src/python/gudhi/_distributor_init.py @@ -0,0 +1,18 @@ +''' +Helper to preload windows dlls to prevent dll not found errors. +Once a DLL is preloaded, its namespace is made available to any subsequent DLL. 
+''' +import os +from ctypes import WinDLL +import glob +if os.name == 'nt': + # convention for storing / loading the DLL from gudhi/.libs/, if present + try: + basedir = os.path.dirname(__file__) + except: + pass + else: + libs_dir = os.path.abspath(os.path.join(basedir, '.libs')) + if os.path.isdir(libs_dir): + for filename in glob.glob(os.path.join(libs_dir, '*dll')): + WinDLL(os.path.abspath(filename)) -- cgit v1.2.3 From c04ff4c1f5c6aa7115c2e0e57f5ecc3ebb153fb2 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 11 Jun 2020 07:48:59 +0200 Subject: bad destination --- .github/workflows/pip-packaging-windows.yml | 4 ++-- CMakeGUDHIVersion.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index fe4d0227..f229b74f 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -35,8 +35,8 @@ jobs: cd build cmake -DCMAKE_BUILD_TYPE=Release -DGMP_INCLUDE_DIR="c:/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/vcpkg/installed/x64-windows/lib" -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DPython_ADDITIONAL_VERSIONS=3 .. cd src/python - cp c:/vcpkg/installed/x64-windows/bin/mpfr.dll .libs/ - cp c:/vcpkg/installed/x64-windows/bin/mpir.dll .libs/ + cp c:/vcpkg/installed/x64-windows/bin/mpfr.dll gudhi/.libs/ + cp c:/vcpkg/installed/x64-windows/bin/mpir.dll gudhi/.libs/ python setup.py bdist_wheel - name: Publish on PyPi env: diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 794b6ebf..fab3ee75 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a17) +set (GUDHI_PATCH_VERSION 0a18) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 0f7cc48907e1b28e114d37e3bd30a2237d9f6c8c Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 11 Jun 2020 08:09:01 +0200 Subject: cannot import WinDLL from ctypes on non-windows systems --- CMakeGUDHIVersion.txt | 2 +- src/python/gudhi/_distributor_init.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index fab3ee75..21f3d58c 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a18) +set (GUDHI_PATCH_VERSION 0a19) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/gudhi/_distributor_init.py b/src/python/gudhi/_distributor_init.py index 0ed451f9..6f3dc8a2 100644 --- a/src/python/gudhi/_distributor_init.py +++ b/src/python/gudhi/_distributor_init.py @@ -3,8 +3,6 @@ Helper to preload windows dlls to prevent dll not found errors. Once a DLL is preloaded, its namespace is made available to any subsequent DLL. 
''' import os -from ctypes import WinDLL -import glob if os.name == 'nt': # convention for storing / loading the DLL from gudhi/.libs/, if present try: @@ -14,5 +12,7 @@ if os.name == 'nt': else: libs_dir = os.path.abspath(os.path.join(basedir, '.libs')) if os.path.isdir(libs_dir): + from ctypes import WinDLL + import glob for filename in glob.glob(os.path.join(libs_dir, '*dll')): WinDLL(os.path.abspath(filename)) -- cgit v1.2.3 From aec85514aeaeac51d1589e6a24c4c393df892f91 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Thu, 11 Jun 2020 12:03:49 +0200 Subject: Upload dlls from gudhi directory --- .github/workflows/pip-packaging-windows.yml | 4 ++-- CMakeGUDHIVersion.txt | 2 +- src/python/CMakeLists.txt | 3 --- src/python/gudhi/__init__.py.in | 3 --- src/python/gudhi/_distributor_init.py | 18 ------------------ src/python/setup.py.in | 1 + 6 files changed, 4 insertions(+), 27 deletions(-) delete mode 100644 src/python/gudhi/_distributor_init.py diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index f229b74f..d529d4f3 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -35,8 +35,8 @@ jobs: cd build cmake -DCMAKE_BUILD_TYPE=Release -DGMP_INCLUDE_DIR="c:/vcpkg/installed/x64-windows/include" -DGMP_LIBRARIES="c:/vcpkg/installed/x64-windows/lib/mpir.lib" -DGMP_LIBRARIES_DIR="c:/vcpkg/installed/x64-windows/lib" -DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake -DPython_ADDITIONAL_VERSIONS=3 .. cd src/python - cp c:/vcpkg/installed/x64-windows/bin/mpfr.dll gudhi/.libs/ - cp c:/vcpkg/installed/x64-windows/bin/mpir.dll gudhi/.libs/ + cp c:/vcpkg/installed/x64-windows/bin/mpfr.dll gudhi/ + cp c:/vcpkg/installed/x64-windows/bin/mpir.dll gudhi/ python setup.py bdist_wheel - name: Publish on PyPi env: diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 21f3d58c..85d69190 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a19) +set (GUDHI_PATCH_VERSION 0a20) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index b35c8de7..966f5ef0 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -237,9 +237,6 @@ if(PYTHONINTERP_FOUND) # Some files for pip package file(COPY "introduction.rst" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") - file(COPY "gudhi/_distributor_init.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") - file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi/.libs") - message(" o o o ${CMAKE_CURRENT_BINARY_DIR}/gudhi/.libs") add_custom_command( OUTPUT gudhi.so diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in index e4198dcf..79e12fbc 100644 --- a/src/python/gudhi/__init__.py.in +++ b/src/python/gudhi/__init__.py.in @@ -10,9 +10,6 @@ from importlib import import_module from sys import exc_info -# Allow distributors to run custom init code -from . 
import _distributor_init - __author__ = "GUDHI Editorial Board" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "https://gudhi.inria.fr/licensing/" diff --git a/src/python/gudhi/_distributor_init.py b/src/python/gudhi/_distributor_init.py deleted file mode 100644 index 6f3dc8a2..00000000 --- a/src/python/gudhi/_distributor_init.py +++ /dev/null @@ -1,18 +0,0 @@ -''' -Helper to preload windows dlls to prevent dll not found errors. -Once a DLL is preloaded, its namespace is made available to any subsequent DLL. -''' -import os -if os.name == 'nt': - # convention for storing / loading the DLL from gudhi/.libs/, if present - try: - basedir = os.path.dirname(__file__) - except: - pass - else: - libs_dir = os.path.abspath(os.path.join(basedir, '.libs')) - if os.path.isdir(libs_dir): - from ctypes import WinDLL - import glob - for filename in glob.glob(os.path.join(libs_dir, '*dll')): - WinDLL(os.path.abspath(filename)) diff --git a/src/python/setup.py.in b/src/python/setup.py.in index 875f1577..07ad818f 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -86,4 +86,5 @@ setup( ext_modules = ext_modules, install_requires = ['numpy >= 1.9',], setup_requires = ['cython','numpy >= 1.9','pybind11',], + package_data={"": ["*.dll"], }, ) -- cgit v1.2.3 From cae2ab045cfb801c8f0990bf96c6a52b8c7bac65 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 12 Jun 2020 07:42:45 +0200 Subject: Release candidate 1 on pypi.org, no more test --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 85d69190..a06e871c 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0a20) +set (GUDHI_PATCH_VERSION 0rc1) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 891c4ab74faa07e0e5a10cc054cf57e5f77fdc95 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 12 Jun 2020 08:07:14 +0200 Subject: No more on test.pypi.org --- .github/workflows/pip-packaging-linux.yml | 8 ++++---- .github/workflows/pip-packaging-osx.yml | 2 +- .github/workflows/pip-packaging-windows.yml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml index 6b099243..f82122b4 100644 --- a/.github/workflows/pip-packaging-linux.yml +++ b/.github/workflows/pip-packaging-linux.yml @@ -54,7 +54,7 @@ jobs: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_35/src/python/wheelhouse/* - $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_36/src/python/wheelhouse/* - $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_37/src/python/wheelhouse/* - $PYTHON38/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ build_38/src/python/wheelhouse/* \ No newline at end of file + $PYTHON38/bin/python -m twine upload build_35/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload build_36/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload build_37/src/python/wheelhouse/* + $PYTHON38/bin/python -m twine upload build_38/src/python/wheelhouse/* \ No newline at end of file diff --git 
a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 199e6f01..6861b65a 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -44,4 +44,4 @@ jobs: mkdir wheelhouse /Users/runner/.local/bin/delocate-listdeps build/src/python/dist/* /Users/runner/.local/bin/delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* - python -m twine upload --repository-url https://test.pypi.org/legacy/ wheelhouse/* \ No newline at end of file + python -m twine upload wheelhouse/* \ No newline at end of file diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index d529d4f3..68add90a 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -42,4 +42,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: python -m twine upload --repository-url https://test.pypi.org/legacy/ build/src/python/dist/* + run: python -m twine upload build/src/python/dist/* -- cgit v1.2.3 From 472c8d61db3e918ed6ad647e0330fdb68c7d5ac5 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 12 Jun 2020 08:53:24 +0200 Subject: Official version 3.2.0 as tests on OSx, Win and Linux pass --- CMakeGUDHIVersion.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index a06e871c..ac89fa4d 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,6 +1,6 @@ set (GUDHI_MAJOR_VERSION 3) set (GUDHI_MINOR_VERSION 2) -set (GUDHI_PATCH_VERSION 0rc1) +set (GUDHI_PATCH_VERSION 0) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) message(STATUS "GUDHI version : ${GUDHI_VERSION}") -- cgit v1.2.3 From 4ae747be301f1f159f36bd9d3c103774e1ad80ac Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Fri, 12 Jun 2020 09:19:19 +0200 Subject: Build pip packages on releases --- .github/workflows/pip-packaging-linux.yml | 8 ++------ .github/workflows/pip-packaging-osx.yml | 7 ++----- .github/workflows/pip-packaging-windows.yml | 7 ++----- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml index f82122b4..bd524af9 100644 --- a/.github/workflows/pip-packaging-linux.yml +++ b/.github/workflows/pip-packaging-linux.yml @@ -1,12 +1,8 @@ name: pip packaging linux on: - push: {} - pull_request: {} - -# on: -# release: -# types: [published] + release: + types: [published] jobs: build: diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 6861b65a..85c3c807 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -1,11 +1,8 @@ name: pip packaging osx on: - push: {} - pull_request: {} -# on: -# release: -# types: [published] + release: + types: [published] jobs: build: diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 68add90a..1cadf6b1 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -1,11 +1,8 @@ name: pip packaging windows on: - push: {} - pull_request: {} -# on: -# release: -# types: [published] + release: + types: [published] jobs: build: -- cgit v1.2.3 From b50a41f53af6937c6f030d08f2b969b9024e17ac Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Fri, 12 Jun 2020 23:10:37 +0200 Subject: Mention pip package --- 
src/python/doc/installation.rst | 37 ++++++++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index a66e910e..525ca84e 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -5,20 +5,47 @@ Installation ############ -Conda -***** -The easiest way to install the Python version of GUDHI is using -`conda `_. +Packages +******** +The easiest way to install the Python version of GUDHI is using pre-built packages. +We recommend `conda `_ + +.. code-block:: bash + + conda install -c conda-forge gudhi + +Gudhi is also available on `PyPI `_ + +.. code-block:: bash + + pip install gudhi + +Third party packages are also available, for instance on Debian or Ubuntu + +.. code-block:: bash + + apt install python3-gudhi + +In all cases, you may still want to install some of the optional `run time dependencies`_. Compiling ********* +These instructions are for people who want to compile gudhi from source, they are +unnecessary if you installed a binary package of Gudhi as above. They assume that +you have downloaded a `release `_, +with a name like `gudhi.3.2.0.tar.gz`, then run `tar xf gudhi.3.2.0.tar.gz`, which +created a directory `gudhi.3.2.0`, hereinafter referred to as `/path-to-gudhi/`. +If you are instead using a git checkout, beware that the paths are a bit +different, and in particular the `python/` subdirectory is actually `src/python/` +there. + The library uses c++14 and requires `Boost `_ :math:`\geq` 1.56.0, `CMake `_ :math:`\geq` 3.1 to generate makefiles, `NumPy `_, `Cython `_ and `pybind11 `_ to compile the GUDHI Python module. It is a multi-platform library and compiles on Linux, Mac OSX and Visual -Studio 2017. +Studio 2017 or later. On `Windows `_ , only Python :math:`\geq` 3.5 are available because of the required Visual Studio version. -- cgit v1.2.3 From 509963769e6e9d29e70dcc20118ea29f785b0a51 Mon Sep 17 00:00:00 2001 From: ROUVREAU Vincent Date: Tue, 16 Jun 2020 10:38:03 +0200 Subject: Fix #352 --- Dockerfile_for_circleci_image | 2 +- Dockerfile_gudhi_installation | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile_for_circleci_image b/Dockerfile_for_circleci_image index c2e8a8f5..464097e7 100644 --- a/Dockerfile_for_circleci_image +++ b/Dockerfile_for_circleci_image @@ -1,4 +1,4 @@ -FROM ubuntu:19.04 +FROM ubuntu:20.04 # Update and upgrade distribution RUN apt-get update && \ diff --git a/Dockerfile_gudhi_installation b/Dockerfile_gudhi_installation index 461a8a19..996dd06b 100644 --- a/Dockerfile_gudhi_installation +++ b/Dockerfile_gudhi_installation @@ -1,4 +1,4 @@ -FROM ubuntu:19.04 +FROM ubuntu:20.04 # Update and upgrade distribution RUN apt-get update && \ -- cgit v1.2.3
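Editorial note (not part of any patch above): taken together, the alpha-complex commits rename the ``complexity`` constructor argument to ``precision`` (accepting ``'fast'``, ``'safe'`` or ``'exact'``), and the bottleneck commits document both the CGAL-based ``gudhi.bottleneck_distance`` (additive error, diagonal and infinite points supported) and the Hera-based ``gudhi.hera.bottleneck_distance`` (multiplicative error, diagonal points not supported). The sketch below only illustrates how these pieces could be combined; it assumes a GUDHI :math:`\geq` 3.2.0 build with both the CGAL bottleneck module and the Hera bindings available, and the point cloud is made up for the example.

.. code-block:: python

    import gudhi
    import gudhi.hera

    # Hypothetical 2D point cloud, only meant to exercise the API touched by these patches.
    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]

    # 'precision' replaces the former 'complexity' argument: 'fast', 'safe' (default) or 'exact'.
    st_fast = gudhi.AlphaComplex(points=points, precision='fast').create_simplex_tree()
    st_exact = gudhi.AlphaComplex(points=points, precision='exact').create_simplex_tree()
    st_fast.persistence()
    st_exact.persistence()

    # 0-dimensional diagrams as (n, 2) arrays of (birth, death) pairs.
    diag_fast = st_fast.persistence_intervals_in_dimension(0)
    diag_exact = st_exact.persistence_intervals_in_dimension(0)

    # CGAL-based distance: additive error; points at infinity and on the diagonal are supported.
    print(gudhi.bottleneck_distance(diag_fast, diag_exact))

    # Hera-based distance: multiplicative error, and diagonal points are not supported,
    # so drop any zero-length intervals before the call.
    diag_fast = diag_fast[diag_fast[:, 0] != diag_fast[:, 1]]
    diag_exact = diag_exact[diag_exact[:, 0] != diag_exact[:, 1]]
    print(gudhi.hera.bottleneck_distance(diag_fast, diag_exact))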