From 11aebf8f6ac0fe32c7909cedda6b383329f1fd6d Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Sun, 1 Nov 2020 22:02:59 +0100 Subject: added code, doc and test --- src/python/CMakeLists.txt | 3 +- src/python/gudhi/differentiation/__init__.py | 3 + src/python/gudhi/differentiation/tensorflow.py | 251 +++++++++++++++++++++++++ src/python/test/simplextree.txt | 21 +++ src/python/test/test_diff.py | 41 ++++ 5 files changed, 318 insertions(+), 1 deletion(-) create mode 100644 src/python/gudhi/differentiation/__init__.py create mode 100644 src/python/gudhi/differentiation/tensorflow.py create mode 100644 src/python/test/simplextree.txt create mode 100644 src/python/test/test_diff.py diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index c09996fe..44b6a93c 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -252,7 +252,8 @@ if(PYTHONINTERP_FOUND) # Other .py files file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/representations" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi/") - file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + file(COPY "gudhi/differentiation" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/clustering" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi" FILES_MATCHING PATTERN "*.py") file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") diff --git a/src/python/gudhi/differentiation/__init__.py b/src/python/gudhi/differentiation/__init__.py new file mode 100644 index 00000000..0260ed97 --- /dev/null +++ b/src/python/gudhi/differentiation/__init__.py @@ -0,0 +1,3 @@ +from .tensorflow import * + +__all__ = ["SimplexTreeModel", "RipsModel", "CubicalModel"] diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py new file mode 100644 index 00000000..d3a35e62 --- /dev/null +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -0,0 +1,251 @@ +import numpy as np +import tensorflow as tf +import tensorflow_addons as tfa +from ..simplex_tree import SimplexTree +from ..rips_complex import RipsComplex +from ..cubical_complex import CubicalComplex + +# In this file, we write functions based on the Gudhi library that compute persistence diagrams associated to +# different filtrations (lower star, Rips, cubical), as well as the corresponding positive and negative +# simplices. We also wrap these functions into Tensorflow models. + + + +######################################### +# Lower star filtration on simplex tree # +######################################### + +# The parameters of the model are the vertex function values of the simplex tree. 
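+# Illustrative usage sketch (added for exposition, mirroring the test added in
+# this patch; not part of the original module): optimizing the vertex values of
+# a fixed simplex tree. It assumes numpy (np) and tensorflow (tf) are imported
+# and that "simplextree.txt" (one simplex per line, vertex ids separated by
+# spaces) is in the working directory.
+#
+#   F = tf.Variable(np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32), trainable=True)
+#   model = SimplexTreeModel(F, stbase="simplextree.txt", dim=0, card=10)
+#   with tf.GradientTape() as tape:
+#       dgm = model.call()
+#       # total squared persistence, a differentiable function of F
+#       loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+#   gradients = tape.gradient(loss, [F])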
+ +def _SimplexTree(stbase, fct, dim, card): + # Parameters: stbase (array containing the name of the file where the simplex tree is located) + # fct (function values on the vertices of stbase), + # dim (homological dimension), + # card (number of persistence diagram points, sorted by distance-to-diagonal) + + # Copy stbase in another simplex tree st + st = SimplexTree() + f = open(stbase[0], "r") + for line in f: + ints = line.split(" ") + s = [int(v) for v in ints[:-1]] + st.insert(s, -1e10) + f.close() + + # Assign new filtration values + for i in range(st.num_vertices()): + st.assign_filtration([i], fct[i]) + st.make_filtration_non_decreasing() + + # Compute persistence diagram + dgm = st.persistence() + + # Get vertex pairs for optimization. First, get all simplex pairs + pairs = st.persistence_pairs() + + # Then, loop over all simplex pairs + indices, pers = [], [] + for s1, s2 in pairs: + # Select pairs with good homological dimension and finite lifetime + if len(s1) == dim+1 and len(s2) > 0: + # Get IDs of the vertices corresponding to the filtration values of the simplices + l1, l2 = np.array(s1), np.array(s2) + i1 = l1[np.argmax(fct[l1])] + i2 = l2[np.argmax(fct[l2])] + indices.append(i1) + indices.append(i2) + # Compute lifetime + pers.append(st.filtration(s2) - st.filtration(s1)) + + # Sort vertex pairs wrt lifetime + perm = np.argsort(pers) + indices = list(np.reshape(indices, [-1,2])[perm][::-1,:].flatten()) + + # Pad vertex pairs + indices = indices[:2*card] + [0 for _ in range(0,max(0,2*card-len(indices)))] + return list(np.array(indices, dtype=np.int32)) + +class SimplexTreeModel(tf.keras.Model): + """ + TensorFlow model for computing lower-star persistence out of a simplex tree. + + Attributes: + F (TensorFlow variable): filter function values over the vertices of the simplex tree + stbase (string): path to the file containing the simplex tree + card (int): maximum number of points in the persistence diagram + dim (int): homology dimension + """ + def __init__(self, F, stbase="simplextree.txt", dim=0, card=50): + super(SimplexTreeModel, self).__init__() + self.F = F + self.dim = dim + self.card = card + self.st = stbase + + def call(self): + d, c = self.dim, self.card + st, fct = self.st, self.F + + # Turn STPers into a numpy function + SimplexTreeTF = lambda fct: tf.numpy_function(_SimplexTree, [np.array([st], dtype=str), fct, d, c], [tf.int32 for _ in range(2*c)]) + + # Don't try to compute gradients for the vertex pairs + fcts = tf.reshape(fct, [1, self.F.shape[0]]) + inds = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(SimplexTreeTF, + fcts, dtype=[tf.int32 for _ in range(2*c)])) + + # Get persistence diagram + self.dgm = tf.reshape(tf.gather_nd(self.F, inds), [c,2]) + return self.dgm + + + + + + + + + + +############################ +# Vietoris-Rips filtration # +############################ + +# The parameters of the model are the point coordinates. 
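+# Illustrative usage sketch (added for exposition, mirroring test_diff.py in
+# this patch; not part of the original module): the point coordinates X are the
+# trainable parameters, and gradients of a diagram-based loss flow back to them
+# through the distance matrix.
+#
+#   X = tf.Variable(np.array([[1.,1.],[2.,2.]], dtype=np.float32), trainable=True)
+#   model = RipsModel(X=X, mel=2., dim=0, card=10)
+#   with tf.GradientTape() as tape:
+#       dgm = model.call()
+#       loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+#   gradients = tape.gradient(loss, [X])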
+ +def _Rips(DX, mel, dim, card): + # Parameters: DX (distance matrix), + # mel (maximum edge length for Rips filtration), + # dim (homological dimension), + # card (number of persistence diagram points, sorted by distance-to-diagonal) + + # Compute the persistence pairs with Gudhi + rc = RipsComplex(distance_matrix=DX, max_edge_length=mel) + st = rc.create_simplex_tree(max_dimension=dim+1) + dgm = st.persistence() + pairs = st.persistence_pairs() + + # Retrieve vertices v_a and v_b by picking the ones achieving the maximal + # distance among all pairwise distances between the simplex vertices + indices, pers = [], [] + for s1, s2 in pairs: + if len(s1) == dim+1 and len(s2) > 0: + l1, l2 = np.array(s1), np.array(s2) + i1 = [s1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(s1), len(s1)])] + i2 = [s2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(s2), len(s2)])] + indices += i1 + indices += i2 + pers.append(st.filtration(s2) - st.filtration(s1)) + + # Sort points with distance-to-diagonal + perm = np.argsort(pers) + indices = list(np.reshape(indices, [-1,4])[perm][::-1,:].flatten()) + + # Output indices + indices = indices[:4*card] + [0 for _ in range(0,max(0,4*card-len(indices)))] + return list(np.array(indices, dtype=np.int32)) + +class RipsModel(tf.keras.Model): + """ + TensorFlow model for computing Rips persistence out of a point cloud. + + Attributes: + X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] + mel (float): maximum edge length for the Rips complex + card (int): maximum number of points in the persistence diagram + dim (int): homology dimension + """ + def __init__(self, X, mel=12, dim=1, card=50): + super(RipsModel, self).__init__() + self.X = X + self.mel = mel + self.dim = dim + self.card = card + + def call(self): + m, d, c = self.mel, self.dim, self.card + + # Compute distance matrix + DX = tfa.losses.metric_learning.pairwise_distance(self.X) + DXX = tf.reshape(DX, [1, DX.shape[0], DX.shape[1]]) + + # Turn numpy function into tensorflow function + RipsTF = lambda DX: tf.numpy_function(_Rips, [DX, m, d, c], [tf.int32 for _ in range(4*c)]) + + # Compute vertices associated to positive and negative simplices + # Don't compute gradient for this operation + ids = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(RipsTF,DXX,dtype=[tf.int32 for _ in range(4*c)])) + + # Get persistence diagram by simply picking the corresponding entries in the distance matrix + dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(ids, [2*c,2])), [c,2]) + return dgm + + + + + + + + + +###################### +# Cubical filtration # +###################### + +# The parameters of the model are the pixel values. 
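+# Illustrative usage sketch (added for exposition, mirroring test_diff.py in
+# this patch; not part of the original module): the pixel values are the
+# trainable parameters of the cubical model.
+#
+#   X = tf.Variable(np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32), trainable=True)
+#   model = CubicalModel(X, dim=0, card=10)
+#   with tf.GradientTape() as tape:
+#       dgm = model.call()
+#       loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+#   gradients = tape.gradient(loss, [X])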
+ +def _Cubical(X, dim, card): + # Parameters: X (image), + # dim (homological dimension), + # card (number of persistence diagram points, sorted by distance-to-diagonal) + + # Compute the persistence pairs with Gudhi + cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) + cc.persistence() + cof = cc.cofaces_of_persistence_pairs()[0][dim] + + # Sort points with distance-to-diagonal + Xs = X.shape + pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] + perm = np.argsort(pers) + cof = cof[perm[::-1]] + + # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices + D = len(Xs) + ocof = np.array([0 for _ in range(D*card*2)]) + count = 0 + for idx in range(0,min(2*card, 2*cof.shape[0]),2): + ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) + ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) + count += 1 + return list(np.array(ocof, dtype=np.int32)) + +class CubicalModel(tf.keras.Model): + """ + TensorFlow model for computing cubical persistence out of a cubical complex. + + Attributes: + X (TensorFlow variable): pixel values of the cubical complex + card (int): maximum number of points in the persistence diagram + dim (int): homology dimension + """ + def __init__(self, X, dim=1, card=50): + super(CubicalModel, self).__init__() + self.X = X + self.dim = dim + self.card = card + + def call(self): + d, c, D = self.dim, self.card, len(self.X.shape) + XX = tf.reshape(self.X, [1, self.X.shape[0], self.X.shape[1]]) + + # Turn numpy function into tensorflow function + CbTF = lambda X: tf.numpy_function(_Cubical, [X, d, c], [tf.int32 for _ in range(2*D*c)]) + + # Compute pixels associated to positive and negative simplices + # Don't compute gradient for this operation + inds = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(CbTF,XX,dtype=[tf.int32 for _ in range(2*D*c)])) + + # Get persistence diagram by simply picking the corresponding entries in the image + dgm = tf.reshape(tf.gather_nd(self.X, tf.reshape(inds, [-1,D])), [-1,2]) + return dgm diff --git a/src/python/test/simplextree.txt b/src/python/test/simplextree.txt new file mode 100644 index 00000000..e0dfcdd9 --- /dev/null +++ b/src/python/test/simplextree.txt @@ -0,0 +1,21 @@ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +0 1 +1 2 +2 3 +3 4 +4 5 +5 6 +6 7 +7 8 +8 9 +9 10 diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py new file mode 100644 index 00000000..56277b74 --- /dev/null +++ b/src/python/test/test_diff.py @@ -0,0 +1,41 @@ +from gudhi.differentiation import * +import numpy as np +import tensorflow as tf + +def test_rips_diff(): + + Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32) + X = tf.Variable(initial_value=Xinit, trainable=True) + model = RipsModel(X=X, mel=2., dim=0, card=10) + + with tf.GradientTape() as tape: + dgm = model.call() + loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) + assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6 + + +def test_cubical_diff(): + + Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) + X = tf.Variable(initial_value=Xinit, trainable=True) + model = CubicalModel(X, dim=0, card=10) + + with tf.GradientTape() as tape: + dgm = model.call() + loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) + assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 + +def test_st_diff(): + + Finit = 
np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) + F = tf.Variable(initial_value=Finit, trainable=True) + model = SimplexTreeModel(F, stbase="simplextree.txt", dim=0, card=10) + + with tf.GradientTape() as tape: + dgm = model.call() + loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [F]) + assert np.array_equal(np.array(grads[0].indices), np.array([2,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])) + assert np.array_equal(np.array(grads[0].values), np.array([-1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])) -- cgit v1.2.3 From e1cb6364264e04404d49b8279edcd085bab3e5f4 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 7 Dec 2020 21:03:22 +0100 Subject: added doc --- src/python/CMakeLists.txt | 6 ++++++ src/python/doc/differentiation.rst | 18 ++++++++++++++++++ src/python/doc/differentiation_sum.inc | 14 ++++++++++++++ src/python/doc/img/ripsTF.png | Bin 0 -> 38696 bytes 4 files changed, 38 insertions(+) create mode 100644 src/python/doc/differentiation.rst create mode 100644 src/python/doc/differentiation_sum.inc create mode 100644 src/python/doc/img/ripsTF.png diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 45530e41..d9af9ec5 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -65,6 +65,7 @@ if(PYTHONINTERP_FOUND) set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_strong_witness_complex', ") # Modules that should not be auto-imported in __init__.py set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'representations', ") + set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'differentiation', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'wasserstein', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'point_cloud', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'weighted_rips_complex', ") @@ -509,6 +510,11 @@ if(PYTHONINTERP_FOUND) add_gudhi_py_test(test_representations) endif() + # Differentiation + if(TENSORFLOW_FOUND) + add_gudhi_py_test(test_diff) + endif() + # Time Delay add_gudhi_py_test(test_time_delay) diff --git a/src/python/doc/differentiation.rst b/src/python/doc/differentiation.rst new file mode 100644 index 00000000..906a9965 --- /dev/null +++ b/src/python/doc/differentiation.rst @@ -0,0 +1,18 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +====================== +Differentiation manual +====================== + +.. include:: differentiation_sum.inc + +In this module, we provide neural network models for computing persistent homology. In particular, we provide TensorFlow 2 models that allow to compute persistence diagrams from complexes available in the Gudhi library, including simplex trees, cubical complexes and Vietoris-Rips complexes. These models can be incorporated at each step of a given neural network architecture, and can be used in addition to `PersLay `_ to produce topological features. + +TensorFlow models +----------------- +.. automodule:: gudhi.differentiation + :members: + :special-members: + :show-inheritance: diff --git a/src/python/doc/differentiation_sum.inc b/src/python/doc/differentiation_sum.inc new file mode 100644 index 00000000..30188e0b --- /dev/null +++ b/src/python/doc/differentiation_sum.inc @@ -0,0 +1,14 @@ +.. table:: + :widths: 30 40 30 + + +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+ + | .. 
figure:: | Deep learning models for differentiating persistence diagrams. | :Author: Mathieu Carrière | + | img/ripsTF.png | | | + | | | :Since: GUDHI 3.1.0 | + | | | | + | | | :License: MIT | + | | | | + | | | :Requires: `TensorFlow 2 `_ | + +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+ + | * :doc:`differentiation` | + +------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/img/ripsTF.png b/src/python/doc/img/ripsTF.png new file mode 100644 index 00000000..3c5c77a7 Binary files /dev/null and b/src/python/doc/img/ripsTF.png differ -- cgit v1.2.3 From 75721d65e870162df3ab0fa59c46fc357302a58a Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 15 Dec 2020 20:31:36 +0100 Subject: small change of model name + doc --- src/python/gudhi/differentiation/__init__.py | 2 +- src/python/gudhi/differentiation/tensorflow.py | 6 +++--- src/python/test/test_diff.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/python/gudhi/differentiation/__init__.py b/src/python/gudhi/differentiation/__init__.py index 0260ed97..6793e904 100644 --- a/src/python/gudhi/differentiation/__init__.py +++ b/src/python/gudhi/differentiation/__init__.py @@ -1,3 +1,3 @@ from .tensorflow import * -__all__ = ["SimplexTreeModel", "RipsModel", "CubicalModel"] +__all__ = ["LowerStarSimplexTreeModel", "RipsModel", "CubicalModel"] diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index d3a35e62..372172c4 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -65,13 +65,13 @@ def _SimplexTree(stbase, fct, dim, card): indices = indices[:2*card] + [0 for _ in range(0,max(0,2*card-len(indices)))] return list(np.array(indices, dtype=np.int32)) -class SimplexTreeModel(tf.keras.Model): +class LowerStarSimplexTreeModel(tf.keras.Model): """ - TensorFlow model for computing lower-star persistence out of a simplex tree. + TensorFlow model for computing lower-star persistence out of a simplex tree. Since simplex trees cannot be easily encoded as TensorFlow variables, the model takes as input a path to a file containing the simplex tree simplices, and read it each time the simplex tree is required for computations. Attributes: F (TensorFlow variable): filter function values over the vertices of the simplex tree - stbase (string): path to the file containing the simplex tree + stbase (string): path to the file containing the simplex tree. 
Each line of the file should represent a simplex as a sequence of integers separated by spaces card (int): maximum number of points in the persistence diagram dim (int): homology dimension """ diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index 56277b74..de738579 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -31,7 +31,7 @@ def test_st_diff(): Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) F = tf.Variable(initial_value=Finit, trainable=True) - model = SimplexTreeModel(F, stbase="simplextree.txt", dim=0, card=10) + model = LowerStarSimplexTreeModel(F, stbase="simplextree.txt", dim=0, card=10) with tf.GradientTape() as tape: dgm = model.call() -- cgit v1.2.3 From 6a062a47633694a5120e6991086d53b85898ac19 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 2 Feb 2021 20:55:37 +0100 Subject: small fix on Cubical --- src/python/gudhi/differentiation/tensorflow.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index 372172c4..b01a91f8 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -202,13 +202,17 @@ def _Cubical(X, dim, card): # Compute the persistence pairs with Gudhi cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) cc.persistence() - cof = cc.cofaces_of_persistence_pairs()[0][dim] - - # Sort points with distance-to-diagonal - Xs = X.shape - pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] - perm = np.argsort(pers) - cof = cof[perm[::-1]] + try: + cof = cc.cofaces_of_persistence_pairs()[0][dim] + except IndexError: + cof = np.array([]) + + if len(cof) > 0: + # Sort points with distance-to-diagonal + Xs = X.shape + pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] + perm = np.argsort(pers) + cof = cof[perm[::-1]] # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices D = len(Xs) -- cgit v1.2.3 From ec0f360a3a4dc11195e3351fd76bef012d233ee6 Mon Sep 17 00:00:00 2001 From: Mathieu Carrière Date: Wed, 3 Feb 2021 14:19:36 +0100 Subject: error fix --- src/python/gudhi/differentiation/tensorflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index b01a91f8..b7d68fae 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -76,7 +76,7 @@ class LowerStarSimplexTreeModel(tf.keras.Model): dim (int): homology dimension """ def __init__(self, F, stbase="simplextree.txt", dim=0, card=50): - super(SimplexTreeModel, self).__init__() + super(LowerStarSimplexTreeModel, self).__init__() self.F = F self.dim = dim self.card = card -- cgit v1.2.3 From c080d71bae7f2239f0a85910efc67e65da3ba36e Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 5 Feb 2021 00:19:03 +0100 Subject: avoid tensorflow addons dependency --- src/python/gudhi/differentiation/tensorflow.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index b7d68fae..b8f6461f 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -1,6 +1,5 
@@ import numpy as np import tensorflow as tf -import tensorflow_addons as tfa from ..simplex_tree import SimplexTree from ..rips_complex import RipsComplex from ..cubical_complex import CubicalComplex @@ -166,7 +165,7 @@ class RipsModel(tf.keras.Model): m, d, c = self.mel, self.dim, self.card # Compute distance matrix - DX = tfa.losses.metric_learning.pairwise_distance(self.X) + DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(self.X, 1)-tf.expand_dims(self.X, 0))**2, 2)) DXX = tf.reshape(DX, [1, DX.shape[0], DX.shape[1]]) # Turn numpy function into tensorflow function -- cgit v1.2.3 From f926b2c1e8782a47f2b79342c388f4f68bd5d2ca Mon Sep 17 00:00:00 2001 From: Mathieu Carriere Date: Sat, 13 Feb 2021 23:35:31 +0100 Subject: new try to fix errors --- data/filtered_simplicial_complex/simplextree.txt | 21 +++++++++++++++++++++ src/python/gudhi/differentiation/tensorflow.py | 6 +++++- src/python/test/simplextree.txt | 21 --------------------- src/python/test/test_diff.py | 2 +- 4 files changed, 27 insertions(+), 23 deletions(-) create mode 100644 data/filtered_simplicial_complex/simplextree.txt delete mode 100644 src/python/test/simplextree.txt diff --git a/data/filtered_simplicial_complex/simplextree.txt b/data/filtered_simplicial_complex/simplextree.txt new file mode 100644 index 00000000..e0dfcdd9 --- /dev/null +++ b/data/filtered_simplicial_complex/simplextree.txt @@ -0,0 +1,21 @@ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +0 1 +1 2 +2 3 +3 4 +4 5 +5 6 +6 7 +7 8 +8 9 +9 10 diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index b8f6461f..4e08216d 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -176,7 +176,11 @@ class RipsModel(tf.keras.Model): ids = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(RipsTF,DXX,dtype=[tf.int32 for _ in range(4*c)])) # Get persistence diagram by simply picking the corresponding entries in the distance matrix - dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(ids, [2*c,2])), [c,2]) + if d > 0: + dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(ids, [2*c,2])), [c,2]) + else: + ids = tf.reshape(ids, [2*c,2])[1::2,:] + dgm = tf.concat([tf.zeros([c,1]), tf.reshape(tf.gather_nd(DX, ids), [c,1])], axis=1) return dgm diff --git a/src/python/test/simplextree.txt b/src/python/test/simplextree.txt deleted file mode 100644 index e0dfcdd9..00000000 --- a/src/python/test/simplextree.txt +++ /dev/null @@ -1,21 +0,0 @@ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -0 1 -1 2 -2 3 -3 4 -4 5 -5 6 -6 7 -7 8 -8 9 -9 10 diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index de738579..d42e25cd 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -31,7 +31,7 @@ def test_st_diff(): Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) F = tf.Variable(initial_value=Finit, trainable=True) - model = LowerStarSimplexTreeModel(F, stbase="simplextree.txt", dim=0, card=10) + model = LowerStarSimplexTreeModel(F, stbase="../../../data/filtered_simplicial_complex/simplextree.txt", dim=0, card=10) with tf.GradientTape() as tape: dgm = model.call() -- cgit v1.2.3 From f0c12fbdce04d09bf13b141d549e5e385c64caad Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 1 Jun 2021 18:39:31 +0200 Subject: First version allowing to fetch remote datasets --- src/python/CMakeLists.txt | 2 + src/python/gudhi/datasets/remote.py | 85 +++++++++++++++++++++++++++++++++ src/python/test/test_remote_datasets.py | 22 +++++++++ 3 files changed, 109 insertions(+) 
create mode 100644 src/python/gudhi/datasets/remote.py create mode 100644 src/python/test/test_remote_datasets.py diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 98f2b85f..6f117588 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -542,6 +542,8 @@ if(PYTHONINTERP_FOUND) add_gudhi_py_test(test_dtm_rips_complex) endif() + # Fetch remote datasets + add_gudhi_py_test(test_remote_datasets) # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py new file mode 100644 index 00000000..27076785 --- /dev/null +++ b/src/python/gudhi/datasets/remote.py @@ -0,0 +1,85 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. +# Author(s): Hind Montassif +# +# Copyright (C) 2021 Inria +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + +import hashlib + +from os.path import join, exists +from os import makedirs + +from urllib.request import urlretrieve + + +def _checksum_sha256(file_path): + """ + Compute the file checksum using sha256 + + Parameters + ---------- + file_path: string + Full path of the created file. + + Returns + ------- + The hex digest of file_path + """ + sha256_hash = hashlib.sha256() + chunk_size = 4096 + with open(file_path,"rb") as f: + # Read and update hash string value in blocks of 4K + while True: + buffer = f.read(chunk_size) + if not buffer: + break + sha256_hash.update(buffer) + return sha256_hash.hexdigest() + +def fetch(url, filename, dirname = "remote_datasets", checksum_flag = False, file_checksum = None): + """ + Fetch the wanted dataset from the given url and save it in file_path + + Parameters + ---------- + url : string + The url to fetch the dataset from + filename : string + The filename to download + dirname : string + The directory to save the file to. + checksum_flag : boolean + To set if the user wants the file checksum. Default is 'False'. + Note that if checksum_flag is set to 'True', the file_checksum must be provided. + file_checksum : string + The file checksum using sha256 to check against the one computed on the downloaded file. + To be considered, checksum_flag must be set to 'True'. + Default is 'None'. + + Returns + ------- + file_path: string + Full path of the created file. + """ + if not exists(dirname): + makedirs(dirname) + + file_path = join(dirname, filename) + + urlretrieve(url, file_path) + + if (checksum_flag): + if file_checksum is None: + raise ValueError("The file checksum must be provided - different from None - for the check to be performed.") + + checksum = _checksum_sha256(file_path) + if file_checksum != checksum: + raise IOError("{} has a SHA256 checksum : {}, " + "different from expected : {}." + "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, + file_checksum)) + + return file_path diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py new file mode 100644 index 00000000..c4e752a7 --- /dev/null +++ b/src/python/test/test_remote_datasets.py @@ -0,0 +1,22 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+# Author(s): Hind Montassif +# +# Copyright (C) 2021 Inria +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + + +from gudhi.datasets import remote + +def test_fetch_remote_datasets(): + # Test files download from given urls + assert 'remote_datasets/spiral_2d.csv' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv") + assert 'remote_datasets/sphere3D_pts_on_grid.off' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + + # Test files download with checksums provided + assert 'remote_datasets/spiral_2d.csv' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv", checksum_flag = True, + file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + assert 'remote_datasets/sphere3D_pts_on_grid.off' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + checksum_flag = True, file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') -- cgit v1.2.3 From c2f0cf79af04ea3586a70c0a121a200353e989ac Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Jun 2021 11:30:09 +0200 Subject: Add wrapping function to fecth spiral_2d.csv directly --- src/python/gudhi/datasets/remote.py | 24 +++++++++++++++++++++--- src/python/test/test_remote_datasets.py | 3 +++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 27076785..4a300b15 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -46,11 +46,11 @@ def fetch(url, filename, dirname = "remote_datasets", checksum_flag = False, fil Parameters ---------- url : string - The url to fetch the dataset from + The url to fetch the dataset from. filename : string - The filename to download + The name to give to downloaded file. dirname : string - The directory to save the file to. + The directory to save the file to. Default is "remote_datasets". checksum_flag : boolean To set if the user wants the file checksum. Default is 'False'. Note that if checksum_flag is set to 'True', the file_checksum must be provided. @@ -83,3 +83,21 @@ def fetch(url, filename, dirname = "remote_datasets", checksum_flag = False, fil file_checksum)) return file_path + +def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): + """ + Fetch spiral_2d.csv remotely + + Parameters + ---------- + filename : string + The name to give to downloaded file. Default is "spiral_2d.csv" + dirname : string + The directory to save the file to. Default is "remote_datasets". + + Returns + ------- + file_path: string + Full path of the created file. 
+ """ + return fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", filename, dirname, True, '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index c4e752a7..dc854e25 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -20,3 +20,6 @@ def test_fetch_remote_datasets(): file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') assert 'remote_datasets/sphere3D_pts_on_grid.off' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", checksum_flag = True, file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') + + # Test spiral_2d.csv wrapping function + assert 'remote_datasets/spiral_2d.csv' == remote.fetch_spiral_2d() -- cgit v1.2.3 From baa2e67036dae8ec63321a4d9ff4e913780a8757 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Jun 2021 16:00:40 +0200 Subject: Modify test to consider both slash and backslash in the returned file path --- src/python/test/test_remote_datasets.py | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index dc854e25..a822ebaa 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -9,17 +9,36 @@ from gudhi.datasets import remote +import re def test_fetch_remote_datasets(): # Test files download from given urls - assert 'remote_datasets/spiral_2d.csv' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv") - assert 'remote_datasets/sphere3D_pts_on_grid.off' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv") + names_dw = re.split(r' |/|\\', path_file_dw) + assert 'remote_datasets' == names_dw[0] + assert 'spiral_2d.csv' == names_dw[1] + + path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + names_dw = re.split(r' |/|\\', path_file_dw) + assert 'remote_datasets' == names_dw[0] + assert 'sphere3D_pts_on_grid.off' == names_dw[1] + # Test files download with checksums provided - assert 'remote_datasets/spiral_2d.csv' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv", checksum_flag = True, + path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv", checksum_flag = True, file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') - assert 'remote_datasets/sphere3D_pts_on_grid.off' == remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + names_dw = re.split(r' |/|\\', path_file_dw) + assert 'remote_datasets' == names_dw[0] + assert 'spiral_2d.csv' == names_dw[1] + + path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", checksum_flag = True, file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') + names_dw = re.split(r' |/|\\', 
path_file_dw) + assert 'remote_datasets' == names_dw[0] + assert 'sphere3D_pts_on_grid.off' == names_dw[1] # Test spiral_2d.csv wrapping function - assert 'remote_datasets/spiral_2d.csv' == remote.fetch_spiral_2d() + path_file_dw = remote.fetch_spiral_2d() + names_dw = re.split(r' |/|\\', path_file_dw) + assert 'remote_datasets' == names_dw[0] + assert 'spiral_2d.csv' == names_dw[1] -- cgit v1.2.3 From 3ee453718eebc7274b19caef4b79d8ec2754d583 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 3 Jun 2021 16:40:59 +0200 Subject: Modify urls to point to GUDHI/gudhi-data repo --- src/python/gudhi/datasets/remote.py | 3 ++- src/python/test/test_remote_datasets.py | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 4a300b15..525a7b66 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -100,4 +100,5 @@ def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): file_path: string Full path of the created file. """ - return fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", filename, dirname, True, '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + return fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", filename, dirname, True, + '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index a822ebaa..63ad7885 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -13,25 +13,25 @@ import re def test_fetch_remote_datasets(): # Test files download from given urls - path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv") + path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") names_dw = re.split(r' |/|\\', path_file_dw) assert 'remote_datasets' == names_dw[0] assert 'spiral_2d.csv' == names_dw[1] - path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") names_dw = re.split(r' |/|\\', path_file_dw) assert 'remote_datasets' == names_dw[0] assert 'sphere3D_pts_on_grid.off' == names_dw[1] # Test files download with checksums provided - path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/spiral_2d.csv", "spiral_2d.csv", checksum_flag = True, + path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", checksum_flag = True, file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') names_dw = re.split(r' |/|\\', path_file_dw) assert 'remote_datasets' == names_dw[0] assert 'spiral_2d.csv' == names_dw[1] - path_file_dw = remote.fetch("https://raw.githubusercontent.com/Hind-M/gudhi-data/main/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", checksum_flag = True, file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') names_dw = re.split(r' |/|\\', 
path_file_dw) assert 'remote_datasets' == names_dw[0] -- cgit v1.2.3 From bbe2e25a204be50eb422db71b4cf314b92797d4e Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 4 Jun 2021 12:21:11 +0200 Subject: Remove checksum_flag parameter and use value of 'file_checksum is not None' instead --- src/python/gudhi/datasets/remote.py | 13 +++---------- src/python/test/test_remote_datasets.py | 7 ++++--- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 525a7b66..fdd20f74 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -39,7 +39,7 @@ def _checksum_sha256(file_path): sha256_hash.update(buffer) return sha256_hash.hexdigest() -def fetch(url, filename, dirname = "remote_datasets", checksum_flag = False, file_checksum = None): +def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): """ Fetch the wanted dataset from the given url and save it in file_path @@ -51,12 +51,8 @@ def fetch(url, filename, dirname = "remote_datasets", checksum_flag = False, fil The name to give to downloaded file. dirname : string The directory to save the file to. Default is "remote_datasets". - checksum_flag : boolean - To set if the user wants the file checksum. Default is 'False'. - Note that if checksum_flag is set to 'True', the file_checksum must be provided. file_checksum : string The file checksum using sha256 to check against the one computed on the downloaded file. - To be considered, checksum_flag must be set to 'True'. Default is 'None'. Returns @@ -71,10 +67,7 @@ def fetch(url, filename, dirname = "remote_datasets", checksum_flag = False, fil urlretrieve(url, file_path) - if (checksum_flag): - if file_checksum is None: - raise ValueError("The file checksum must be provided - different from None - for the check to be performed.") - + if file_checksum is not None: checksum = _checksum_sha256(file_path) if file_checksum != checksum: raise IOError("{} has a SHA256 checksum : {}, " @@ -100,5 +93,5 @@ def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): file_path: string Full path of the created file. 
""" - return fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", filename, dirname, True, + return fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", filename, dirname, '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 63ad7885..6c9217c8 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -25,14 +25,15 @@ def test_fetch_remote_datasets(): # Test files download with checksums provided - path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", checksum_flag = True, - file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", + file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') names_dw = re.split(r' |/|\\', path_file_dw) assert 'remote_datasets' == names_dw[0] assert 'spiral_2d.csv' == names_dw[1] path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", - checksum_flag = True, file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') + file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') + names_dw = re.split(r' |/|\\', path_file_dw) assert 'remote_datasets' == names_dw[0] assert 'sphere3D_pts_on_grid.off' == names_dw[1] -- cgit v1.2.3 From f7b4d9f3ed0b0c386204077ea53a22e2dba527fc Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 4 Jun 2021 15:06:57 +0200 Subject: Check if the wanted file already exists locally before downloading --- src/python/gudhi/datasets/remote.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index fdd20f74..b266467d 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -60,20 +60,24 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): file_path: string Full path of the created file. """ - if not exists(dirname): - makedirs(dirname) file_path = join(dirname, filename) - urlretrieve(url, file_path) - - if file_checksum is not None: - checksum = _checksum_sha256(file_path) - if file_checksum != checksum: - raise IOError("{} has a SHA256 checksum : {}, " - "different from expected : {}." - "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, - file_checksum)) + # Check that an existing file does not already exist at file_path + if not exists(file_path): + # Create directory if not existing + if not exists(dirname): + makedirs(dirname) + + urlretrieve(url, file_path) + + if file_checksum is not None: + checksum = _checksum_sha256(file_path) + if file_checksum != checksum: + raise IOError("{} has a SHA256 checksum : {}, " + "different from expected : {}." 
+ "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, + file_checksum)) return file_path -- cgit v1.2.3 From 16867ca9321e50531307253e957b91c4df7e564c Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 4 Jun 2021 16:39:16 +0200 Subject: Verify checksum even for already existing files locally --- src/python/gudhi/datasets/remote.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index b266467d..aef4b277 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -63,7 +63,7 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): file_path = join(dirname, filename) - # Check that an existing file does not already exist at file_path + # Check for an already existing file at file_path if not exists(file_path): # Create directory if not existing if not exists(dirname): @@ -71,13 +71,12 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): urlretrieve(url, file_path) - if file_checksum is not None: - checksum = _checksum_sha256(file_path) - if file_checksum != checksum: - raise IOError("{} has a SHA256 checksum : {}, " - "different from expected : {}." - "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, - file_checksum)) + if file_checksum is not None: + checksum = _checksum_sha256(file_path) + if file_checksum != checksum: + raise IOError("{} has a SHA256 checksum : {}, " + "different from expected : {}." + "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, file_checksum)) return file_path -- cgit v1.2.3 From 82524c5b0a6ab02b020574b2200a8721f3ed424c Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 7 Jun 2021 15:03:14 +0200 Subject: Add test with wrong checksum Add functions to avoid redundant code --- src/python/test/test_remote_datasets.py | 43 +++++++++++++++++---------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 6c9217c8..e252980d 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -10,36 +10,37 @@ from gudhi.datasets import remote import re +import os.path +import pytest -def test_fetch_remote_datasets(): - # Test files download from given urls - path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") - names_dw = re.split(r' |/|\\', path_file_dw) - assert 'remote_datasets' == names_dw[0] - assert 'spiral_2d.csv' == names_dw[1] +def check_dir_file_names(path_file_dw, filename, dirname): + assert os.path.isfile(path_file_dw) - path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") names_dw = re.split(r' |/|\\', path_file_dw) - assert 'remote_datasets' == names_dw[0] - assert 'sphere3D_pts_on_grid.off' == names_dw[1] + assert dirname == names_dw[0] + assert filename == names_dw[1] +def check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): + path_file_dw = remote.fetch(url, filename, dirname, file_checksum) + check_dir_file_names(path_file_dw, filename, dirname) + +def test_fetch_remote_datasets(): + # Test fetch with a wrong checksum + with pytest.raises(OSError): + 
check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = 'XXXXXXXXXX') - # Test files download with checksums provided - path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", + # Test files download from given urls with checksums provided + check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') - names_dw = re.split(r' |/|\\', path_file_dw) - assert 'remote_datasets' == names_dw[0] - assert 'spiral_2d.csv' == names_dw[1] - path_file_dw = remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') - names_dw = re.split(r' |/|\\', path_file_dw) - assert 'remote_datasets' == names_dw[0] - assert 'sphere3D_pts_on_grid.off' == names_dw[1] + # Test files download from given urls without checksums + check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") + + check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") # Test spiral_2d.csv wrapping function path_file_dw = remote.fetch_spiral_2d() - names_dw = re.split(r' |/|\\', path_file_dw) - assert 'remote_datasets' == names_dw[0] - assert 'spiral_2d.csv' == names_dw[1] + check_dir_file_names(path_file_dw, 'spiral_2d.csv', 'remote_datasets') -- cgit v1.2.3 From 2bd2f8134daeb65a9fff730fef75c323320faefb Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 8 Jun 2021 10:47:45 +0200 Subject: Fix incorrect comments relative to Cech --- src/Cech_complex/concept/SimplicialComplexForCech.h | 4 ++-- src/Cech_complex/example/cech_complex_step_by_step.cpp | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/Cech_complex/concept/SimplicialComplexForCech.h b/src/Cech_complex/concept/SimplicialComplexForCech.h index 00c7df3a..6202fe92 100644 --- a/src/Cech_complex/concept/SimplicialComplexForCech.h +++ b/src/Cech_complex/concept/SimplicialComplexForCech.h @@ -47,8 +47,8 @@ struct SimplicialComplexForCech { }; -} // namespace alpha_complex +} // namespace cech_complex } // namespace Gudhi -#endif // CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_ +#endif // CONCEPT_CECH_COMPLEX_SIMPLICIAL_COMPLEX_FOR_CECH_H_ diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index f59f0293..60ae9712 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -24,9 +24,9 @@ #include // ---------------------------------------------------------------------------- -// rips_persistence_step_by_step is an example of each step that is required to -// build a Rips over a Simplex_tree. Please refer to rips_persistence to see -// how to do the same thing with the Rips_complex wrapper for less detailed +// cech_complex_step_by_step is an example of each step that is required to +// build a Cech over a Simplex_tree. 
Please refer to cech_complex_example_from_points to see +// how to do the same thing with the Cech complex wrapper for less detailed // steps. // ---------------------------------------------------------------------------- @@ -89,7 +89,7 @@ int main(int argc, char* argv[]) { Proximity_graph prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), max_radius, Gudhi::Minimal_enclosing_ball_radius()); - // Construct the Rips complex in a Simplex Tree + // Construct the Cech complex in a Simplex Tree Simplex_tree st; // insert the proximity graph in the simplex tree st.insert_graph(prox_graph); @@ -129,9 +129,9 @@ void program_options(int argc, char* argv[], std::string& off_file_points, Filtr visible.add_options()("help,h", "produce help message")( "max-radius,r", po::value(&max_radius)->default_value(std::numeric_limits::infinity()), - "Maximal length of an edge for the Rips complex construction.")( + "Maximal length of an edge for the Cech complex construction.")( "cpx-dimension,d", po::value(&dim_max)->default_value(1), - "Maximal dimension of the Rips complex we want to compute."); + "Maximal dimension of the Cech complex we want to compute."); po::positional_options_description pos; pos.add("input-file", 1); -- cgit v1.2.3 From 6e2b5caf7fe0f255dbafa70d6cad62ec4d7277a3 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 5 Jul 2021 17:42:54 +0200 Subject: removed padding --- data/filtered_simplicial_complex/simplextree.txt | 21 --- src/python/gudhi/differentiation/__init__.py | 2 +- src/python/gudhi/differentiation/tensorflow.py | 225 ++++++++++------------- src/python/test/test_diff.py | 52 ++++-- 4 files changed, 139 insertions(+), 161 deletions(-) delete mode 100644 data/filtered_simplicial_complex/simplextree.txt diff --git a/data/filtered_simplicial_complex/simplextree.txt b/data/filtered_simplicial_complex/simplextree.txt deleted file mode 100644 index e0dfcdd9..00000000 --- a/data/filtered_simplicial_complex/simplextree.txt +++ /dev/null @@ -1,21 +0,0 @@ -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -0 1 -1 2 -2 3 -3 4 -4 5 -5 6 -6 7 -7 8 -8 9 -9 10 diff --git a/src/python/gudhi/differentiation/__init__.py b/src/python/gudhi/differentiation/__init__.py index 6793e904..3b7790e4 100644 --- a/src/python/gudhi/differentiation/__init__.py +++ b/src/python/gudhi/differentiation/__init__.py @@ -1,3 +1,3 @@ from .tensorflow import * -__all__ = ["LowerStarSimplexTreeModel", "RipsModel", "CubicalModel"] +__all__ = ["LowerStarSimplexTreeLayer", "RipsLayer", "CubicalLayer"] diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index 4e08216d..0f5df9a2 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -1,6 +1,5 @@ import numpy as np import tensorflow as tf -from ..simplex_tree import SimplexTree from ..rips_complex import RipsComplex from ..cubical_complex import CubicalComplex @@ -16,85 +15,69 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the vertex function values of the simplex tree. 
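+# Illustrative usage sketch (added for exposition; not part of the original
+# patch): with the layer API introduced here, the simplex tree is passed
+# directly as a gudhi.SimplexTree object instead of a file path, and the
+# filtration values are given to call(). The simplex tree below reproduces the
+# former test data, a path graph on 11 vertices; gudhi is assumed imported.
+#
+#   st = gudhi.SimplexTree()
+#   for i in range(11):
+#       st.insert([i])
+#   for i in range(10):
+#       st.insert([i, i+1])
+#   layer = LowerStarSimplexTreeLayer(simplextree=st, dimension=0)
+#   F = tf.Variable(np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32), trainable=True)
+#   with tf.GradientTape() as tape:
+#       dgm = layer.call(F)
+#       loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+#   gradients = tape.gradient(loss, [F])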
-def _SimplexTree(stbase, fct, dim, card): - # Parameters: stbase (array containing the name of the file where the simplex tree is located) - # fct (function values on the vertices of stbase), - # dim (homological dimension), - # card (number of persistence diagram points, sorted by distance-to-diagonal) - - # Copy stbase in another simplex tree st - st = SimplexTree() - f = open(stbase[0], "r") - for line in f: - ints = line.split(" ") - s = [int(v) for v in ints[:-1]] - st.insert(s, -1e10) - f.close() - +def _LowerStarSimplexTree(simplextree, filtration, dimension): + # Parameters: simplextree (simplex tree on which to compute persistence) + # filtration (function values on the vertices of st), + # dimension (homology dimension), + # Assign new filtration values - for i in range(st.num_vertices()): - st.assign_filtration([i], fct[i]) - st.make_filtration_non_decreasing() + for i in range(simplextree.num_vertices()): + simplextree.assign_filtration([i], filtration[i]) + simplextree.make_filtration_non_decreasing() # Compute persistence diagram - dgm = st.persistence() + dgm = simplextree.persistence() # Get vertex pairs for optimization. First, get all simplex pairs - pairs = st.persistence_pairs() + pairs = simplextree.persistence_pairs() # Then, loop over all simplex pairs indices, pers = [], [] for s1, s2 in pairs: # Select pairs with good homological dimension and finite lifetime - if len(s1) == dim+1 and len(s2) > 0: + if len(s1) == dimension+1 and len(s2) > 0: # Get IDs of the vertices corresponding to the filtration values of the simplices l1, l2 = np.array(s1), np.array(s2) - i1 = l1[np.argmax(fct[l1])] - i2 = l2[np.argmax(fct[l2])] + i1 = l1[np.argmax(filtration[l1])] + i2 = l2[np.argmax(filtration[l2])] indices.append(i1) indices.append(i2) # Compute lifetime - pers.append(st.filtration(s2) - st.filtration(s1)) + pers.append(simplextree.filtration(s2)-simplextree.filtration(s1)) # Sort vertex pairs wrt lifetime perm = np.argsort(pers) - indices = list(np.reshape(indices, [-1,2])[perm][::-1,:].flatten()) + indices = np.reshape(indices, [-1,2])[perm][::-1,:].flatten() - # Pad vertex pairs - indices = indices[:2*card] + [0 for _ in range(0,max(0,2*card-len(indices)))] - return list(np.array(indices, dtype=np.int32)) + return np.array(indices, dtype=np.int32) -class LowerStarSimplexTreeModel(tf.keras.Model): +class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ - TensorFlow model for computing lower-star persistence out of a simplex tree. Since simplex trees cannot be easily encoded as TensorFlow variables, the model takes as input a path to a file containing the simplex tree simplices, and read it each time the simplex tree is required for computations. + TensorFlow layer for computing lower-star persistence out of a simplex tree Attributes: - F (TensorFlow variable): filter function values over the vertices of the simplex tree - stbase (string): path to the file containing the simplex tree. 
Each line of the file should represent a simplex as a sequence of integers separated by spaces - card (int): maximum number of points in the persistence diagram - dim (int): homology dimension + simplextree (gudhi.SimplexTree()): underlying simplex tree + dimension (int): homology dimension """ - def __init__(self, F, stbase="simplextree.txt", dim=0, card=50): - super(LowerStarSimplexTreeModel, self).__init__() - self.F = F - self.dim = dim - self.card = card - self.st = stbase - - def call(self): - d, c = self.dim, self.card - st, fct = self.st, self.F + def __init__(self, simplextree, dimension=0, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.dimension = dimension + self.simplextree = simplextree + + def build(self, input_shape): + super().build(input_shape) + + def call(self, filtration): + """ + Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree - # Turn STPers into a numpy function - SimplexTreeTF = lambda fct: tf.numpy_function(_SimplexTree, [np.array([st], dtype=str), fct, d, c], [tf.int32 for _ in range(2*c)]) - + Parameters: + filtration (TensorFlow variable): filter function values over the vertices of the simplex tree + """ # Don't try to compute gradients for the vertex pairs - fcts = tf.reshape(fct, [1, self.F.shape[0]]) - inds = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(SimplexTreeTF, - fcts, dtype=[tf.int32 for _ in range(2*c)])) - + indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) # Get persistence diagram - self.dgm = tf.reshape(tf.gather_nd(self.F, inds), [c,2]) + self.dgm = tf.reshape(tf.gather(filtration, indices), [-1,2]) return self.dgm @@ -105,22 +88,20 @@ class LowerStarSimplexTreeModel(tf.keras.Model): - ############################ # Vietoris-Rips filtration # ############################ # The parameters of the model are the point coordinates.
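[Editorial aside before the Rips code continues: a hedged usage sketch for the LowerStarSimplexTreeLayer defined above, mirroring the unit tests further down. The optimizer, learning rate and toy complex are made up; gradients flow through tf.gather since only the vertex-pair indices are frozen by stop_gradient.]

    import numpy as np
    import tensorflow as tf
    import gudhi as gd
    from gudhi.differentiation import LowerStarSimplexTreeLayer

    # Small path complex with trainable vertex values
    st = gd.SimplexTree()
    for edge in [[0, 1], [1, 2], [2, 3]]:
        st.insert(edge)
    F = tf.Variable(np.array([1., 0., 2., 0.5], dtype=np.float32), trainable=True)
    layer = LowerStarSimplexTreeLayer(simplextree=st, dimension=0)
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)
    for _ in range(10):
        with tf.GradientTape() as tape:
            dgm = layer.call(F)
            # Total persistence; driving it to zero flattens F
            loss = tf.math.reduce_sum(dgm[:, 1] - dgm[:, 0])
        gradients = tape.gradient(loss, [F])
        optimizer.apply_gradients(zip(gradients, [F]))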
-def _Rips(DX, mel, dim, card): +def _Rips(DX, max_edge, dimension): # Parameters: DX (distance matrix), - # mel (maximum edge length for Rips filtration), - # dim (homological dimension), - # card (number of persistence diagram points, sorted by distance-to-diagonal) + # max_edge (maximum edge length for Rips filtration), + # dimension (homology dimension) # Compute the persistence pairs with Gudhi - rc = RipsComplex(distance_matrix=DX, max_edge_length=mel) - st = rc.create_simplex_tree(max_dimension=dim+1) + rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) + st = rc.create_simplex_tree(max_dimension=dimension+1) dgm = st.persistence() pairs = st.persistence_pairs() @@ -128,59 +109,54 @@ def _Rips(DX, mel, dim, card): # distance among all pairwise distances between the simplex vertices indices, pers = [], [] for s1, s2 in pairs: - if len(s1) == dim+1 and len(s2) > 0: + if len(s1) == dimension+1 and len(s2) > 0: l1, l2 = np.array(s1), np.array(s2) - i1 = [s1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(s1), len(s1)])] - i2 = [s2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(s2), len(s2)])] - indices += i1 - indices += i2 - pers.append(st.filtration(s2) - st.filtration(s1)) + i1 = [l1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(l1), len(l1)])] + i2 = [l2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(l2), len(l2)])] + indices.append(i1) + indices.append(i2) + pers.append(st.filtration(s2)-st.filtration(s1)) # Sort points with distance-to-diagonal perm = np.argsort(pers) - indices = list(np.reshape(indices, [-1,4])[perm][::-1,:].flatten()) - - # Output indices - indices = indices[:4*card] + [0 for _ in range(0,max(0,4*card-len(indices)))] - return list(np.array(indices, dtype=np.int32)) + indices = np.reshape(indices, [-1,4])[perm][::-1,:].flatten() + + return np.array(indices, dtype=np.int32) -class RipsModel(tf.keras.Model): +class RipsLayer(tf.keras.layers.Layer): """ - TensorFlow model for computing Rips persistence out of a point cloud. 
+ TensorFlow layer for computing Rips persistence out of a point cloud Attributes: - X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] - mel (float): maximum edge length for the Rips complex - card (int): maximum number of points in the persistence diagram - dim (int): homology dimension + maximum_edge_length (float): maximum edge length for the Rips complex + dimension (int): homology dimension """ - def __init__(self, X, mel=12, dim=1, card=50): - super(RipsModel, self).__init__() - self.X = X - self.mel = mel - self.dim = dim - self.card = card - - def call(self): - m, d, c = self.mel, self.dim, self.card + def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.max_edge = maximum_edge_length + self.dimension = dimension + + def build(self, input_shape): + super().build(input_shape) + def call(self, X): + """ + Compute Rips persistence diagram associated to a point cloud + + Parameters: + X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] + """ # Compute distance matrix - DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(self.X, 1)-tf.expand_dims(self.X, 0))**2, 2)) - DXX = tf.reshape(DX, [1, DX.shape[0], DX.shape[1]]) - - # Turn numpy function into tensorflow function - RipsTF = lambda DX: tf.numpy_function(_Rips, [DX, m, d, c], [tf.int32 for _ in range(4*c)]) - + DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) # Compute vertices associated to positive and negative simplices # Don't compute gradient for this operation - ids = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(RipsTF,DXX,dtype=[tf.int32 for _ in range(4*c)])) - + indices = tf.stop_gradient(_Rips(DX.numpy(), self.max_edge, self.dimension)) # Get persistence diagram by simply picking the corresponding entries in the distance matrix - if d > 0: - dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(ids, [2*c,2])), [c,2]) + if self.dimension > 0: + dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) else: - ids = tf.reshape(ids, [2*c,2])[1::2,:] - dgm = tf.concat([tf.zeros([c,1]), tf.reshape(tf.gather_nd(DX, ids), [c,1])], axis=1) + indices = tf.reshape(indices, [-1,2])[1::2,:] + dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) return dgm @@ -197,16 +173,15 @@ class RipsModel(tf.keras.Model): # The parameters of the model are the pixel values.
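[Editorial aside: an analogous hedged usage sketch for RipsLayer, with made-up toy data. With dimension=0 every birth is zero, so the loss below simply pulls the points together; the CubicalLayer defined next is used the same way, with an image-shaped variable in place of the point cloud.]

    import numpy as np
    import tensorflow as tf
    from gudhi.differentiation import RipsLayer

    X = tf.Variable(np.random.uniform(size=(4, 2)).astype(np.float32), trainable=True)
    layer = RipsLayer(maximum_edge_length=2., dimension=0)
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.05)
    for _ in range(20):
        with tf.GradientTape() as tape:
            dgm = layer.call(X)
            # Squared total persistence of the 0-dimensional diagram
            loss = tf.math.reduce_sum(tf.square(dgm[:, 1] - dgm[:, 0]))
        gradients = tape.gradient(loss, [X])
        optimizer.apply_gradients(zip(gradients, [X]))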
-def _Cubical(X, dim, card): +def _Cubical(X, dimension): # Parameters: X (image), - # dim (homological dimension), - # card (number of persistence diagram points, sorted by distance-to-diagonal) + # dimension (homology dimension) # Compute the persistence pairs with Gudhi cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) cc.persistence() try: - cof = cc.cofaces_of_persistence_pairs()[0][dim] + cof = cc.cofaces_of_persistence_pairs()[0][dimension] except IndexError: cof = np.array([]) @@ -218,41 +193,39 @@ def _Cubical(X, dim, card): cof = cof[perm[::-1]] # Retrieve and output image indices/pixels corresponding to positive and negative simplices - D = len(Xs) - ocof = np.array([0 for _ in range(D*card*2)]) + D = len(Xs) if len(cof) > 0 else 1 + ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) count = 0 - for idx in range(0,min(2*card, 2*cof.shape[0]),2): + for idx in range(0,2*cof.shape[0],2): ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) count += 1 - return list(np.array(ocof, dtype=np.int32)) + return np.array(ocof, dtype=np.int32) -class CubicalModel(tf.keras.Model): +class CubicalLayer(tf.keras.layers.Layer): """ - TensorFlow model for computing cubical persistence out of a cubical complex. + TensorFlow layer for computing cubical persistence out of a cubical complex Attributes: - X (TensorFlow variable): pixel values of the cubical complex - card (int): maximum number of points in the persistence diagram - dim (int): homology dimension + dimension (int): homology dimension """ - def __init__(self, X, dim=1, card=50): - super(CubicalModel, self).__init__() - self.X = X - self.dim = dim - self.card = card - - def call(self): - d, c, D = self.dim, self.card, len(self.X.shape) - XX = tf.reshape(self.X, [1, self.X.shape[0], self.X.shape[1]]) - - # Turn numpy function into tensorflow function - CbTF = lambda X: tf.numpy_function(_Cubical, [X, d, c], [tf.int32 for _ in range(2*D*c)]) + def __init__(self, dimension=1, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.dimension = dimension + + def build(self, input_shape): + super().build(input_shape) + def call(self, X): + """ + Compute persistence diagram associated to a cubical complex filtered by some pixel values + + Parameters: + X (TensorFlow variable): pixel values of the cubical complex + """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation - inds = tf.nest.map_structure(tf.stop_gradient, tf.map_fn(CbTF,XX,dtype=[tf.int32 for _ in range(2*D*c)])) - + indices = tf.stop_gradient(_Cubical(X.numpy(), self.dimension)) # Get persistence diagram by simply picking the corresponding entries in the image - dgm = tf.reshape(tf.gather_nd(self.X, tf.reshape(inds, [-1,D])), [-1,2]) + dgm = tf.reshape(tf.gather_nd(X, tf.reshape(indices, [-1,len(X.shape)])), [-1,2]) return dgm diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index d42e25cd..129b9f03 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -1,41 +1,67 @@ from gudhi.differentiation import * import numpy as np import tensorflow as tf +import gudhi as gd def test_rips_diff(): Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - model = RipsModel(X=X, mel=2., dim=0, card=10) + rl = RipsLayer(maximum_edge_length=2., dimension=0) with tf.GradientTape() as tape: - dgm = model.call() + dgm = rl.call(X) loss =
tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) - grads = tape.gradient(loss, [X]) - assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6 + grads = tape.gradient(loss, [X]) + assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6 def test_cubical_diff(): Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - model = CubicalModel(X, dim=0, card=10) + cl = CubicalLayer(dimension=0) with tf.GradientTape() as tape: - dgm = model.call() + dgm = cl.call(X) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) - grads = tape.gradient(loss, [X]) - assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 + grads = tape.gradient(loss, [X]) + assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 def test_st_diff(): + st = gd.SimplexTree() + st.insert([0]) + st.insert([1]) + st.insert([2]) + st.insert([3]) + st.insert([4]) + st.insert([5]) + st.insert([6]) + st.insert([7]) + st.insert([8]) + st.insert([9]) + st.insert([10]) + st.insert([0, 1]) + st.insert([1, 2]) + st.insert([2, 3]) + st.insert([3, 4]) + st.insert([4, 5]) + st.insert([5, 6]) + st.insert([6, 7]) + st.insert([7, 8]) + st.insert([8, 9]) + st.insert([9, 10]) + Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) F = tf.Variable(initial_value=Finit, trainable=True) - model = LowerStarSimplexTreeModel(F, stbase="../../../data/filtered_simplicial_complex/simplextree.txt", dim=0, card=10) + sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0) with tf.GradientTape() as tape: - dgm = model.call() + dgm = sl.call(F) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) - grads = tape.gradient(loss, [F]) - assert np.array_equal(np.array(grads[0].indices), np.array([2,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])) - assert np.array_equal(np.array(grads[0].values), np.array([-1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0])) + grads = tape.gradient(loss, [F]) + + assert np.array_equal(np.array(grads[0].indices), np.array([2,4])) + assert np.array_equal(np.array(grads[0].values), np.array([-1,1])) + -- cgit v1.2.3 From a384882db85e75f8d4bdf8df1b293b9d91e0c406 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Wed, 7 Jul 2021 15:11:24 +0200 Subject: small fix --- src/python/gudhi/differentiation/tensorflow.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py index 0f5df9a2..15d5811e 100644 --- a/src/python/gudhi/differentiation/tensorflow.py +++ b/src/python/gudhi/differentiation/tensorflow.py @@ -19,6 +19,9 @@ def _LowerStarSimplexTree(simplextree, filtration, dimension): # Parameters: simplextree (simplex tree on which to compute persistence) # filtration (function values on the vertices of st), # dimension (homology dimension), + + for s,_ in simplextree.get_filtration(): + simplextree.assign_filtration(s, -1e10) # Assign new filtration values for i in range(simplextree.num_vertices()): -- cgit v1.2.3 From 91b0ff839b8058d3f5767e6ed80b93c23be2c98a Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 9 Aug 2021 16:21:24 +0200 Subject: First version of cech enhancement --- .../benchmark/cech_complex_benchmark.cpp | 5 +- .../example/cech_complex_example_from_points.cpp | 71 ++++++++++--- .../example/cech_complex_step_by_step.cpp | 18 ++-- src/Cech_complex/include/gudhi/Cech_complex.h | 26 +++-- .../include/gudhi/Cech_complex_blocker.h | 110 
++++++++++++++++++++- src/Cech_complex/test/test_cech_complex.cpp | 86 +++++++++++----- src/Cech_complex/utilities/cech_persistence.cpp | 10 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 4 + src/common/include/gudhi/distance_functions.h | 42 ++++++++ .../include/gudhi/graph_simplicial_complex.h | 3 + 10 files changed, 311 insertions(+), 64 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index e489e8a4..c332c656 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -17,6 +17,8 @@ #include #include +#include // For EXACT or SAFE version + #include "boost/filesystem.hpp" // includes all needed Boost.Filesystem declarations #include @@ -30,7 +32,8 @@ using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Kernel = CGAL::Epeck_d; +using Cech_complex = Gudhi::cech_complex::Cech_complex; class Minimal_enclosing_ball_radius { public: diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp index 1a1f708c..ac17fc73 100644 --- a/src/Cech_complex/example/cech_complex_example_from_points.cpp +++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp @@ -1,6 +1,8 @@ #include #include +#include // For EXACT or SAFE version + #include #include #include @@ -8,32 +10,71 @@ int main() { // Type definitions - using Point_cloud = std::vector>; +// using Point_cloud = std::vector>; using Simplex_tree = Gudhi::Simplex_tree; using Filtration_value = Simplex_tree::Filtration_value; - using Cech_complex = Gudhi::cech_complex::Cech_complex; + using Kernel = CGAL::Epeck_d; + using FT = typename Kernel::FT; + using Point = typename Kernel::Point_d; + using Point_cloud = std::vector; + using Cech_complex = Gudhi::cech_complex::Cech_complex; Point_cloud points; - points.push_back({1., 0.}); // 0 - points.push_back({0., 1.}); // 1 - points.push_back({2., 1.}); // 2 - points.push_back({3., 2.}); // 3 - points.push_back({0., 3.}); // 4 - points.push_back({3. + std::sqrt(3.), 3.}); // 5 - points.push_back({1., 4.}); // 6 - points.push_back({3., 4.}); // 7 - points.push_back({2., 4. + std::sqrt(3.)}); // 8 - points.push_back({0., 4.}); // 9 - points.push_back({-0.5, 2.}); // 10 +// points.push_back({1., 0.}); // 0 +// points.push_back({0., 1.}); // 1 +// points.push_back({2., 1.}); // 2 +// points.push_back({3., 2.}); // 3 +// points.push_back({0., 3.}); // 4 +// points.push_back({3. + std::sqrt(3.), 3.}); // 5 + +// std::vector point({0.0, 0.0, 0.0, 0.0}); +// points.emplace_back(point.begin(), point.end()); + + std::vector point0({1., 0.}); + points.emplace_back(point0.begin(), point0.end()); + std::vector point1({0., 1.}); + points.emplace_back(point1.begin(), point1.end()); + std::vector point2({2., 1.}); + points.emplace_back(point2.begin(), point2.end()); + std::vector point3({3., 2.}); + points.emplace_back(point3.begin(), point3.end()); + std::vector point4({0., 3.}); + points.emplace_back(point4.begin(), point4.end()); + std::vector point5({3. 
+ std::sqrt(3.), 3.}); + points.emplace_back(point5.begin(), point5.end()); + +// points.emplace_back(Point(std::vector({1., 0.}))); +// points.emplace_back(Point(std::vector({0., 1.}))); +// points.emplace_back(Point(std::vector({2., 1.}))); +// points.emplace_back(Point(std::vector({3., 2.}))); +// points.emplace_back(Point(std::vector({0., 3.}))); +// points.emplace_back(Point(std::vector({3. + std::sqrt(3.), 3.}))); + + +// points.push_back(Point(1.0, 0.0)); +// points.push_back(Point(0.0, 1.0)); +// points.push_back(Point(2.0, 1.0)); +// points.push_back(Point(3.0, 2.0)); +// points.push_back(Point(0.0, 3.0)); +// points.push_back(Point(3.0 + std::sqrt(3.0), 3.0)); + + +// points.push_back({1., 4.}); // 6 +// points.push_back({3., 4.}); // 7 +// points.push_back({2., 4. + std::sqrt(3.)}); // 8 +// points.push_back({0., 4.}); // 9 +// points.push_back({-0.5, 2.}); // 10 // ---------------------------------------------------------------------------- // Init of a Cech complex from points // ---------------------------------------------------------------------------- - Filtration_value max_radius = 1.; + Filtration_value max_radius = 10.; + std::clog << "Hind: Just before the Cech constructor" << std::endl; Cech_complex cech_complex_from_points(points, max_radius); + std::clog << "Hind: Just after the Cech constructor" << std::endl; Simplex_tree stree; - cech_complex_from_points.create_complex(stree, 2); + cech_complex_from_points.create_complex(stree, 3); // ---------------------------------------------------------------------------- // Display information about the one skeleton Cech complex // ---------------------------------------------------------------------------- diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index 60ae9712..ac08e6cc 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -13,7 +13,9 @@ #include #include -#include +#include // TODO to remove ? 
+ +#include #include @@ -34,16 +36,18 @@ using Simplex_tree = Gudhi::Simplex_tree<>; using Simplex_handle = Simplex_tree::Simplex_handle; using Filtration_value = Simplex_tree::Filtration_value; -using Point = std::vector; +// using Point = std::vector; +using Kernel = CGAL::Epeck_d; +using Point = typename Kernel::Point_d; using Points_off_reader = Gudhi::Points_off_reader; using Proximity_graph = Gudhi::Proximity_graph; class Cech_blocker { private: using Point_cloud = std::vector; - using Point_iterator = Point_cloud::const_iterator; - using Coordinate_iterator = Point::const_iterator; - using Min_sphere = Gudhi::Miniball::Miniball>; +// using Point_iterator = Point_cloud::const_iterator; +// using Coordinate_iterator = Point::const_iterator; +// using Min_sphere = Gudhi::Miniball::Miniball>; public: bool operator()(Simplex_handle sh) { @@ -63,14 +67,14 @@ class Cech_blocker { } Cech_blocker(Simplex_tree& simplex_tree, Filtration_value max_radius, const std::vector& point_cloud) : simplex_tree_(simplex_tree), max_radius_(max_radius), point_cloud_(point_cloud) { - dimension_ = point_cloud_[0].size(); +// dimension_ = point_cloud_[0].size(); } private: Simplex_tree simplex_tree_; Filtration_value max_radius_; std::vector point_cloud_; - int dimension_; +// int dimension_; }; void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max); diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index b0871e10..2c6f202a 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -40,7 +40,7 @@ namespace cech_complex { * \tparam ForwardPointRange must be a range for which `std::begin()` and `std::end()` methods return input * iterators on a point. `std::begin()` and `std::end()` methods are also required for a point. 
*/ -template +template class Cech_complex { private: // Required by compute_proximity_graph @@ -49,14 +49,15 @@ class Cech_complex { using Proximity_graph = Gudhi::Proximity_graph; // Retrieve Coordinate type from ForwardPointRange - using Point_from_range_iterator = typename boost::range_const_iterator::type; - using Point_from_range = typename std::iterator_traits::value_type; - using Coordinate_iterator = typename boost::range_const_iterator::type; - using Coordinate = typename std::iterator_traits::value_type; +// using Point_from_range_iterator = typename boost::range_const_iterator::type; +// using Point_from_range = typename std::iterator_traits::value_type; +// using Coordinate_iterator = typename boost::range_const_iterator::type; +// using Coordinate = typename std::iterator_traits::value_type; public: // Point and Point_cloud type definition - using Point = std::vector; + //using Point = std::vector; + using Point = typename Kernel::Point_d; using Point_cloud = std::vector; public: @@ -70,9 +71,13 @@ class Cech_complex { */ Cech_complex(const ForwardPointRange& points, Filtration_value max_radius) : max_radius_(max_radius) { // Point cloud deep copy - point_cloud_.reserve(boost::size(points)); - for (auto&& point : points) point_cloud_.emplace_back(std::begin(point), std::end(point)); +// point_cloud_.reserve(boost::size(points)); +// for (auto&& point : points) point_cloud_.emplace_back(point.cartesian_begin(), point.cartesian_end()); + + point_cloud_.assign(points.begin(), points.end()); + + std::clog << "Hind: Just before the graph compute" << std::endl; cech_skeleton_graph_ = Gudhi::compute_proximity_graph( point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius()); } @@ -87,14 +92,17 @@ class Cech_complex { */ template void create_complex(SimplicialComplexForCechComplex& complex, int dim_max) { + std::clog << "Hind: in create complex" << std::endl; GUDHI_CHECK(complex.num_vertices() == 0, std::invalid_argument("Cech_complex::create_complex - simplicial complex is not empty")); // insert the proximity graph in the simplicial complex + std::clog << "Hind: before insert_graph" << std::endl; complex.insert_graph(cech_skeleton_graph_); // expand the graph until dimension dim_max + std::clog << "Hind: before expansion_with_blockers" << std::endl; complex.expansion_with_blockers(dim_max, - Cech_blocker(&complex, this)); + Cech_blocker(&complex, this)); } /** @return max_radius value given at construction. */ diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 31b9aab5..f2cf5ccc 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -11,8 +11,15 @@ #ifndef CECH_COMPLEX_BLOCKER_H_ #define CECH_COMPLEX_BLOCKER_H_ +// TODO to remove #include // for Gudhi::Minimal_enclosing_ball_radius +#include // for CGAL::to_double +#include // +// #include +#include // For EXACT or SAFE version +#include + #include #include #include // for std::sqrt @@ -35,28 +42,120 @@ namespace cech_complex { * * \tparam Chech_complex is required by the blocker. 
*/ -template +template class Cech_blocker { private: - using Point_cloud = typename Cech_complex::Point_cloud; +// using Point_cloud = typename Cech_complex::Point_cloud; using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle; using Filtration_value = typename SimplicialComplexForCech::Filtration_value; public: + + using Point_d = typename Kernel::Point_d; + // Numeric type of coordinates in the kernel + using FT = typename Kernel::FT; + // Sphere is a pair of point and squared radius. + using Sphere = typename std::pair; + + + // Add an int in TDS to save point index in the structure +// using TDS = CGAL::Triangulation_data_structure, +// CGAL::Triangulation_full_cell >; +// +// /** \brief A (Weighted or not) Delaunay triangulation of a set of points in \f$ \mathbb{R}^D\f$.*/ +// using Triangulation = CGAL::Delaunay_triangulation; +// // Vertex_iterator type from CGAL. +// using CGAL_vertex_iterator = typename Triangulation::Vertex_iterator; + + // Structure to switch from simplex tree vertex handle to CGAL vertex iterator. + //using Vector_vertex_iterator = std::vector< CGAL_vertex_iterator >; + + + + /** \brief get_point_ returns the point corresponding to the vertex given as parameter. + * Only for internal use for faster access. + * + * @param[in] vertex Vertex handle of the point to retrieve. + * @return The point found. + */ +/* const Point_d& get_point_(std::size_t vertex) const { + return vertex_handle_to_iterator_[vertex]->point(); + } */ + + /** \internal \brief TODO + * \param[in] + * \return */ + template + FT get_squared_radius(PointIterator begin, PointIterator end) const { + return kernel_.compute_squared_radius_d_object()(begin, end); + } + + /** \internal \brief TODO + * \param[in] + * \return */ + template + Sphere get_sphere(PointIterator begin, PointIterator end) const { + Point_d c = kernel_.construct_circumcenter_d_object()(begin, end); + FT r = kernel_.squared_distance_d_object()(c, *begin); + return std::make_pair(std::move(c), std::move(r)); + } + + /** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex * radius and returns if the simplex expansion must be blocked. * \param[in] sh The Simplex_handle. 
* \return true if the simplex radius is greater than the Cech_complex max_radius*/ bool operator()(Simplex_handle sh) { + using Point_cloud = std::vector; Point_cloud points; + + // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices + for (auto face : sc_ptr_->simplex_vertex_range(sh)) { + ///////////////////////////////////////////////////////////////////// + + + + + /////////////////////////////////// + } + for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { - points.push_back(cc_ptr_->get_point(vertex)); + points.push_back(cc_ptr_->get_point(vertex)); +// points.push_back(get_point_(vertex)); #ifdef DEBUG_TRACES std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } - Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points); + // TODO to remove + //Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points); + // Hind: Here change the algo of the enclosing Minimal_enclosing_ball_radius + auto point_to_be_inserted = points.back(); + Sphere sph = get_sphere(points.cbegin(), points.cend()-1); + +// Sphere sph = get_sphere(points.cbegin(), points.cend()-1); + CGAL::NT_converter cast_to_double; +// CGAL::NT_converter cast_point_d_to_double; + + std::clog << "circumcenter: " << sph.first << ", radius: " << std::sqrt(cast_to_double(sph.second))<< std::endl; + // TODO to remove + // Filtration_value test = std::sqrt(CGAL::to_double(sph.second)); + + + // Check that the point to be inserted is already included in the sphere of the simplex containing the preceding points + // TODO instead of Euclidean_distance ; use kernel_.squared_distance_d_object()(c, *begin); + // Add a loop on the three faces to check sphere before computing the circumsphere + // Add the computed sphere as cache; a vector of spheres depending on the number of faces ? 
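[Editorial aside, stepping out of the C++ for a moment: the test implemented just below — is the last vertex already inside the circumsphere of the others, and only if not, recompute on all vertices — is easy to prototype. A NumPy sketch under the assumption of affinely independent points; circumsphere() is a hypothetical helper, not a GUDHI function, standing in for what CGAL's construct_circumcenter_d_object/squared_distance_d_object compute exactly.]

    import numpy as np

    def circumsphere(P):
        # Center and squared radius of the sphere through all rows of P,
        # restricted to their affine hull: solve 2*G*y = diag(G) with
        # G = V V^T, V the edge vectors from P[0]; then c = P[0] + y @ V.
        V = P[1:] - P[0]
        G = V @ V.T
        y = np.linalg.solve(2. * G, np.diag(G))
        c = P[0] + y @ V
        return c, ((P[0] - c) ** 2).sum()

    P = np.array([[1., 0.], [0., 1.], [2., 1.]])
    c, r2 = circumsphere(P[:-1])        # sphere of all points but the last
    if ((P[-1] - c) ** 2).sum() > r2:   # last point outside: enlarge
        c, r2 = circumsphere(P)
    print(np.sqrt(r2))                  # filtration value of the simplex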
+ // +// if (Gudhi::Euclidean_distance()(cast_point_d_to_double(sph.first), point_to_be_inserted) > std::sqrt(cast_to_double(sph.second))) +// FT r = kernel_.squared_distance_d_object()(sph.first, sph.first); //*(points.cend()-1)); + if (kernel_.squared_distance_d_object()(sph.first, point_to_be_inserted) > sph.second) + sph = get_sphere(points.cbegin(), points.cend()); + + Filtration_value radius = std::sqrt(cast_to_double(sph.second)); + + #ifdef DEBUG_TRACES if (radius > cc_ptr_->max_radius()) std::clog << "radius > max_radius => expansion is blocked\n"; #endif // DEBUG_TRACES @@ -70,6 +169,9 @@ class Cech_blocker { private: SimplicialComplexForCech* sc_ptr_; Cech_complex* cc_ptr_; + Kernel kernel_; + //Vector_vertex_iterator vertex_handle_to_iterator_; + }; } // namespace cech_complex diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 6e00d7b5..81efd6ae 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -26,17 +26,24 @@ #include #include +#include // For EXACT or SAFE version + + // Type definitions using Simplex_tree = Gudhi::Simplex_tree<>; using Filtration_value = Simplex_tree::Filtration_value; -using Point = std::vector; +//using Point = std::vector; +using Kernel = CGAL::Epeck_d; +using FT = typename Kernel::FT; +using Point = typename Kernel::Point_d; + using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; -using Point_iterator = Point_cloud::const_iterator; -using Coordinate_iterator = Point::const_iterator; -using Min_sphere = Gudhi::Miniball::Miniball>; +// using Point_iterator = Point_cloud::const_iterator; +// using Coordinate_iterator = Point::const_iterator; +// using Min_sphere = Gudhi::Miniball::Miniball>; BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { // ---------------------------------------------------------------------------- @@ -45,17 +52,41 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { // // ---------------------------------------------------------------------------- Point_cloud points; - points.push_back({1., 0.}); // 0 - points.push_back({0., 1.}); // 1 - points.push_back({2., 1.}); // 2 - points.push_back({3., 2.}); // 3 - points.push_back({0., 3.}); // 4 - points.push_back({3. + std::sqrt(3.), 3.}); // 5 - points.push_back({1., 4.}); // 6 - points.push_back({3., 4.}); // 7 - points.push_back({2., 4. + std::sqrt(3.)}); // 8 - points.push_back({0., 4.}); // 9 - points.push_back({-0.5, 2.}); // 10 +// points.push_back({1., 0.}); // 0 +// points.push_back({0., 1.}); // 1 +// points.push_back({2., 1.}); // 2 +// points.push_back({3., 2.}); // 3 +// points.push_back({0., 3.}); // 4 +// points.push_back({3. + std::sqrt(3.), 3.}); // 5 +// points.push_back({1., 4.}); // 6 +// points.push_back({3., 4.}); // 7 +// points.push_back({2., 4. + std::sqrt(3.)}); // 8 +// points.push_back({0., 4.}); // 9 +// points.push_back({-0.5, 2.}); // 10 + + + std::vector point0({1., 0.}); + points.emplace_back(point0.begin(), point0.end()); + std::vector point1({0., 1.}); + points.emplace_back(point1.begin(), point1.end()); + std::vector point2({2., 1.}); + points.emplace_back(point2.begin(), point2.end()); + std::vector point3({3., 2.}); + points.emplace_back(point3.begin(), point3.end()); + std::vector point4({0., 3.}); + points.emplace_back(point4.begin(), point4.end()); + std::vector point5({3. 
+ std::sqrt(3.), 3.}); + points.emplace_back(point5.begin(), point5.end()); + std::vector point6({1., 4.}); + points.emplace_back(point6.begin(), point6.end()); + std::vector point7({3., 4.}); + points.emplace_back(point7.begin(), point7.end()); + std::vector point8({2., 4. + std::sqrt(3.)}); + points.emplace_back(point8.begin(), point8.end()); + std::vector point9({0., 4.}); + points.emplace_back(point9.begin(), point9.end()); + std::vector point10({-0.5, 2.}); + points.emplace_back(point10.begin(), point10.end()); Filtration_value max_radius = 1.0; std::clog << "========== NUMBER OF POINTS = " << points.size() << " - Cech max_radius = " << max_radius @@ -125,35 +156,38 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { for (std::size_t vertex = 0; vertex <= 2; vertex++) { points012.push_back(cech_complex_for_doc.get_point(vertex)); } - std::size_t dimension = points[0].end() - points[0].begin(); - Min_sphere ms012(dimension, points012.begin(), points012.end()); +// std::size_t dimension = points[0].end() - points[0].begin(); +// Min_sphere ms012(dimension, points012.begin(), points012.end()); + Kernel kern; Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2})); - std::clog << "f012= " << f012 << " | ms012_radius= " << std::sqrt(ms012.squared_radius()) << std::endl; + CGAL::NT_converter cast_to_double; + std::clog << "f012= " << f012 << " | points012_radius= " << std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points012.begin(), points012.end()))) << std::endl; + - GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(ms012.squared_radius())); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points012.begin(), points012.end())))); Point_cloud points1410; points1410.push_back(cech_complex_for_doc.get_point(1)); points1410.push_back(cech_complex_for_doc.get_point(4)); points1410.push_back(cech_complex_for_doc.get_point(10)); - Min_sphere ms1410(dimension, points1410.begin(), points1410.end()); +// Min_sphere ms1410(dimension, points1410.begin(), points1410.end()); Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10})); - std::clog << "f1410= " << f1410 << " | ms1410_radius= " << std::sqrt(ms1410.squared_radius()) << std::endl; + std::clog << "f1410= " << f1410 << " | points1410_radius= " << std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points1410.begin(), points1410.end()))) << std::endl; - GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, std::sqrt(ms1410.squared_radius())); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points1410.begin(), points1410.end())))); Point_cloud points469; points469.push_back(cech_complex_for_doc.get_point(4)); points469.push_back(cech_complex_for_doc.get_point(6)); points469.push_back(cech_complex_for_doc.get_point(9)); - Min_sphere ms469(dimension, points469.begin(), points469.end()); +// Min_sphere ms469(dimension, points469.begin(), points469.end()); Simplex_tree::Filtration_value f469 = st2.filtration(st2.find({4, 6, 9})); - std::clog << "f469= " << f469 << " | ms469_radius= " << std::sqrt(ms469.squared_radius()) << std::endl; + std::clog << "f469= " << f469 << " | points469_radius= " << std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points469.begin(), points469.end()))) << std::endl; - GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(ms469.squared_radius())); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, 
std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points469.begin(), points469.end())))); BOOST_CHECK((st2.find({6, 7, 8}) == st2.null_simplex())); BOOST_CHECK((st2.find({3, 5, 7}) == st2.null_simplex())); diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index daea08e2..ccd7d453 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -16,6 +16,8 @@ #include +#include // For EXACT or SAFE version + #include #include #include // infinity @@ -23,10 +25,14 @@ // Types definition using Simplex_tree = Gudhi::Simplex_tree; using Filtration_value = Simplex_tree::Filtration_value; -using Point = std::vector; +// using Point = std::vector; +// using Point_cloud = std::vector; + +using Kernel = CGAL::Epeck_d; +using Point = typename Kernel::Point_d; using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; using Field_Zp = Gudhi::persistent_cohomology::Field_Zp; using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology; diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 85790baf..f69ed6ec 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1278,8 +1278,10 @@ class Simplex_tree { intersection.emplace_back(next->first, Node(nullptr, filt)); } } + std::clog << "Hind: after intersection insertion" << std::endl; if (intersection.size() != 0) { // Reverse the order to insert + std::clog << "Hind: declare new siblings" << std::endl; Siblings * new_sib = new Siblings(siblings, // oncles simplex->first, // parent boost::adaptors::reverse(intersection)); // boost::container::ordered_unique_range_t @@ -1288,10 +1290,12 @@ class Simplex_tree { for (auto new_sib_member = new_sib->members().begin(); new_sib_member != new_sib->members().end(); new_sib_member++) { + std::clog << "Hind: check the blocker result" << std::endl; bool blocker_result = block_simplex(new_sib_member); // new_sib member has been blocked by the blocker function // add it to the list to be removed - do not perform it while looping on it if (blocker_result) { + std::clog << "Hind: add to list of blocked sib to be removed" << std::endl; blocked_new_sib_vertex_list.push_back(new_sib_member->first); } } diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h index 9bbc62b7..ae5168aa 100644 --- a/src/common/include/gudhi/distance_functions.h +++ b/src/common/include/gudhi/distance_functions.h @@ -18,6 +18,8 @@ #include #include +#include + #include // for std::sqrt #include // for std::decay #include // for std::begin, std::end @@ -63,6 +65,23 @@ class Euclidean_distance { * The points are assumed to have the same dimension. 
*/ class Minimal_enclosing_ball_radius { public: + /** \brief TODO + * + * @param[in] point_1 + * @param[in] point_2 + * @return + * \tparam Point + * + */ + //typename FT = typename Kernel::FT, + template< typename Kernel = CGAL::Epeck_d, + typename Point= typename Kernel::Point_d> + double operator()(const Point& point_1, const Point& point_2) const { + std::clog << "Added template: distance betw points 1 and 2" << std::endl; + Kernel kernel_; + return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; + } + /** \brief Minimal_enclosing_ball_radius from two points. * * @param[in] point_1 First point. @@ -75,8 +94,29 @@ class Minimal_enclosing_ball_radius { template< typename Point > typename std::iterator_traits::type>::value_type operator()(const Point& point_1, const Point& point_2) const { + std::clog << "Hind: Minimal_enclosing_ball_radius point1 et 2; Euclidean" << std::endl; + std::clog << "#" << *point_1.begin() << "##" << *point_2.begin() << std::endl; return Euclidean_distance()(point_1, point_2) / 2.; } + + + /** \brief TODO + * + * @param[in] point_cloud The points. + * @return + * \tparam Point_cloud + * + */ + //typename FT = typename Kernel::FT, + template< typename Kernel = CGAL::Epeck_d, + typename Point= typename Kernel::Point_d, + typename Point_cloud = std::vector> + double operator()(const Point_cloud& point_cloud) const { + std::clog << "Added template: distance in point cloud" << std::endl; + Kernel kernel_; + return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); + } + /** \brief Minimal_enclosing_ball_radius from a point cloud. * * @param[in] point_cloud The points. @@ -93,6 +133,8 @@ class Minimal_enclosing_ball_radius { typename Coordinate = typename std::iterator_traits::value_type> Coordinate operator()(const Point_cloud& point_cloud) const { + std::clog << "Hind: Minimal_enclosing_ball_radius point cloud; Miniball" << std::endl; + using Min_sphere = Miniball::Miniball>; Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end()); diff --git a/src/common/include/gudhi/graph_simplicial_complex.h b/src/common/include/gudhi/graph_simplicial_complex.h index da9dee7d..9190182c 100644 --- a/src/common/include/gudhi/graph_simplicial_complex.h +++ b/src/common/include/gudhi/graph_simplicial_complex.h @@ -18,6 +18,8 @@ #include #include // for std::tie +#include + namespace Gudhi { /** @file * @brief Graph simplicial complex methods @@ -76,6 +78,7 @@ Proximity_graph compute_proximity_graph( for (auto it_u = points.begin(); it_u != points.end(); ++it_u) { idx_v = idx_u + 1; for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) { + std::clog << "#idx_u" << idx_u << "#idx_v " << idx_v << std::endl; fil = distance(*it_u, *it_v); if (fil <= threshold) { edges.emplace_back(idx_u, idx_v); -- cgit v1.2.3 From bc28892cbae3d9a9fcc19a0fbcfcc98bb9195ff7 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 18 Aug 2021 17:45:51 +0200 Subject: Modify the algorithm to get the minimal enclosing ball --- .../include/gudhi/Cech_complex_blocker.h | 167 +++++++++++++-------- 1 file changed, 102 insertions(+), 65 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index f2cf5ccc..acb53143 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -22,6 +22,9 @@ #include #include +#include +#include 
+#include #include // for std::sqrt namespace Gudhi { @@ -57,32 +60,7 @@ class Cech_blocker { using FT = typename Kernel::FT; // Sphere is a pair of point and squared radius. using Sphere = typename std::pair; - - - // Add an int in TDS to save point index in the structure -// using TDS = CGAL::Triangulation_data_structure, -// CGAL::Triangulation_full_cell >; -// -// /** \brief A (Weighted or not) Delaunay triangulation of a set of points in \f$ \mathbb{R}^D\f$.*/ -// using Triangulation = CGAL::Delaunay_triangulation; -// // Vertex_iterator type from CGAL. -// using CGAL_vertex_iterator = typename Triangulation::Vertex_iterator; - - // Structure to switch from simplex tree vertex handle to CGAL vertex iterator. - //using Vector_vertex_iterator = std::vector< CGAL_vertex_iterator >; - - - /** \brief get_point_ returns the point corresponding to the vertex given as parameter. - * Only for internal use for faster access. - * - * @param[in] vertex Vertex handle of the point to retrieve. - * @return The point found. - */ -/* const Point_d& get_point_(std::size_t vertex) const { - return vertex_handle_to_iterator_[vertex]->point(); - } */ /** \internal \brief TODO * \param[in] @@ -102,6 +80,19 @@ class Cech_blocker { return std::make_pair(std::move(c), std::move(r)); } + /** \internal \brief TODO + * \param[in] + * \return */ + template + class CompareSpheresRadii + { + public: + CGAL::NT_converter cast_to_double; + bool operator()(const Sphere& firstSphere, const Sphere& secondSphere) + { + return cast_to_double(firstSphere.second) < cast_to_double(secondSphere.second); + } + }; /** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex * radius and returns if the simplex expansion must be blocked. @@ -109,51 +100,98 @@ class Cech_blocker { * \return true if the simplex radius is greater than the Cech_complex max_radius*/ bool operator()(Simplex_handle sh) { using Point_cloud = std::vector; - Point_cloud points; + CGAL::NT_converter cast_to_double; + Filtration_value radius = 0.; + std::string key_to_permute; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices - for (auto face : sc_ptr_->simplex_vertex_range(sh)) { - ///////////////////////////////////////////////////////////////////// - - + // std::set enclosing_ball_radii; + std::set > enclosing_ball_spheres; + for (auto face : sc_ptr_->boundary_simplex_range(sh)) { + // Find which vertex of sh is missing in face. We rely on the fact that simplex_vertex_range is sorted. 
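[Editorial aside: the loop that follows generalizes the earlier check facet by facet. Each facet's sphere is fetched from (or added to) a cache; it encloses the whole simplex exactly when the opposite vertex lies inside it, and the smallest such facet sphere gives the filtration value. Only when every facet fails is the circumsphere of all the vertices computed. A hedged Python sketch of that strategy, reusing the hypothetical circumsphere() helper from the earlier aside and assuming numpy as np:]

    def simplex_sphere(P, sphere_cache):
        # P: simplex vertices (rows); sphere_cache mimics the cache_ member,
        # mapping a facet key to its already-computed (center, squared radius).
        best = None
        for i in range(len(P)):
            facet = np.delete(P, i, axis=0)
            key = tuple(sorted(map(tuple, facet.tolist())))
            if key not in sphere_cache:
                sphere_cache[key] = circumsphere(facet)
            c, r2 = sphere_cache[key]
            # The facet sphere encloses the simplex iff the opposite vertex is inside
            if ((P[i] - c) ** 2).sum() <= r2 and (best is None or r2 < best[1]):
                best = (c, r2)
        # No facet sphere works: fall back to the circumsphere of all vertices
        return best if best is not None else circumsphere(P)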
+ auto longlist = sc_ptr_->simplex_vertex_range(sh); + auto shortlist = sc_ptr_->simplex_vertex_range(face); + + std::clog << "Hind debug: within FACE loop "<< std::endl; + // TODO to remove + for (auto i = std::begin(longlist); i != std::end(longlist);++i) + std::clog << "Hind debug: longlist: " << cc_ptr_->get_point(*i) << std::endl; + for (auto i = std::begin(shortlist); i != std::end(shortlist);++i) + std::clog << "Hind debug: shortlist: " << cc_ptr_->get_point(*i) << std::endl; + + auto longiter = std::begin(longlist); + auto shortiter = std::begin(shortlist); + auto enditer = std::end(shortlist); + while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; } + auto extra = *longiter; // Vertex_handle + + std::clog << "Hind debug: extra vertex: " << cc_ptr_->get_point(extra) << std::endl; - /////////////////////////////////// + Point_cloud face_points; + std::string key, key_extra; + for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { + face_points.push_back(cc_ptr_->get_point(vertex)); + key.append(std::to_string(vertex)); + #ifdef DEBUG_TRACES + std::clog << "#(" << vertex << ")#"; + #endif // DEBUG_TRACES + } + key_extra = key; + key_extra.append(std::to_string(extra)); + key_to_permute = key_extra; + std::clog << "END OF VERTICES " << std::endl; + std::clog << "KEY is: " << key << std::endl; + std::clog << "KEY extra is: " << key_extra << std::endl; + Sphere sph; + auto it = cache_.find(key); + if(it != cache_.end()) + sph = it->second; + else { + sph = get_sphere(face_points.cbegin(), face_points.cend()); + } + if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { + radius = std::sqrt(cast_to_double(sph.second)); + #ifdef DEBUG_TRACES + std::clog << "circumcenter: " << sph.first << ", radius: " << radius << std::endl; + #endif // DEBUG_TRACES + std::clog << "distance FYI: " << kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " < " << cast_to_double(sph.second) << std::endl; + // enclosing_ball_radii.insert(radius); + enclosing_ball_spheres.insert(sph); + cache_[key_extra] = sph; + } + else {// TODO to remove + std::clog << "vertex not included BECAUSE DISTANCE: "<< kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " AND RAD SPHERE: " << sph.second << std::endl; + } } - - for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { - points.push_back(cc_ptr_->get_point(vertex)); -// points.push_back(get_point_(vertex)); -#ifdef DEBUG_TRACES - std::clog << "#(" << vertex << ")#"; -#endif // DEBUG_TRACES + // Get the minimal radius of all faces enclosing balls if exists + if (!enclosing_ball_spheres.empty()) { + // radius = *enclosing_ball_radii.begin(); + Sphere sph_min = *enclosing_ball_spheres.begin(); + radius = std::sqrt(cast_to_double(sph_min.second)); + // std::clog << "CHECK that radius of min sphere is min radius: " << std::sqrt(cast_to_double(sph_min.second)) << "; and RADIUS min: " << radius << std::endl; + // Set all key_to_permute permutations to min sphere in cache + do + { + cache_[key_to_permute] = sph_min; + } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); } - // TODO to remove - //Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points); - // Hind: Here change the algo of the enclosing Minimal_enclosing_ball_radius - auto point_to_be_inserted = points.back(); - Sphere sph = get_sphere(points.cbegin(), points.cend()-1); - -// Sphere sph = get_sphere(points.cbegin(), points.cend()-1); - CGAL::NT_converter 
cast_to_double; -// CGAL::NT_converter cast_point_d_to_double; + std::clog << "END OF FACES ; radius = " << radius << std::endl; - std::clog << "circumcenter: " << sph.first << ", radius: " << std::sqrt(cast_to_double(sph.second))<< std::endl; - // TODO to remove - // Filtration_value test = std::sqrt(CGAL::to_double(sph.second)); - - - // Check that the point to be inserted is already included in the sphere of the simplex containing the preceding points - // TODO instead of Euclidean_distance ; use kernel_.squared_distance_d_object()(c, *begin); - // Add a loop on the three faces to check sphere before computing the circumsphere - // Add the computed sphere as cache; a vector of spheres depending on the number of faces ? - // -// if (Gudhi::Euclidean_distance()(cast_point_d_to_double(sph.first), point_to_be_inserted) > std::sqrt(cast_to_double(sph.second))) -// FT r = kernel_.squared_distance_d_object()(sph.first, sph.first); //*(points.cend()-1)); - if (kernel_.squared_distance_d_object()(sph.first, point_to_be_inserted) > sph.second) - sph = get_sphere(points.cbegin(), points.cend()); - - Filtration_value radius = std::sqrt(cast_to_double(sph.second)); + if (radius == 0.) { // Spheres of each face don't contain the whole simplex + Point_cloud points; + for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { + points.push_back(cc_ptr_->get_point(vertex)); + } + Sphere sph = get_sphere(points.cbegin(), points.cend()); + radius = std::sqrt(cast_to_double(sph.second)); + std::clog << "GLOBAL SPHERE radius = " << radius << std::endl; + // Set all key_to_permute permutations to sphere in cache + do + { + cache_[key_to_permute] = sph; + } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); + } #ifdef DEBUG_TRACES @@ -170,8 +208,7 @@ class Cech_blocker { SimplicialComplexForCech* sc_ptr_; Cech_complex* cc_ptr_; Kernel kernel_; - //Vector_vertex_iterator vertex_handle_to_iterator_; - + std::map cache_; }; } // namespace cech_complex -- cgit v1.2.3 From 839093da012bda0ee16744f1e340a8a8eb04f0af Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 6 Sep 2021 17:19:32 +0200 Subject: Remove unnecessary key permutations storage in cache --- .../example/cech_complex_example_from_points.cpp | 16 ++++- .../include/gudhi/Cech_complex_blocker.h | 70 ++++++++++++++-------- 2 files changed, 59 insertions(+), 27 deletions(-) diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp index ac17fc73..e78ad51d 100644 --- a/src/Cech_complex/example/cech_complex_example_from_points.cpp +++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp @@ -43,6 +43,18 @@ int main() { std::vector point5({3. + std::sqrt(3.), 3.}); points.emplace_back(point5.begin(), point5.end()); +/* + std::vector point6({1., 4.}); + points.emplace_back(point6.begin(), point6.end()); + std::vector point7({3., 4.}); + points.emplace_back(point7.begin(), point7.end()); + std::vector point8({2., 4. 
+ std::sqrt(3.)}); + points.emplace_back(point8.begin(), point8.end()); + std::vector point9({0., 4.}); + points.emplace_back(point9.begin(), point9.end()); + std::vector point10({-0.5, 2.}); + points.emplace_back(point10.begin(), point10.end()); +*/ // points.emplace_back(Point(std::vector({1., 0.}))); // points.emplace_back(Point(std::vector({0., 1.}))); // points.emplace_back(Point(std::vector({2., 1.}))); @@ -68,13 +80,13 @@ int main() { // ---------------------------------------------------------------------------- // Init of a Cech complex from points // ---------------------------------------------------------------------------- - Filtration_value max_radius = 10.; + Filtration_value max_radius = 10.; //100.; std::clog << "Hind: Just before the Cech constructor" << std::endl; Cech_complex cech_complex_from_points(points, max_radius); std::clog << "Hind: Just after the Cech constructor" << std::endl; Simplex_tree stree; - cech_complex_from_points.create_complex(stree, 3); + cech_complex_from_points.create_complex(stree, 3); //6 // ---------------------------------------------------------------------------- // Display information about the one skeleton Cech complex // ---------------------------------------------------------------------------- diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index acb53143..0fc76c6d 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -102,7 +102,8 @@ class Cech_blocker { using Point_cloud = std::vector; CGAL::NT_converter cast_to_double; Filtration_value radius = 0.; - std::string key_to_permute; +// std::string key_to_permute; + std::vector faces_keys; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices // std::set enclosing_ball_radii; @@ -113,12 +114,12 @@ class Cech_blocker { auto longlist = sc_ptr_->simplex_vertex_range(sh); auto shortlist = sc_ptr_->simplex_vertex_range(face); - std::clog << "Hind debug: within FACE loop "<< std::endl; +// std::clog << "Hind debug: within FACE loop "<< std::endl; // TODO to remove - for (auto i = std::begin(longlist); i != std::end(longlist);++i) - std::clog << "Hind debug: longlist: " << cc_ptr_->get_point(*i) << std::endl; - for (auto i = std::begin(shortlist); i != std::end(shortlist);++i) - std::clog << "Hind debug: shortlist: " << cc_ptr_->get_point(*i) << std::endl; +// for (auto i = std::begin(longlist); i != std::end(longlist);++i) +// std::clog << "Hind debug: longlist: " << cc_ptr_->get_point(*i) << std::endl; +// for (auto i = std::begin(shortlist); i != std::end(shortlist);++i) +// std::clog << "Hind debug: shortlist: " << cc_ptr_->get_point(*i) << std::endl; auto longiter = std::begin(longlist); auto shortiter = std::begin(shortlist); @@ -126,7 +127,7 @@ class Cech_blocker { while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; } auto extra = *longiter; // Vertex_handle - std::clog << "Hind debug: extra vertex: " << cc_ptr_->get_point(extra) << std::endl; +// std::clog << "Hind debug: extra vertex: " << cc_ptr_->get_point(extra) << std::endl; Point_cloud face_points; std::string key, key_extra; @@ -139,10 +140,11 @@ class Cech_blocker { } key_extra = key; key_extra.append(std::to_string(extra)); - key_to_permute = key_extra; - std::clog << "END OF VERTICES " << std::endl; - std::clog << "KEY is: " << key << 
std::endl; - std::clog << "KEY extra is: " << key_extra << std::endl; + faces_keys.push_back(key_extra); +// key_to_permute = key_extra; +// std::clog << "END OF VERTICES " << std::endl; +// std::clog << "KEY is: " << key << std::endl; +// std::clog << "KEY extra is: " << key_extra << std::endl; Sphere sph; auto it = cache_.find(key); if(it != cache_.end()) @@ -155,14 +157,14 @@ class Cech_blocker { #ifdef DEBUG_TRACES std::clog << "circumcenter: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES - std::clog << "distance FYI: " << kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " < " << cast_to_double(sph.second) << std::endl; +// std::clog << "distance FYI: " << kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " < " << cast_to_double(sph.second) << std::endl; // enclosing_ball_radii.insert(radius); enclosing_ball_spheres.insert(sph); cache_[key_extra] = sph; } - else {// TODO to remove - std::clog << "vertex not included BECAUSE DISTANCE: "<< kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " AND RAD SPHERE: " << sph.second << std::endl; - } +// else {// TODO to remove +// std::clog << "vertex not included BECAUSE DISTANCE: "<< kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " AND RAD SPHERE: " << sph.second << std::endl; +// } } // Get the minimal radius of all faces enclosing balls if exists if (!enclosing_ball_spheres.empty()) { @@ -171,12 +173,21 @@ class Cech_blocker { radius = std::sqrt(cast_to_double(sph_min.second)); // std::clog << "CHECK that radius of min sphere is min radius: " << std::sqrt(cast_to_double(sph_min.second)) << "; and RADIUS min: " << radius << std::endl; // Set all key_to_permute permutations to min sphere in cache - do - { - cache_[key_to_permute] = sph_min; - } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); +// do +// { +// if (cache_.find(key_to_permute) != cache_.end()) { +// if (cast_to_double(cache_[key_to_permute].second) > cast_to_double(sph_min.second)) +// cache_[key_to_permute] = sph_min; +// } +// else { +// cache_[key_to_permute] = sph_min; +// } +// } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); + for (auto k : faces_keys) { + cache_[k] = sph_min; + } } - std::clog << "END OF FACES ; radius = " << radius << std::endl; +// std::clog << "END OF FACES ; radius = " << radius << std::endl; if (radius == 0.) 
{ // Spheres of each face don't contain the whole simplex Point_cloud points; @@ -185,12 +196,21 @@ class Cech_blocker { } Sphere sph = get_sphere(points.cbegin(), points.cend()); radius = std::sqrt(cast_to_double(sph.second)); - std::clog << "GLOBAL SPHERE radius = " << radius << std::endl; +// std::clog << "GLOBAL SPHERE radius = " << radius << std::endl; // Set all key_to_permute permutations to sphere in cache - do - { - cache_[key_to_permute] = sph; - } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); +// do +// { +// // if (cache_.find(key_to_permute) != cache_.end()) { +// // if (cast_to_double(cache_[key_to_permute].second) > cast_to_double(sph.second)) +// // cache_[key_to_permute] = sph; +// // } +// // else { +// // cache_[key_to_permute] = sph; +// // } +// } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); +// for (auto k : faces_keys) { +// cache_[k] = sph; +// } } -- cgit v1.2.3 From 44f754ee58aeee043891f4494892798b9807374b Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 8 Sep 2021 14:55:48 +0200 Subject: Change cache so that the index of the stored sphere is used as key --- .../include/gudhi/Cech_complex_blocker.h | 44 ++++++++++++++-------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 0fc76c6d..3cac9ee2 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -103,7 +103,7 @@ class Cech_blocker { CGAL::NT_converter cast_to_double; Filtration_value radius = 0.; // std::string key_to_permute; - std::vector faces_keys; +// std::vector faces_keys; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices // std::set enclosing_ball_radii; @@ -130,28 +130,35 @@ class Cech_blocker { // std::clog << "Hind debug: extra vertex: " << cc_ptr_->get_point(extra) << std::endl; Point_cloud face_points; - std::string key, key_extra; +// std::string key, key_extra; for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { face_points.push_back(cc_ptr_->get_point(vertex)); - key.append(std::to_string(vertex)); +// key.append(std::to_string(vertex)); #ifdef DEBUG_TRACES std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } - key_extra = key; - key_extra.append(std::to_string(extra)); - faces_keys.push_back(key_extra); +// key_extra = key; +// key_extra.append(std::to_string(extra)); +// faces_keys.push_back(key_extra); // key_to_permute = key_extra; // std::clog << "END OF VERTICES " << std::endl; // std::clog << "KEY is: " << key << std::endl; // std::clog << "KEY extra is: " << key_extra << std::endl; Sphere sph; - auto it = cache_.find(key); - if(it != cache_.end()) - sph = it->second; + auto k = sc_ptr_->key(sh); + if(k != sc_ptr_->null_key()) + sph = cache_[k]; else { sph = get_sphere(face_points.cbegin(), face_points.cend()); } +// auto it = cache_.find(key); +// if(it != cache_.end()) +// sph = it->second; +// else { +// sph = get_sphere(face_points.cbegin(), face_points.cend()); +// } + if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { radius = std::sqrt(cast_to_double(sph.second)); #ifdef DEBUG_TRACES @@ -160,7 +167,9 @@ class Cech_blocker { // std::clog << "distance FYI: " << kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " < " << 
cast_to_double(sph.second) << std::endl; // enclosing_ball_radii.insert(radius); enclosing_ball_spheres.insert(sph); - cache_[key_extra] = sph; +// cache_[key_extra] = sph; +// sc_ptr_->assign_key(sh, cache_.size()); +// cache_.push_back(sph); } // else {// TODO to remove // std::clog << "vertex not included BECAUSE DISTANCE: "<< kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " AND RAD SPHERE: " << sph.second << std::endl; // } @@ -174,7 +183,7 @@ // std::clog << "CHECK that radius of min sphere is min radius: " << std::sqrt(cast_to_double(sph_min.second)) << "; and RADIUS min: " << radius << std::endl; // Set all key_to_permute permutations to min sphere in cache // do -// { +// { // if (cache_.find(key_to_permute) != cache_.end()) { // if (cast_to_double(cache_[key_to_permute].second) > cast_to_double(sph_min.second)) // cache_[key_to_permute] = sph_min; // } // else { // cache_[key_to_permute] = sph_min; // } // } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); - for (auto k : faces_keys) { - cache_[k] = sph_min; - } +// for (auto k : faces_keys) { +// cache_[k] = sph_min; +// } + sc_ptr_->assign_key(sh, cache_.size()); + cache_.push_back(sph_min); } // std::clog << "END OF FACES ; radius = " << radius << std::endl; @@ -211,6 +222,8 @@ // for (auto k : faces_keys) { // cache_[k] = sph; // } +// sc_ptr_->assign_key(sh, cache_.size()); +// cache_.push_back(sph); } @@ -228,7 +241,8 @@ SimplicialComplexForCech* sc_ptr_; Cech_complex* cc_ptr_; Kernel kernel_; - std::map cache_; +// std::map cache_; + std::vector cache_; }; } // namespace cech_complex -- cgit v1.2.3 From 8749199e00c0ed1c32b8e0198a65797de3ad192a Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 27 Sep 2021 15:19:25 +0200 Subject: Add a CMake option to enable or disable the datasets fetching test (disabled by default) --- src/cmake/modules/GUDHI_modules.cmake | 11 ++++++----- src/python/CMakeLists.txt | 4 +++- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index ccaf1ac5..9cc1a8f5 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -17,11 +17,12 @@ function(add_gudhi_module file_path) endfunction(add_gudhi_module) -option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF) -option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON) -option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON) -option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON) +option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF) +option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installation" OFF) +option(WITH_NETWORK "Activate/deactivate datasets fetching test which uses the Internet" OFF) +option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) +option(WITH_GUDHI_TEST "Activate/deactivate tests compilation and installation" ON) +option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) if (WITH_GUDHI_BENCHMARK) set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};benchmark") diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 6f117588..6c8dfe32 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -543,7 +543,9 @@ if(PYTHONINTERP_FOUND) endif() # Fetch remote datasets - add_gudhi_py_test(test_remote_datasets) + if(WITH_NETWORK) + add_gudhi_py_test(test_remote_datasets) + endif() # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") -- cgit v1.2.3 From 613db2444a9a12a64b097b944d0180e4fdbff71f Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 27 Sep 2021 17:32:55 +0200 Subject: Document option WITH_NETWORK in installation manual and tests_strategy Enable WITH_NETWORK option in some of the CI platforms (for minimal testing) --- .appveyor.yml | 5 ++++- .circleci/config.yml | 18 ++++++++++++++++++ .github/for_maintainers/tests_strategy.md | 4 +++- src/common/doc/installation.h | 2 ++ 4 files changed, 27 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 9ff8f157..b44e08e1 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -29,6 +29,9 @@ environment: - target: Python CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON + - target: PythonTestsWithNetwork CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_NETWORK=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON + cache: - c:\Tools\vcpkg\installed @@ -56,7 +59,7 @@ build_script: - mkdir build - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% %CMAKE_GMP_FLAGS% %CMAKE_MPFR_FLAGS% %CMAKE_VCPKG_FLAGS% .. - - if [%target%]==[Python] ( + - if [[%target%]==[Python] || [%target%]==[PythonTestsWithNetwork]] ( cd src/python & type setup.py & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & diff --git a/.circleci/config.yml b/.circleci/config.yml index f6a875dd..85e42f8a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -77,6 +77,23 @@ jobs: path: /tmp/htmlcov destination: htmlcov + python_tests_with_network: + docker: + - image: gudhi/ci_for_gudhi:latest + steps: + - checkout + - run: + name: Build and test python module with network + command: | + git submodule init + git submodule update + mkdir build + cd build + cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_TEST=ON -DWITH_NETWORK=ON .. + cd src/python + python3 setup.py build_ext --inplace + ctest --output-on-failure + doxygen: docker: - image: gudhi/ci_for_gudhi:latest @@ -245,4 +262,5 @@ workflows: - tests - utils - python + - python_tests_with_network - doxygen diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md index 9c181740..8fd7ac0d 100644 --- a/.github/for_maintainers/tests_strategy.md +++ b/.github/for_maintainers/tests_strategy.md @@ -8,13 +8,14 @@ The aim is to help maintainers to anticipate third parties modifications, update ### Linux -As all the third parties are already installed (thanks to docker), the compilations has been seperated by categories to be parallelized: +As all the third parties are already installed (thanks to docker), the compilation has been separated by categories to be parallelized: * examples (C++) * tests (C++) * utils (C++) * doxygen (C++ documentation that is available in the artefacts) * python (including documentation and code coverage that are available in the artefacts) +* python_tests_with_network (same as python, with the WITH_NETWORK option enabled, which adds the datasets fetching test) (cf. 
`.circleci/config.yml`) @@ -45,6 +46,7 @@ The compilations has been seperated by categories to be parallelized, but I don' * tests (C++) * utils (C++) * python +* python tests with network Doxygen (C++) is not tested. (cf. `.appveyor.yml`) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 610aa17e..72d4b1e5 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -40,6 +40,8 @@ make \endverbatim * `make test` is using Ctest (CMake test driver * program). If some of the tests are failing, please send us the result of the following command: * \verbatim ctest --output-on-failure \endverbatim + * Testing the datasets fetching feature requires internet access and is disabled by default. To include this test, set WITH_NETWORK to ON when building in the previous step (note that this test is part of the python module): + * \verbatim cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_TEST=ON -DWITH_NETWORK=ON -DWITH_GUDHI_PYTHON=ON .. \endverbatim * * \subsection documentationgeneration Documentation * To generate the documentation, Doxygen is required. -- cgit v1.2.3 From 3447def563dd7817f7fc1009133685a0ff6ddb43 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 28 Sep 2021 14:25:10 +0200 Subject: Use 'or' in if instead of '||' in appveyor.yml --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index b44e08e1..ca2ca15c 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -59,7 +59,7 @@ build_script: - mkdir build - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% %CMAKE_GMP_FLAGS% %CMAKE_MPFR_FLAGS% %CMAKE_VCPKG_FLAGS% .. - - if [[%target%]==[Python] || [%target%]==[PythonTestsWithNetwork]] ( + - if or ([%target%]==[Python]) ([%target%]==[PythonTestsWithNetwork]) ( cd src/python & type setup.py & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & -- cgit v1.2.3 From c2bdc8a749449d41ec367089aecd975fe633c121 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 28 Sep 2021 14:42:35 +0200 Subject: Replace '()' with '{}' in if statement in appveyor.yml --- .appveyor.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index ca2ca15c..521ec42d 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -59,12 +59,12 @@ build_script: - mkdir build - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% %CMAKE_GMP_FLAGS% %CMAKE_MPFR_FLAGS% %CMAKE_VCPKG_FLAGS% .. 
- - if or ([%target%]==[Python]) ([%target%]==[PythonTestsWithNetwork]) ( + - if or ([%target%]==[Python]) ([%target%]==[PythonTestsWithNetwork]) { cd src/python & type setup.py & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 --output-on-failure -C Release - ) else ( + } else { MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 --output-on-failure -C Release -E diff_files - ) + } -- cgit v1.2.3 From 767d9fca5da2d3dd9698a5c27e9bedc159271f67 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 30 Sep 2021 11:05:17 +0200 Subject: Move minimal enclosing balls cache to Cech_complex.h instead of the blocker Modify cech test to be more relevant regarding the current algorithm Do some cleaning --- .../benchmark/cech_complex_benchmark.cpp | 2 +- .../example/cech_complex_example_from_points.cpp | 42 +------ .../example/cech_complex_step_by_step.cpp | 8 -- src/Cech_complex/include/gudhi/Cech_complex.h | 38 +++---- .../include/gudhi/Cech_complex_blocker.h | 121 +++------------------ src/Cech_complex/test/test_cech_complex.cpp | 37 ++----- src/Cech_complex/utilities/cech_persistence.cpp | 4 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 4 - src/common/include/gudhi/distance_functions.h | 20 +--- .../include/gudhi/graph_simplicial_complex.h | 3 - 10 files changed, 54 insertions(+), 225 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index c332c656..4a1aa06e 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -33,7 +33,7 @@ using Points_off_reader = Gudhi::Points_off_reader; using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; using Kernel = CGAL::Epeck_d; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; class Minimal_enclosing_ball_radius { public: diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp index e78ad51d..78861951 100644 --- a/src/Cech_complex/example/cech_complex_example_from_points.cpp +++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp @@ -10,25 +10,15 @@ int main() { // Type definitions -// using Point_cloud = std::vector>; using Simplex_tree = Gudhi::Simplex_tree; using Filtration_value = Simplex_tree::Filtration_value; using Kernel = CGAL::Epeck_d; using FT = typename Kernel::FT; using Point = typename Kernel::Point_d; using Point_cloud = std::vector; - using Cech_complex = Gudhi::cech_complex::Cech_complex; + using Cech_complex = Gudhi::cech_complex::Cech_complex; Point_cloud points; -// points.push_back({1., 0.}); // 0 -// points.push_back({0., 1.}); // 1 -// points.push_back({2., 1.}); // 2 -// points.push_back({3., 2.}); // 3 -// points.push_back({0., 3.}); // 4 -// points.push_back({3. + std::sqrt(3.), 3.}); // 5 - -// std::vector point({0.0, 0.0, 0.0, 0.0}); -// points.emplace_back(point.begin(), point.end()); std::vector point0({1., 0.}); points.emplace_back(point0.begin(), point0.end()); @@ -42,8 +32,6 @@ int main() { points.emplace_back(point4.begin(), point4.end()); std::vector point5({3. 
+ std::sqrt(3.), 3.}); points.emplace_back(point5.begin(), point5.end()); - -/* std::vector point6({1., 4.}); points.emplace_back(point6.begin(), point6.end()); std::vector point7({3., 4.}); @@ -54,39 +42,15 @@ int main() { points.emplace_back(point9.begin(), point9.end()); std::vector point10({-0.5, 2.}); points.emplace_back(point10.begin(), point10.end()); -*/ -// points.emplace_back(Point(std::vector({1., 0.}))); -// points.emplace_back(Point(std::vector({0., 1.}))); -// points.emplace_back(Point(std::vector({2., 1.}))); -// points.emplace_back(Point(std::vector({3., 2.}))); -// points.emplace_back(Point(std::vector({0., 3.}))); -// points.emplace_back(Point(std::vector({3. + std::sqrt(3.), 3.}))); - - -// points.push_back(Point(1.0, 0.0)); -// points.push_back(Point(0.0, 1.0)); -// points.push_back(Point(2.0, 1.0)); -// points.push_back(Point(3.0, 2.0)); -// points.push_back(Point(0.0, 3.0)); -// points.push_back(Point(3.0 + std::sqrt(3.0), 3.0)); - - -// points.push_back({1., 4.}); // 6 -// points.push_back({3., 4.}); // 7 -// points.push_back({2., 4. + std::sqrt(3.)}); // 8 -// points.push_back({0., 4.}); // 9 -// points.push_back({-0.5, 2.}); // 10 // ---------------------------------------------------------------------------- // Init of a Cech complex from points // ---------------------------------------------------------------------------- - Filtration_value max_radius = 10.; //100.; - std::clog << "Hind: Just before the Cech constructor" << std::endl; + Filtration_value max_radius = 100.; //100.; Cech_complex cech_complex_from_points(points, max_radius); - std::clog << "Hind: Just after the Cech constructor" << std::endl; Simplex_tree stree; - cech_complex_from_points.create_complex(stree, 3); //6 + cech_complex_from_points.create_complex(stree, 6); //6 // ---------------------------------------------------------------------------- // Display information about the one skeleton Cech complex // ---------------------------------------------------------------------------- diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index ac08e6cc..44e7f945 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -13,8 +13,6 @@ #include #include -#include // TODO to remove ? 
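One change in this commit deserves a brief aside: the unit test now compares the filtration value of the {1, 4, 10} triangle against a hardcoded 1. instead of CGAL's computed radius, because for an obtuse triangle the circumsphere returned by construct_circumcenter_d_object is strictly larger than the minimal enclosing ball, which is centered on the longest edge. A self-contained illustration in plain C++ — the coordinates are the ones used for vertices 1, 4 and 10 in test_cech_complex.cpp; everything else is illustrative scaffolding:

#include <cmath>
#include <iostream>

int main() {
  // Triangle from the test: p1 = (0, 1), p4 = (0, 3), p10 = (-0.5, 2).
  double a = 2.0;                 // |p1 p4|, the longest edge
  double b = std::sqrt(1.25);     // |p1 p10|
  double c = std::sqrt(1.25);     // |p4 p10|
  double area = 0.5 * 2.0 * 0.5;  // base |p1 p4| = 2, height 0.5
  // Circumradius R = abc / (4 * area) = 2.5 / 2 = 1.25.
  double circumradius = (a * b * c) / (4.0 * area);
  // The angle at p10 is obtuse, so the minimal enclosing ball is the ball
  // on the longest edge [p1, p4]: center (0, 2), radius 1, with p10 inside
  // it (at distance 0.5 from the center).
  double meb_radius = a / 2.0;
  std::cout << circumradius << " vs " << meb_radius << std::endl;  // 1.25 vs 1
}

This is why GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, 1.) is the right expectation under the current algorithm, while the {0, 1, 2} and {4, 6, 9} triangles, being right triangles whose circumsphere and minimal enclosing ball coincide, can still be checked against compute_squared_radius_d_object.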
- #include #include @@ -36,7 +34,6 @@ using Simplex_tree = Gudhi::Simplex_tree<>; using Simplex_handle = Simplex_tree::Simplex_handle; using Filtration_value = Simplex_tree::Filtration_value; -// using Point = std::vector; using Kernel = CGAL::Epeck_d; using Point = typename Kernel::Point_d; using Points_off_reader = Gudhi::Points_off_reader; @@ -45,9 +42,6 @@ using Proximity_graph = Gudhi::Proximity_graph; class Cech_blocker { private: using Point_cloud = std::vector; -// using Point_iterator = Point_cloud::const_iterator; -// using Coordinate_iterator = Point::const_iterator; -// using Min_sphere = Gudhi::Miniball::Miniball>; public: bool operator()(Simplex_handle sh) { @@ -67,14 +61,12 @@ class Cech_blocker { } Cech_blocker(Simplex_tree& simplex_tree, Filtration_value max_radius, const std::vector& point_cloud) : simplex_tree_(simplex_tree), max_radius_(max_radius), point_cloud_(point_cloud) { -// dimension_ = point_cloud_[0].size(); } private: Simplex_tree simplex_tree_; Filtration_value max_radius_; std::vector point_cloud_; -// int dimension_; }; void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max); diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 2c6f202a..32a78aec 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -40,7 +40,7 @@ namespace cech_complex { * \tparam ForwardPointRange must be a range for which `std::begin()` and `std::end()` methods return input * iterators on a point. `std::begin()` and `std::end()` methods are also required for a point. */ -template +template class Cech_complex { private: // Required by compute_proximity_graph @@ -48,17 +48,17 @@ class Cech_complex { using Filtration_value = typename SimplicialComplexForProximityGraph::Filtration_value; using Proximity_graph = Gudhi::Proximity_graph; - // Retrieve Coordinate type from ForwardPointRange -// using Point_from_range_iterator = typename boost::range_const_iterator::type; -// using Point_from_range = typename std::iterator_traits::value_type; -// using Coordinate_iterator = typename boost::range_const_iterator::type; -// using Coordinate = typename std::iterator_traits::value_type; - public: - // Point and Point_cloud type definition - //using Point = std::vector; - using Point = typename Kernel::Point_d; - using Point_cloud = std::vector; + + using cech_blocker = Cech_blocker; + + using Point_d = typename cech_blocker::Point_d; + using Point_cloud = std::vector; + + // Numeric type of coordinates in the kernel + using FT = typename cech_blocker::FT; + // Sphere is a pair of point and squared radius. + using Sphere = typename std::pair; public: /** \brief Cech_complex constructor from a list of points. @@ -77,7 +77,6 @@ class Cech_complex { point_cloud_.assign(points.begin(), points.end()); - std::clog << "Hind: Just before the graph compute" << std::endl; cech_skeleton_graph_ = Gudhi::compute_proximity_graph( point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius()); } @@ -90,19 +89,14 @@ class Cech_complex { * @exception std::invalid_argument In debug mode, if `complex.num_vertices()` does not return 0. 
* */ - template void create_complex(SimplicialComplexForCechComplex& complex, int dim_max) { - std::clog << "Hind: in create complex" << std::endl; GUDHI_CHECK(complex.num_vertices() == 0, std::invalid_argument("Cech_complex::create_complex - simplicial complex is not empty")); // insert the proximity graph in the simplicial complex - std::clog << "Hind: before insert_graph" << std::endl; complex.insert_graph(cech_skeleton_graph_); // expand the graph until dimension dim_max - std::clog << "Hind: before expansion_with_blockers" << std::endl; - complex.expansion_with_blockers(dim_max, - Cech_blocker(&complex, this)); + complex.expansion_with_blockers(dim_max, cech_blocker(&complex, this)); } /** @return max_radius value given at construction. */ @@ -111,12 +105,18 @@ class Cech_complex { /** @param[in] vertex Point position in the range. * @return The point. */ - const Point& get_point(Vertex_handle vertex) const { return point_cloud_[vertex]; } + const Point_d& get_point(Vertex_handle vertex) const { return point_cloud_[vertex]; } + + /** + * @return Vector of cached spheres. + */ + std::vector & get_cache() { return cache_; } private: Proximity_graph cech_skeleton_graph_; Filtration_value max_radius_; Point_cloud point_cloud_; + std::vector cache_; }; } // namespace cech_complex diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 3cac9ee2..5edd005d 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -11,20 +11,11 @@ #ifndef CECH_COMPLEX_BLOCKER_H_ #define CECH_COMPLEX_BLOCKER_H_ -// TODO to remove -#include // for Gudhi::Minimal_enclosing_ball_radius - -#include // for CGAL::to_double -#include // -// #include -#include // For EXACT or SAFE version -#include +#include // for casting from FT to double #include #include #include -#include -#include #include // for std::sqrt namespace Gudhi { @@ -48,7 +39,6 @@ namespace cech_complex { template class Cech_blocker { private: -// using Point_cloud = typename Cech_complex::Point_cloud; using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle; using Filtration_value = typename SimplicialComplexForCech::Filtration_value; @@ -61,28 +51,18 @@ class Cech_blocker { // Sphere is a pair of point and squared radius. using Sphere = typename std::pair; - - /** \internal \brief TODO - * \param[in] - * \return */ template FT get_squared_radius(PointIterator begin, PointIterator end) const { return kernel_.compute_squared_radius_d_object()(begin, end); } - - /** \internal \brief TODO - * \param[in] - * \return */ + template Sphere get_sphere(PointIterator begin, PointIterator end) const { Point_d c = kernel_.construct_circumcenter_d_object()(begin, end); FT r = kernel_.squared_distance_d_object()(c, *begin); return std::make_pair(std::move(c), std::move(r)); } - - /** \internal \brief TODO - * \param[in] - * \return */ + template class CompareSpheresRadii { @@ -93,7 +73,7 @@ class Cech_blocker { return cast_to_double(firstSphere.second) < cast_to_double(secondSphere.second); } }; - + /** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex * radius and returns if the simplex expansion must be blocked. * \param[in] sh The Simplex_handle. 
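The caching pattern the final blocker converges on — each face's minimal enclosing sphere is stored once in a vector owned by the Cech complex, and the simplex tree's key mechanism maps a simplex to its slot — can be exercised in isolation. A minimal sketch, assuming a toy Sphere type instead of the CGAL one and relying, as the blocker does, on freshly inserted simplices carrying null_key():

#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  // Toy sphere: (center, squared radius); the real code stores CGAL spheres.
  using Sphere = std::pair<std::vector<double>, double>;

  Simplex_tree st;
  std::vector<int> triangle = {0, 1, 2};
  st.insert_simplex_and_subfaces(triangle);

  std::vector<Sphere> cache;
  std::vector<int> edge = {0, 1};
  auto sh = st.find(edge);
  if (st.key(sh) == st.null_key()) {  // first visit: nothing cached yet
    Sphere sph{{0.5, 0.}, 0.25};      // placeholder for get_sphere(...)
    st.assign_key(sh, cache.size());  // the cache index becomes the key
    cache.push_back(sph);
  }
  // Any later visit retrieves the sphere in O(1) through the stored key.
  std::cout << "cached squared radius: " << cache[st.key(sh)].second << std::endl;
}

Compared with the string-keyed std::map of the earlier commits, this avoids both the key construction and the factorial-cost next_permutation loop, at the price of tying the cache's lifetime to the complex that owns it.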
@@ -102,104 +82,54 @@ class Cech_blocker { using Point_cloud = std::vector; CGAL::NT_converter cast_to_double; Filtration_value radius = 0.; -// std::string key_to_permute; -// std::vector faces_keys; - + // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices - // std::set enclosing_ball_radii; std::set > enclosing_ball_spheres; for (auto face : sc_ptr_->boundary_simplex_range(sh)) { - // Find which vertex of sh is missing in face. We rely on the fact that simplex_vertex_range is sorted. auto longlist = sc_ptr_->simplex_vertex_range(sh); auto shortlist = sc_ptr_->simplex_vertex_range(face); -// std::clog << "Hind debug: within FACE loop "<< std::endl; - // TODO to remove -// for (auto i = std::begin(longlist); i != std::end(longlist);++i) -// std::clog << "Hind debug: longlist: " << cc_ptr_->get_point(*i) << std::endl; -// for (auto i = std::begin(shortlist); i != std::end(shortlist);++i) -// std::clog << "Hind debug: shortlist: " << cc_ptr_->get_point(*i) << std::endl; - auto longiter = std::begin(longlist); auto shortiter = std::begin(shortlist); auto enditer = std::end(shortlist); while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; } auto extra = *longiter; // Vertex_handle -// std::clog << "Hind debug: extra vertex: " << cc_ptr_->get_point(extra) << std::endl; - Point_cloud face_points; -// std::string key, key_extra; for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { face_points.push_back(cc_ptr_->get_point(vertex)); -// key.append(std::to_string(vertex)); #ifdef DEBUG_TRACES std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } -// key_extra = key; -// key_extra.append(std::to_string(extra)); -// faces_keys.push_back(key_extra); -// key_to_permute = key_extra; -// std::clog << "END OF VERTICES " << std::endl; -// std::clog << "KEY is: " << key << std::endl; -// std::clog << "KEY extra is: " << key_extra << std::endl; Sphere sph; - auto k = sc_ptr_->key(sh); - if(k != sc_ptr_->null_key()) - sph = cache_[k]; + auto face_sh = sc_ptr_->find(sc_ptr_->simplex_vertex_range(face)); + auto k = sc_ptr_->key(face_sh); + if(k != sc_ptr_->null_key()) { + sph = cc_ptr_->get_cache().at(k); + } else { sph = get_sphere(face_points.cbegin(), face_points.cend()); } -// auto it = cache_.find(key); -// if(it != cache_.end()) -// sph = it->second; -// else { -// sph = get_sphere(face_points.cbegin(), face_points.cend()); -// } if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { radius = std::sqrt(cast_to_double(sph.second)); #ifdef DEBUG_TRACES std::clog << "circumcenter: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES -// std::clog << "distance FYI: " << kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " < " << cast_to_double(sph.second) << std::endl; - // enclosing_ball_radii.insert(radius); enclosing_ball_spheres.insert(sph); -// cache_[key_extra] = sph; -// sc_ptr_->assign_key(sh, cache_.size()); -// cache_.push_back(sph); } -// else {// TODO to remove -// std::clog << "vertex not included BECAUSE DISTANCE: "<< kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) << " AND RAD SPHERE: " << sph.second << std::endl; -// } } // Get the minimal radius of all faces enclosing balls if exists if (!enclosing_ball_spheres.empty()) { - // radius = *enclosing_ball_radii.begin(); Sphere sph_min = *enclosing_ball_spheres.begin(); radius = 
std::sqrt(cast_to_double(sph_min.second)); - // std::clog << "CHECK that radius of min sphere is min radius: " << std::sqrt(cast_to_double(sph_min.second)) << "; and RADIUS min: " << radius << std::endl; - // Set all key_to_permute permutations to min sphere in cache -// do -// { -// if (cache_.find(key_to_permute) != cache_.end()) { -// if (cast_to_double(cache_[key_to_permute].second) > cast_to_double(sph_min.second)) -// cache_[key_to_permute] = sph_min; -// } -// else { -// cache_[key_to_permute] = sph_min; -// } -// } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); -// for (auto k : faces_keys) { -// cache_[k] = sph_min; -// } - sc_ptr_->assign_key(sh, cache_.size()); - cache_.push_back(sph_min); + + sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); + cc_ptr_->get_cache().push_back(sph_min); } -// std::clog << "END OF FACES ; radius = " << radius << std::endl; - + if (radius == 0.) { // Spheres of each face don't contain the whole simplex Point_cloud points; for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { @@ -207,25 +137,10 @@ class Cech_blocker { } Sphere sph = get_sphere(points.cbegin(), points.cend()); radius = std::sqrt(cast_to_double(sph.second)); -// std::clog << "GLOBAL SPHERE radius = " << radius << std::endl; - // Set all key_to_permute permutations to sphere in cache -// do -// { -// // if (cache_.find(key_to_permute) != cache_.end()) { -// // if (cast_to_double(cache_[key_to_permute].second) > cast_to_double(sph.second)) -// // cache_[key_to_permute] = sph; -// // } -// // else { -// // cache_[key_to_permute] = sph; -// // } -// } while(std::next_permutation(key_to_permute.begin(), key_to_permute.end())); -// for (auto k : faces_keys) { -// cache_[k] = sph; -// } -// sc_ptr_->assign_key(sh, cache_.size()); -// cache_.push_back(sph); - } + sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); + cc_ptr_->get_cache().push_back(sph); + } #ifdef DEBUG_TRACES if (radius > cc_ptr_->max_radius()) std::clog << "radius > max_radius => expansion is blocked\n"; @@ -241,8 +156,6 @@ class Cech_blocker { SimplicialComplexForCech* sc_ptr_; Cech_complex* cc_ptr_; Kernel kernel_; -// std::map cache_; - std::vector cache_; }; } // namespace cech_complex diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 81efd6ae..51b466da 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -24,26 +24,19 @@ #include #include #include -#include #include // For EXACT or SAFE version - // Type definitions using Simplex_tree = Gudhi::Simplex_tree<>; using Filtration_value = Simplex_tree::Filtration_value; -//using Point = std::vector; using Kernel = CGAL::Epeck_d; using FT = typename Kernel::FT; using Point = typename Kernel::Point_d; using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; - -// using Point_iterator = Point_cloud::const_iterator; -// using Coordinate_iterator = Point::const_iterator; -// using Min_sphere = Gudhi::Miniball::Miniball>; +using Cech_complex = Gudhi::cech_complex::Cech_complex; BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { // ---------------------------------------------------------------------------- @@ -52,18 +45,6 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { // // ---------------------------------------------------------------------------- Point_cloud points; -// points.push_back({1., 0.}); // 0 -// points.push_back({0., 
1.}); // 1 -// points.push_back({2., 1.}); // 2 -// points.push_back({3., 2.}); // 3 -// points.push_back({0., 3.}); // 4 -// points.push_back({3. + std::sqrt(3.), 3.}); // 5 -// points.push_back({1., 4.}); // 6 -// points.push_back({3., 4.}); // 7 -// points.push_back({2., 4. + std::sqrt(3.)}); // 8 -// points.push_back({0., 4.}); // 9 -// points.push_back({-0.5, 2.}); // 10 - std::vector point0({1., 0.}); points.emplace_back(point0.begin(), point0.end()); @@ -156,36 +137,32 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { for (std::size_t vertex = 0; vertex <= 2; vertex++) { points012.push_back(cech_complex_for_doc.get_point(vertex)); } -// std::size_t dimension = points[0].end() - points[0].begin(); -// Min_sphere ms012(dimension, points012.begin(), points012.end()); Kernel kern; Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2})); - CGAL::NT_converter cast_to_double; - std::clog << "f012= " << f012 << " | points012_radius= " << std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points012.begin(), points012.end()))) << std::endl; - + std::clog << "f012= " << f012 << std::endl; + CGAL::NT_converter cast_to_double; GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points012.begin(), points012.end())))); Point_cloud points1410; points1410.push_back(cech_complex_for_doc.get_point(1)); points1410.push_back(cech_complex_for_doc.get_point(4)); points1410.push_back(cech_complex_for_doc.get_point(10)); -// Min_sphere ms1410(dimension, points1410.begin(), points1410.end()); Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10})); - std::clog << "f1410= " << f1410 << " | points1410_radius= " << std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points1410.begin(), points1410.end()))) << std::endl; + std::clog << "f1410= " << f1410 << std::endl; - GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points1410.begin(), points1410.end())))); + // In this case, the computed sphere using CGAL kernel does not match the minimal enclosing ball; the filtration value check is therefore done against a hardcoded value + GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, 1.); Point_cloud points469; points469.push_back(cech_complex_for_doc.get_point(4)); points469.push_back(cech_complex_for_doc.get_point(6)); points469.push_back(cech_complex_for_doc.get_point(9)); -// Min_sphere ms469(dimension, points469.begin(), points469.end()); Simplex_tree::Filtration_value f469 = st2.filtration(st2.find({4, 6, 9})); - std::clog << "f469= " << f469 << " | points469_radius= " << std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points469.begin(), points469.end()))) << std::endl; + std::clog << "f469= " << f469 << std::endl; GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points469.begin(), points469.end())))); diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index ccd7d453..0c945cad 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -25,14 +25,12 @@ // Types definition using Simplex_tree = Gudhi::Simplex_tree; using Filtration_value = Simplex_tree::Filtration_value; -// using Point = std::vector; -// using Point_cloud = std::vector; using Kernel = CGAL::Epeck_d; using Point = typename Kernel::Point_d; using Point_cloud = std::vector; using Points_off_reader = 
Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; using Field_Zp = Gudhi::persistent_cohomology::Field_Zp; using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology; diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index f69ed6ec..85790baf 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1278,10 +1278,8 @@ class Simplex_tree { intersection.emplace_back(next->first, Node(nullptr, filt)); } } - std::clog << "Hind: after intersection insertion" << std::endl; if (intersection.size() != 0) { // Reverse the order to insert - std::clog << "Hind: declare new siblings" << std::endl; Siblings * new_sib = new Siblings(siblings, // oncles simplex->first, // parent boost::adaptors::reverse(intersection)); // boost::container::ordered_unique_range_t @@ -1290,12 +1288,10 @@ class Simplex_tree { for (auto new_sib_member = new_sib->members().begin(); new_sib_member != new_sib->members().end(); new_sib_member++) { - std::clog << "Hind: check the blocker result" << std::endl; bool blocker_result = block_simplex(new_sib_member); // new_sib member has been blocked by the blocker function // add it to the list to be removed - do not perform it while looping on it if (blocker_result) { - std::clog << "Hind: add to list of blocked sib to be removed" << std::endl; blocked_new_sib_vertex_list.push_back(new_sib_member->first); } } diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h index ae5168aa..a8ee4a75 100644 --- a/src/common/include/gudhi/distance_functions.h +++ b/src/common/include/gudhi/distance_functions.h @@ -65,19 +65,17 @@ class Euclidean_distance { * The points are assumed to have the same dimension. */ class Minimal_enclosing_ball_radius { public: - /** \brief TODO + /** \brief Enclosing ball radius from two points using CGAL. * * @param[in] point_1 * @param[in] point_2 - * @return - * \tparam Point + * @return Enclosing ball radius for the two points. + * \tparam Point must be a Kernel::Point_d from CGAL. * */ - //typename FT = typename Kernel::FT, template< typename Kernel = CGAL::Epeck_d, typename Point= typename Kernel::Point_d> double operator()(const Point& point_1, const Point& point_2) const { - std::clog << "Added template: distance betw points 1 and 2" << std::endl; Kernel kernel_; return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; } @@ -94,25 +92,21 @@ class Minimal_enclosing_ball_radius { template< typename Point > typename std::iterator_traits::type>::value_type operator()(const Point& point_1, const Point& point_2) const { - std::clog << "Hind: Minimal_enclosing_ball_radius point1 et 2; Euclidean" << std::endl; std::clog << "#" << *point_1.begin() << "##" << *point_2.begin() << std::endl; return Euclidean_distance()(point_1, point_2) / 2.; } - - /** \brief TODO + /** \brief Enclosing ball radius from a point cloud using CGAL. * * @param[in] point_cloud The points. - * @return - * \tparam Point_cloud + * @return Enclosing ball radius for the points. + * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. 
* */ - //typename FT = typename Kernel::FT, template< typename Kernel = CGAL::Epeck_d, typename Point= typename Kernel::Point_d, typename Point_cloud = std::vector> double operator()(const Point_cloud& point_cloud) const { - std::clog << "Added template: distance in point cloud" << std::endl; Kernel kernel_; return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); } @@ -133,8 +127,6 @@ class Minimal_enclosing_ball_radius { typename Coordinate = typename std::iterator_traits::value_type> Coordinate operator()(const Point_cloud& point_cloud) const { - std::clog << "Hind: Minimal_enclosing_ball_radius point cloud; Miniball" << std::endl; - using Min_sphere = Miniball::Miniball>; Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end()); diff --git a/src/common/include/gudhi/graph_simplicial_complex.h b/src/common/include/gudhi/graph_simplicial_complex.h index 9190182c..da9dee7d 100644 --- a/src/common/include/gudhi/graph_simplicial_complex.h +++ b/src/common/include/gudhi/graph_simplicial_complex.h @@ -18,8 +18,6 @@ #include #include // for std::tie -#include - namespace Gudhi { /** @file * @brief Graph simplicial complex methods @@ -78,7 +76,6 @@ Proximity_graph compute_proximity_graph( for (auto it_u = points.begin(); it_u != points.end(); ++it_u) { idx_v = idx_u + 1; for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) { - std::clog << "#idx_u" << idx_u << "#idx_v " << idx_v << std::endl; fil = distance(*it_u, *it_v); if (fil <= threshold) { edges.emplace_back(idx_u, idx_v); -- cgit v1.2.3 From 65b32d167810a107cf807572f84cef082c76067d Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 30 Sep 2021 16:13:39 +0200 Subject: Add Cech tests, examples, utilities and benchmark to the build only if CGAL is present --- src/Cech_complex/benchmark/CMakeLists.txt | 18 ++++++++++-------- src/Cech_complex/example/CMakeLists.txt | 26 ++++++++++++++------------ src/Cech_complex/test/CMakeLists.txt | 19 +++++++++++-------- src/Cech_complex/utilities/CMakeLists.txt | 24 +++++++++++++----------- 4 files changed, 48 insertions(+), 39 deletions(-) diff --git a/src/Cech_complex/benchmark/CMakeLists.txt b/src/Cech_complex/benchmark/CMakeLists.txt index bc54c0f3..ccd1b324 100644 --- a/src/Cech_complex/benchmark/CMakeLists.txt +++ b/src/Cech_complex/benchmark/CMakeLists.txt @@ -1,13 +1,15 @@ project(Cech_complex_benchmark) -# Do not forget to copy test files in current binary dir -file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +if (CGAL_FOUND) + # Do not forget to copy test files in current binary dir + file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) -if(TARGET Boost::filesystem) - add_executable(cech_complex_benchmark cech_complex_benchmark.cpp) - target_link_libraries(cech_complex_benchmark Boost::filesystem) + if(TARGET Boost::filesystem) + add_executable(cech_complex_benchmark cech_complex_benchmark.cpp) + target_link_libraries(cech_complex_benchmark Boost::filesystem) - if (TBB_FOUND) - target_link_libraries(cech_complex_benchmark ${TBB_LIBRARIES}) + if (TBB_FOUND) + target_link_libraries(cech_complex_benchmark ${TBB_LIBRARIES}) + endif() endif() -endif() \ No newline at end of file +endif() diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt index 1b08c7cb..52dec63f 100644 --- a/src/Cech_complex/example/CMakeLists.txt +++ 
b/src/Cech_complex/example/CMakeLists.txt @@ -1,17 +1,19 @@ project(Cech_complex_examples) -if (TARGET Boost::program_options) - add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp ) - target_link_libraries(Cech_complex_example_step_by_step Boost::program_options) - if (TBB_FOUND) - target_link_libraries(Cech_complex_example_step_by_step ${TBB_LIBRARIES}) +if (CGAL_FOUND) + if (TARGET Boost::program_options) + add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp ) + target_link_libraries(Cech_complex_example_step_by_step Boost::program_options) + if (TBB_FOUND) + target_link_libraries(Cech_complex_example_step_by_step ${TBB_LIBRARIES}) + endif() + add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-d" "3") endif() - add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-d" "3") -endif() -add_executable ( Cech_complex_example_from_points cech_complex_example_from_points.cpp) -if (TBB_FOUND) - target_link_libraries(Cech_complex_example_from_points ${TBB_LIBRARIES}) + add_executable ( Cech_complex_example_from_points cech_complex_example_from_points.cpp) + if (TBB_FOUND) + target_link_libraries(Cech_complex_example_from_points ${TBB_LIBRARIES}) + endif() + add_test(NAME Cech_complex_example_from_points COMMAND $) endif() -add_test(NAME Cech_complex_example_from_points COMMAND $) diff --git a/src/Cech_complex/test/CMakeLists.txt b/src/Cech_complex/test/CMakeLists.txt index e6a2a18f..8e725f50 100644 --- a/src/Cech_complex/test/CMakeLists.txt +++ b/src/Cech_complex/test/CMakeLists.txt @@ -1,11 +1,14 @@ -include(GUDHI_boost_test) +if (CGAL_FOUND) + include(GUDHI_boost_test) -add_executable ( Cech_complex_test_unit test_cech_complex.cpp ) -if (TBB_FOUND) - target_link_libraries(Cech_complex_test_unit ${TBB_LIBRARIES}) -endif() + add_executable ( Cech_complex_test_unit test_cech_complex.cpp ) + if (TBB_FOUND) + target_link_libraries(Cech_complex_test_unit ${TBB_LIBRARIES}) + endif() + + # Do not forget to copy test files in current binary dir + file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) -# Do not forget to copy test files in current binary dir -file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + gudhi_add_boost_test(Cech_complex_test_unit) -gudhi_add_boost_test(Cech_complex_test_unit) +endif() diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt index b183c8d8..efe40e57 100644 --- a/src/Cech_complex/utilities/CMakeLists.txt +++ b/src/Cech_complex/utilities/CMakeLists.txt @@ -1,15 +1,17 @@ -project(Cech_complex_utilities) +if (CGAL_FOUND) + project(Cech_complex_utilities) -if (TARGET Boost::program_options) - add_executable(cech_persistence cech_persistence.cpp) - target_link_libraries(cech_persistence Boost::program_options) + if (TARGET Boost::program_options) + add_executable(cech_persistence cech_persistence.cpp) + target_link_libraries(cech_persistence Boost::program_options) - if (TBB_FOUND) - target_link_libraries(cech_persistence ${TBB_LIBRARIES}) - endif() + if (TBB_FOUND) + target_link_libraries(cech_persistence ${TBB_LIBRARIES}) + endif() - add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3") + add_test(NAME 
Cech_complex_utility_from_rips_on_tore_3D COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3") - install(TARGETS cech_persistence DESTINATION bin) -endif() \ No newline at end of file + install(TARGETS cech_persistence DESTINATION bin) + endif() +endif() -- cgit v1.2.3 From d3970dbbc16993d348092899eb8fcd1ea1aceb8d Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 1 Oct 2021 15:47:58 +0200 Subject: Separate Minimal_enclosing_ball_radius class which uses CGAL from common distance_functions.h file --- .../benchmark/cech_complex_benchmark.cpp | 2 +- .../example/cech_complex_step_by_step.cpp | 2 +- src/Cech_complex/include/gudhi/Cech_complex.h | 2 +- .../include/gudhi/Cech_complex/Cech_kernel.h | 149 +++++++++++++++++++++ src/Cech_complex/test/test_cech_complex.cpp | 2 +- src/Cech_complex/utilities/cech_persistence.cpp | 2 +- src/common/include/gudhi/distance_functions.h | 83 ------------ 7 files changed, 154 insertions(+), 88 deletions(-) create mode 100644 src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index 4a1aa06e..cfeb0725 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -9,7 +9,7 @@ */ #include -#include +#include #include #include #include diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index 44e7f945..2d8321b1 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -9,7 +9,7 @@ */ #include -#include +#include #include #include diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 32a78aec..7bbf97d1 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -11,7 +11,7 @@ #ifndef CECH_COMPLEX_H_ #define CECH_COMPLEX_H_ -#include // for Gudhi::Minimal_enclosing_ball_radius +#include // for Gudhi::Minimal_enclosing_ball_radius #include // for Gudhi::Proximity_graph #include // for GUDHI_CHECK #include // for Gudhi::cech_complex::Cech_blocker diff --git a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h new file mode 100644 index 00000000..93af90d2 --- /dev/null +++ b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h @@ -0,0 +1,149 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Hind Montassif + * + * Copyright (C) 2021 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef CECH_KERNEL_H_ +#define CECH_KERNEL_H_ + +#include + +#include // for std::sqrt + +namespace Gudhi { + +// namespace cech_complex { + +/** @brief Compute the radius of the minimal enclosing ball between Points given by a range of coordinates. + * The points are assumed to have the same dimension. */ +class Minimal_enclosing_ball_radius { + public: + /** \brief Enclosing ball radius from two points using CGAL. + * + * @param[in] point_1 + * @param[in] point_2 + * @return Enclosing ball radius for the two points. + * \tparam Point must be a Kernel::Point_d from CGAL. 
+ * + */ + template< typename Kernel = CGAL::Epeck_d, + typename Point= typename Kernel::Point_d> + double operator()(const Point& point_1, const Point& point_2) const { + Kernel kernel_; + return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; + } + + /** \brief Minimal_enclosing_ball_radius from two points. + * + * @param[in] point_1 First point. + * @param[in] point_2 second point. + * @return The minimal enclosing ball radius for the two points (aka. Euclidean distance / 2.). + * + * \tparam Point must be a range of Cartesian coordinates. + * + */ +// template< typename Point > +// typename std::iterator_traits::type>::value_type +// operator()(const Point& point_1, const Point& point_2) const { +// std::clog << "#" << *point_1.begin() << "##" << *point_2.begin() << std::endl; +// return Euclidean_distance()(point_1, point_2) / 2.; +// } + + /** \brief Enclosing ball radius from a point cloud using CGAL. + * + * @param[in] point_cloud The points. + * @return Enclosing ball radius for the points. + * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. + * + */ + template< typename Kernel = CGAL::Epeck_d, + typename Point= typename Kernel::Point_d, + typename Point_cloud = std::vector> + double operator()(const Point_cloud& point_cloud) const { + Kernel kernel_; + return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); + } + + /** \brief Minimal_enclosing_ball_radius from a point cloud. + * + * @param[in] point_cloud The points. + * @return The minimal enclosing ball radius for the points. + * + * \tparam Point_cloud must be a range of points with Cartesian coordinates. + * Point_cloud is a range over a range of Coordinate. + * + */ +// template< typename Point_cloud, +// typename Point_iterator = typename boost::range_const_iterator::type, +// typename Point = typename std::iterator_traits::value_type, +// typename Coordinate_iterator = typename boost::range_const_iterator::type, +// typename Coordinate = typename std::iterator_traits::value_type> +// Coordinate +// operator()(const Point_cloud& point_cloud) const { +// using Min_sphere = Miniball::Miniball>; +// +// Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end()); +// #ifdef DEBUG_TRACES +// std::clog << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = " +// << boost::size(point_cloud) << " | dimension = " +// << boost::size(*point_cloud.begin()) << std::endl; +// #endif // DEBUG_TRACES +// +// return std::sqrt(ms.squared_radius()); +// } +}; + +/** + * \class Cech_kernel + * \brief Cech complex kernel container. + * + * \details + * The Cech complex kernel container stores CGAL Kernel and dispatch basic computations. + */ + +// template < typename Kernel > +// class Cech_kernel { +// private: +// // Kernel for functions access. +// Kernel kernel_; +// public: +// using Point_d = typename Kernel::Point_d; +// // Numeric type of coordinates in the kernel +// using FT = typename Kernel::FT; +// // Sphere is a pair of point and squared radius. 
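Since the functor now lives in its own header, a short usage sketch may help; it drives the same CGAL objects the two kept overloads wrap (squared_distance_d_object for a pair of points, compute_squared_radius_d_object for a range). The include set and the fixed 2-dimensional kernel are illustrative assumptions:

#include <CGAL/Epeck_d.h>
#include <CGAL/number_utils.h>  // CGAL::to_double (assumed location)
#include <cmath>
#include <iostream>
#include <vector>

int main() {
  using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
  using Point = Kernel::Point_d;
  Kernel k;
  std::vector<double> a = {0., 0.}, b = {2., 0.}, c = {1., 1.};
  std::vector<Point> cloud = {Point(a.begin(), a.end()),
                              Point(b.begin(), b.end()),
                              Point(c.begin(), c.end())};
  // Two-point case: half the Euclidean distance, 1. for (0,0)-(2,0).
  double half_dist =
      std::sqrt(CGAL::to_double(k.squared_distance_d_object()(cloud[0], cloud[1]))) / 2.;
  // Range case: radius of the sphere CGAL computes for the points,
  // 1. here (the sphere centered at (1,0) passes through all three).
  double radius =
      std::sqrt(CGAL::to_double(k.compute_squared_radius_d_object()(cloud.begin(), cloud.end())));
  std::cout << half_dist << " " << radius << std::endl;
}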
+// using Sphere = typename std::pair; +// +// int get_dimension(const Point_d& p0) const { +// return kernel_.point_dimension_d_object()(p0); +// } +// +// template +// Sphere get_sphere(PointIterator begin, PointIterator end) const { +// Point_d c = kernel_.construct_circumcenter_d_object()(begin, end); +// FT r = kernel_.squared_distance_d_object()(c, *begin); +// return std::make_pair(std::move(c), std::move(r)); +// } +// +// template +// FT get_squared_radius(PointIterator begin, PointIterator end) const { +// return kernel_.compute_squared_radius_d_object()(begin, end); +// } +// +// FT get_squared_radius(const Sphere& sph) const { +// return sph.second; +// } +// }; + + +//} // namespace cech_complex + +// namespace cechcomplex = cech_complex; + +} // namespace Gudhi + +#endif // CECH_KERNEL_H_ diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 51b466da..7d8c3c22 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -22,7 +22,7 @@ // to construct Cech_complex from a OFF file of points #include #include -#include +#include #include #include // For EXACT or SAFE version diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index 0c945cad..ccf63e3e 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -9,7 +9,7 @@ */ #include -#include +#include #include #include #include diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h index a8ee4a75..5e5a1e31 100644 --- a/src/common/include/gudhi/distance_functions.h +++ b/src/common/include/gudhi/distance_functions.h @@ -13,13 +13,9 @@ #include -#include - #include #include -#include - #include // for std::sqrt #include // for std::decay #include // for std::begin, std::end @@ -61,85 +57,6 @@ class Euclidean_distance { } }; -/** @brief Compute the radius of the minimal enclosing ball between Points given by a range of coordinates. - * The points are assumed to have the same dimension. */ -class Minimal_enclosing_ball_radius { - public: - /** \brief Enclosing ball radius from two points using CGAL. - * - * @param[in] point_1 - * @param[in] point_2 - * @return Enclosing ball radius for the two points. - * \tparam Point must be a Kernel::Point_d from CGAL. - * - */ - template< typename Kernel = CGAL::Epeck_d, - typename Point= typename Kernel::Point_d> - double operator()(const Point& point_1, const Point& point_2) const { - Kernel kernel_; - return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; - } - - /** \brief Minimal_enclosing_ball_radius from two points. - * - * @param[in] point_1 First point. - * @param[in] point_2 second point. - * @return The minimal enclosing ball radius for the two points (aka. Euclidean distance / 2.). - * - * \tparam Point must be a range of Cartesian coordinates. - * - */ - template< typename Point > - typename std::iterator_traits::type>::value_type - operator()(const Point& point_1, const Point& point_2) const { - std::clog << "#" << *point_1.begin() << "##" << *point_2.begin() << std::endl; - return Euclidean_distance()(point_1, point_2) / 2.; - } - - /** \brief Enclosing ball radius from a point cloud using CGAL. - * - * @param[in] point_cloud The points. - * @return Enclosing ball radius for the points. - * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. 
- * - */ - template< typename Kernel = CGAL::Epeck_d, - typename Point= typename Kernel::Point_d, - typename Point_cloud = std::vector> - double operator()(const Point_cloud& point_cloud) const { - Kernel kernel_; - return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); - } - - /** \brief Minimal_enclosing_ball_radius from a point cloud. - * - * @param[in] point_cloud The points. - * @return The minimal enclosing ball radius for the points. - * - * \tparam Point_cloud must be a range of points with Cartesian coordinates. - * Point_cloud is a range over a range of Coordinate. - * - */ - template< typename Point_cloud, - typename Point_iterator = typename boost::range_const_iterator::type, - typename Point = typename std::iterator_traits::value_type, - typename Coordinate_iterator = typename boost::range_const_iterator::type, - typename Coordinate = typename std::iterator_traits::value_type> - Coordinate - operator()(const Point_cloud& point_cloud) const { - using Min_sphere = Miniball::Miniball>; - - Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end()); -#ifdef DEBUG_TRACES - std::clog << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = " - << boost::size(point_cloud) << " | dimension = " - << boost::size(*point_cloud.begin()) << std::endl; -#endif // DEBUG_TRACES - - return std::sqrt(ms.squared_radius()); - } -}; - } // namespace Gudhi #endif // DISTANCE_FUNCTIONS_H_ -- cgit v1.2.3 From e9a676d7aa9d27595951f1f4f14ac419641234b4 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 6 Oct 2021 10:19:33 +0200 Subject: Replace CGAL_FOUND flag with version check of CGAL_WITH_EIGEN3_VERSION --- src/Cech_complex/benchmark/CMakeLists.txt | 2 +- src/Cech_complex/example/CMakeLists.txt | 2 +- src/Cech_complex/test/CMakeLists.txt | 2 +- src/Cech_complex/utilities/CMakeLists.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Cech_complex/benchmark/CMakeLists.txt b/src/Cech_complex/benchmark/CMakeLists.txt index ccd1b324..dd7f3f6c 100644 --- a/src/Cech_complex/benchmark/CMakeLists.txt +++ b/src/Cech_complex/benchmark/CMakeLists.txt @@ -1,6 +1,6 @@ project(Cech_complex_benchmark) -if (CGAL_FOUND) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt index 52dec63f..2b6a29a3 100644 --- a/src/Cech_complex/example/CMakeLists.txt +++ b/src/Cech_complex/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Cech_complex_examples) -if (CGAL_FOUND) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) if (TARGET Boost::program_options) add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp ) target_link_libraries(Cech_complex_example_step_by_step Boost::program_options) diff --git a/src/Cech_complex/test/CMakeLists.txt b/src/Cech_complex/test/CMakeLists.txt index 8e725f50..66d796a7 100644 --- a/src/Cech_complex/test/CMakeLists.txt +++ b/src/Cech_complex/test/CMakeLists.txt @@ -1,4 +1,4 @@ -if (CGAL_FOUND) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) include(GUDHI_boost_test) add_executable ( Cech_complex_test_unit test_cech_complex.cpp ) diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt index efe40e57..3c9c0f73 100644 --- 
a/src/Cech_complex/utilities/CMakeLists.txt +++ b/src/Cech_complex/utilities/CMakeLists.txt @@ -1,4 +1,4 @@ -if (CGAL_FOUND) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) project(Cech_complex_utilities) if (TARGET Boost::program_options) -- cgit v1.2.3 From 1db88dcd1f5f94d4ab9b560fd0f4399cf00fb304 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 11 Oct 2021 15:09:41 +0200 Subject: Change the minimal version of CGAL_WITH_EIGEN3 to 5.1.0 for cech use (tests, examples, etc) --- src/Cech_complex/benchmark/CMakeLists.txt | 2 +- src/Cech_complex/example/CMakeLists.txt | 2 +- src/Cech_complex/test/CMakeLists.txt | 2 +- src/Cech_complex/utilities/CMakeLists.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Cech_complex/benchmark/CMakeLists.txt b/src/Cech_complex/benchmark/CMakeLists.txt index dd7f3f6c..9c7fd26d 100644 --- a/src/Cech_complex/benchmark/CMakeLists.txt +++ b/src/Cech_complex/benchmark/CMakeLists.txt @@ -1,6 +1,6 @@ project(Cech_complex_benchmark) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt index 2b6a29a3..cdbd44cb 100644 --- a/src/Cech_complex/example/CMakeLists.txt +++ b/src/Cech_complex/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Cech_complex_examples) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) if (TARGET Boost::program_options) add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp ) target_link_libraries(Cech_complex_example_step_by_step Boost::program_options) diff --git a/src/Cech_complex/test/CMakeLists.txt b/src/Cech_complex/test/CMakeLists.txt index 66d796a7..2926a656 100644 --- a/src/Cech_complex/test/CMakeLists.txt +++ b/src/Cech_complex/test/CMakeLists.txt @@ -1,4 +1,4 @@ -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) include(GUDHI_boost_test) add_executable ( Cech_complex_test_unit test_cech_complex.cpp ) diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt index 3c9c0f73..1a970679 100644 --- a/src/Cech_complex/utilities/CMakeLists.txt +++ b/src/Cech_complex/utilities/CMakeLists.txt @@ -1,4 +1,4 @@ -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) project(Cech_complex_utilities) if (TARGET Boost::program_options) -- cgit v1.2.3 From 7845631fc2b4a32d4ca7c3e37bf49d40c22e226e Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 12 Oct 2021 10:08:10 +0200 Subject: Remove old implementation of Minimal_enclosing_ball_radius operator () --- .../include/gudhi/Cech_complex/Cech_kernel.h | 42 ---------------------- 1 file changed, 42 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h index 93af90d2..348bb57d 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h +++ b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h @@ -38,21 +38,6 @@ class Minimal_enclosing_ball_radius { return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; } - /** \brief Minimal_enclosing_ball_radius from two points. - * - * @param[in] point_1 First point. 
- * @param[in] point_2 second point. - * @return The minimal enclosing ball radius for the two points (aka. Euclidean distance / 2.). - * - * \tparam Point must be a range of Cartesian coordinates. - * - */ -// template< typename Point > -// typename std::iterator_traits::type>::value_type -// operator()(const Point& point_1, const Point& point_2) const { -// std::clog << "#" << *point_1.begin() << "##" << *point_2.begin() << std::endl; -// return Euclidean_distance()(point_1, point_2) / 2.; -// } /** \brief Enclosing ball radius from a point cloud using CGAL. * @@ -69,33 +54,6 @@ class Minimal_enclosing_ball_radius { return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); } - /** \brief Minimal_enclosing_ball_radius from a point cloud. - * - * @param[in] point_cloud The points. - * @return The minimal enclosing ball radius for the points. - * - * \tparam Point_cloud must be a range of points with Cartesian coordinates. - * Point_cloud is a range over a range of Coordinate. - * - */ -// template< typename Point_cloud, -// typename Point_iterator = typename boost::range_const_iterator::type, -// typename Point = typename std::iterator_traits::value_type, -// typename Coordinate_iterator = typename boost::range_const_iterator::type, -// typename Coordinate = typename std::iterator_traits::value_type> -// Coordinate -// operator()(const Point_cloud& point_cloud) const { -// using Min_sphere = Miniball::Miniball>; -// -// Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end()); -// #ifdef DEBUG_TRACES -// std::clog << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = " -// << boost::size(point_cloud) << " | dimension = " -// << boost::size(*point_cloud.begin()) << std::endl; -// #endif // DEBUG_TRACES -// -// return std::sqrt(ms.squared_radius()); -// } }; /** -- cgit v1.2.3 From dafc12cf1a71d5515fbeb4a666f312adc7d82d63 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 13 Oct 2021 09:46:26 +0200 Subject: Change version of CGAL_WITH_EIGEN3 to 5.0.1 for cech use --- src/Cech_complex/benchmark/CMakeLists.txt | 2 +- src/Cech_complex/example/CMakeLists.txt | 2 +- src/Cech_complex/test/CMakeLists.txt | 2 +- src/Cech_complex/utilities/CMakeLists.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Cech_complex/benchmark/CMakeLists.txt b/src/Cech_complex/benchmark/CMakeLists.txt index 9c7fd26d..a6b3d70b 100644 --- a/src/Cech_complex/benchmark/CMakeLists.txt +++ b/src/Cech_complex/benchmark/CMakeLists.txt @@ -1,6 +1,6 @@ project(Cech_complex_benchmark) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt index cdbd44cb..4d11ace2 100644 --- a/src/Cech_complex/example/CMakeLists.txt +++ b/src/Cech_complex/example/CMakeLists.txt @@ -1,6 +1,6 @@ project(Cech_complex_examples) -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1) if (TARGET Boost::program_options) add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp ) target_link_libraries(Cech_complex_example_step_by_step Boost::program_options) diff --git a/src/Cech_complex/test/CMakeLists.txt 
b/src/Cech_complex/test/CMakeLists.txt index 2926a656..2d736f27 100644 --- a/src/Cech_complex/test/CMakeLists.txt +++ b/src/Cech_complex/test/CMakeLists.txt @@ -1,4 +1,4 @@ -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1) include(GUDHI_boost_test) add_executable ( Cech_complex_test_unit test_cech_complex.cpp ) diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt index 1a970679..e80a698e 100644 --- a/src/Cech_complex/utilities/CMakeLists.txt +++ b/src/Cech_complex/utilities/CMakeLists.txt @@ -1,4 +1,4 @@ -if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) +if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1) project(Cech_complex_utilities) if (TARGET Boost::program_options) -- cgit v1.2.3 From 160c0a9832f84b6c12e507388982133ec81be278 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 18 Oct 2021 11:30:23 +0200 Subject: changed name differentiation ---> tensorflow and separated functions into different files --- src/python/CMakeLists.txt | 4 +- src/python/gudhi/differentiation/__init__.py | 3 - src/python/gudhi/differentiation/tensorflow.py | 234 --------------------- src/python/gudhi/tensorflow/CubicalLayer.py | 66 ++++++ .../gudhi/tensorflow/LowerStarSimplexTreeLayer.py | 77 +++++++ src/python/gudhi/tensorflow/RipsLayer.py | 75 +++++++ src/python/gudhi/tensorflow/__init__.py | 5 + src/python/test/test_diff.py | 2 +- 8 files changed, 226 insertions(+), 240 deletions(-) delete mode 100644 src/python/gudhi/differentiation/__init__.py delete mode 100644 src/python/gudhi/differentiation/tensorflow.py create mode 100644 src/python/gudhi/tensorflow/CubicalLayer.py create mode 100644 src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py create mode 100644 src/python/gudhi/tensorflow/RipsLayer.py create mode 100644 src/python/gudhi/tensorflow/__init__.py diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 6c17223e..49ec3fea 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -67,7 +67,7 @@ if(PYTHONINTERP_FOUND) set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_strong_witness_complex', ") # Modules that should not be auto-imported in __init__.py set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'representations', ") - set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'differentiation', ") + set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'tensorflow', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'wasserstein', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'point_cloud', ") set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'weighted_rips_complex', ") @@ -271,7 +271,7 @@ if(PYTHONINTERP_FOUND) file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/representations" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi/") file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") - file(COPY "gudhi/differentiation" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") + file(COPY "gudhi/tensorflow" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") file(COPY "gudhi/clustering" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi" FILES_MATCHING PATTERN "*.py") file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi") diff --git a/src/python/gudhi/differentiation/__init__.py 
b/src/python/gudhi/differentiation/__init__.py deleted file mode 100644 index 3b7790e4..00000000 --- a/src/python/gudhi/differentiation/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .tensorflow import * - -__all__ = ["LowerStarSimplexTreeLayer", "RipsLayer", "CubicalLayer"] diff --git a/src/python/gudhi/differentiation/tensorflow.py b/src/python/gudhi/differentiation/tensorflow.py deleted file mode 100644 index 15d5811e..00000000 --- a/src/python/gudhi/differentiation/tensorflow.py +++ /dev/null @@ -1,234 +0,0 @@ -import numpy as np -import tensorflow as tf -from ..rips_complex import RipsComplex -from ..cubical_complex import CubicalComplex - -# In this file, we write functions based on the Gudhi library that compute persistence diagrams associated to -# different filtrations (lower star, Rips, cubical), as well as the corresponding positive and negative -# simplices. We also wrap these functions into Tensorflow models. - - - -######################################### -# Lower star filtration on simplex tree # -######################################### - -# The parameters of the model are the vertex function values of the simplex tree. - -def _LowerStarSimplexTree(simplextree, filtration, dimension): - # Parameters: simplextree (simplex tree on which to compute persistence) - # filtration (function values on the vertices of st), - # dimension (homology dimension), - - for s,_ in simplextree.get_filtration(): - simplextree.assign_filtration(s, -1e10) - - # Assign new filtration values - for i in range(simplextree.num_vertices()): - simplextree.assign_filtration([i], filtration[i]) - simplextree.make_filtration_non_decreasing() - - # Compute persistence diagram - dgm = simplextree.persistence() - - # Get vertex pairs for optimization. First, get all simplex pairs - pairs = simplextree.persistence_pairs() - - # Then, loop over all simplex pairs - indices, pers = [], [] - for s1, s2 in pairs: - # Select pairs with good homological dimension and finite lifetime - if len(s1) == dimension+1 and len(s2) > 0: - # Get IDs of the vertices corresponding to the filtration values of the simplices - l1, l2 = np.array(s1), np.array(s2) - i1 = l1[np.argmax(filtration[l1])] - i2 = l2[np.argmax(filtration[l2])] - indices.append(i1) - indices.append(i2) - # Compute lifetime - pers.append(simplextree.filtration(s2)-simplextree.filtration(s1)) - - # Sort vertex pairs wrt lifetime - perm = np.argsort(pers) - indices = np.reshape(indices, [-1,2])[perm][::-1,:].flatten() - - return np.array(indices, dtype=np.int32) - -class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): - """ - TensorFlow layer for computing lower-star persistence out of a simplex tree - - Attributes: - simplextree (gudhi.SimplexTree()): underlying simplex tree - dimension (int): homology dimension - """ - def __init__(self, simplextree, dimension=0, **kwargs): - super().__init__(dynamic=True, **kwargs) - self.dimension = dimension - self.simplextree = simplextree - - def build(self): - super.build() - - def call(self, filtration): - """ - Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree - - Parameters: - F (TensorFlow variable): filter function values over the vertices of the simplex tree - """ - # Don't try to compute gradients for the vertex pairs - indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) - # Get persistence diagram - self.dgm = tf.reshape(tf.gather(filtration, indices), [-1,2]) - return self.dgm - - - - - - - - - 
-############################ -# Vietoris-Rips filtration # -############################ - -# The parameters of the model are the point coordinates. - -def _Rips(DX, max_edge, dimension): - # Parameters: DX (distance matrix), - # max_edge (maximum edge length for Rips filtration), - # dimension (homology dimension) - - # Compute the persistence pairs with Gudhi - rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) - st = rc.create_simplex_tree(max_dimension=dimension+1) - dgm = st.persistence() - pairs = st.persistence_pairs() - - # Retrieve vertices v_a and v_b by picking the ones achieving the maximal - # distance among all pairwise distances between the simplex vertices - indices, pers = [], [] - for s1, s2 in pairs: - if len(s1) == dimension+1 and len(s2) > 0: - l1, l2 = np.array(s1), np.array(s2) - i1 = [l1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(l1), len(l1)])] - i2 = [l2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(l2), len(l2)])] - indices.append(i1) - indices.append(i2) - pers.append(st.filtration(s2)-st.filtration(s1)) - - # Sort points with distance-to-diagonal - perm = np.argsort(pers) - indices = np.reshape(indices, [-1,4])[perm][::-1,:].flatten() - - return np.array(indices, dtype=np.int32) - -class RipsLayer(tf.keras.layers.Layer): - """ - TensorFlow layer for computing Rips persistence out of a point cloud - - Attributes: - maximum_edge_length (float): maximum edge length for the Rips complex - dimension (int): homology dimension - """ - def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): - super().__init__(dynamic=True, **kwargs) - self.max_edge = maximum_edge_length - self.dimension = dimension - - def build(self): - super.build() - - def call(self, X): - """ - Compute Rips persistence diagram associated to a point cloud - - Parameters: - X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] - """ - # Compute distance matrix - DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) - # Compute vertices associated to positive and negative simplices - # Don't compute gradient for this operation - indices = tf.stop_gradient(_Rips(DX.numpy(), self.max_edge, self.dimension)) - # Get persistence diagram by simply picking the corresponding entries in the distance matrix - if self.dimension > 0: - dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) - else: - indices = tf.reshape(indices, [-1,2])[1::2,:] - dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) - return dgm - - - - - - - - - -###################### -# Cubical filtration # -###################### - -# The parameters of the model are the pixel values. 
- -def _Cubical(X, dimension): - # Parameters: X (image), - # dimension (homology dimension) - - # Compute the persistence pairs with Gudhi - cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) - cc.persistence() - try: - cof = cc.cofaces_of_persistence_pairs()[0][dimension] - except IndexError: - cof = np.array([]) - - if len(cof) > 0: - # Sort points with distance-to-diagonal - Xs = X.shape - pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] - perm = np.argsort(pers) - cof = cof[perm[::-1]] - - # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices - D = len(Xs) if len(cof) > 0 else 1 - ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) - count = 0 - for idx in range(0,2*cof.shape[0],2): - ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) - ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) - count += 1 - return np.array(ocof, dtype=np.int32) - -class CubicalLayer(tf.keras.layers.Layer): - """ - TensorFlow layer for computing cubical persistence out of a cubical complex - - Attributes: - dimension (int): homology dimension - """ - def __init__(self, dimension=1, **kwargs): - super().__init__(dynamic=True, **kwargs) - self.dimension = dimension - - def build(self): - super.build() - - def call(self, X): - """ - Compute persistence diagram associated to a cubical complex filtered by some pixel values - - Parameters: - X (TensorFlow variable): pixel values of the cubical complex - """ - # Compute pixels associated to positive and negative simplices - # Don't compute gradient for this operation - indices = tf.stop_gradient(_Cubical(X.numpy(), self.dimension)) - # Get persistence diagram by simply picking the corresponding entries in the image - dgm = tf.reshape(tf.gather_nd(X, tf.reshape(indices, [-1,len(X.shape)])), [-1,2]) - return dgm diff --git a/src/python/gudhi/tensorflow/CubicalLayer.py b/src/python/gudhi/tensorflow/CubicalLayer.py new file mode 100644 index 00000000..e36adec5 --- /dev/null +++ b/src/python/gudhi/tensorflow/CubicalLayer.py @@ -0,0 +1,66 @@ +import numpy as np +import tensorflow as tf +from ..cubical_complex import CubicalComplex + +###################### +# Cubical filtration # +###################### + +# The parameters of the model are the pixel values. 
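+
+# A minimal usage sketch (editorial illustration mirroring the documentation
+# example added later in this series; not part of the original patch):
+#
+#   import numpy as np
+#   import tensorflow as tf
+#   from gudhi.tensorflow import CubicalLayer
+#
+#   X = tf.Variable(np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32), trainable=True)
+#   cl = CubicalLayer(dimension=0)
+#   with tf.GradientTape() as tape:
+#       dgm = cl.call(X)  # persistence diagram, shape [k, 2]
+#       loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+#   grads = tape.gradient(loss, [X])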
+ +def _Cubical(X, dimension): + # Parameters: X (image), + # dimension (homology dimension) + + # Compute the persistence pairs with Gudhi + cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) + cc.persistence() + try: + cof = cc.cofaces_of_persistence_pairs()[0][dimension] + except IndexError: + cof = np.array([]) + + if len(cof) > 0: + # Sort points with distance-to-diagonal + Xs = X.shape + pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] + perm = np.argsort(pers) + cof = cof[perm[::-1]] + + # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices + D = len(Xs) if len(cof) > 0 else 1 + ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) + count = 0 + for idx in range(0,2*cof.shape[0],2): + ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) + ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) + count += 1 + return np.array(ocof, dtype=np.int32) + +class CubicalLayer(tf.keras.layers.Layer): + """ + TensorFlow layer for computing cubical persistence out of a cubical complex + + Attributes: + dimension (int): homology dimension + """ + def __init__(self, dimension=1, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.dimension = dimension + + def build(self): + super.build() + + def call(self, X): + """ + Compute persistence diagram associated to a cubical complex filtered by some pixel values + + Parameters: + X (TensorFlow variable): pixel values of the cubical complex + """ + # Compute pixels associated to positive and negative simplices + # Don't compute gradient for this operation + indices = tf.stop_gradient(_Cubical(X.numpy(), self.dimension)) + # Get persistence diagram by simply picking the corresponding entries in the image + dgm = tf.reshape(tf.gather_nd(X, tf.reshape(indices, [-1,len(X.shape)])), [-1,2]) + return dgm diff --git a/src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py b/src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py new file mode 100644 index 00000000..fc963d2f --- /dev/null +++ b/src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py @@ -0,0 +1,77 @@ +import numpy as np +import tensorflow as tf + +######################################### +# Lower star filtration on simplex tree # +######################################### + +# The parameters of the model are the vertex function values of the simplex tree. + +def _LowerStarSimplexTree(simplextree, filtration, dimension): + # Parameters: simplextree (simplex tree on which to compute persistence) + # filtration (function values on the vertices of st), + # dimension (homology dimension), + + for s,_ in simplextree.get_filtration(): + simplextree.assign_filtration(s, -1e10) + + # Assign new filtration values + for i in range(simplextree.num_vertices()): + simplextree.assign_filtration([i], filtration[i]) + simplextree.make_filtration_non_decreasing() + + # Compute persistence diagram + dgm = simplextree.persistence() + + # Get vertex pairs for optimization. 
First, get all simplex pairs + pairs = simplextree.persistence_pairs() + + # Then, loop over all simplex pairs + indices, pers = [], [] + for s1, s2 in pairs: + # Select pairs with good homological dimension and finite lifetime + if len(s1) == dimension+1 and len(s2) > 0: + # Get IDs of the vertices corresponding to the filtration values of the simplices + l1, l2 = np.array(s1), np.array(s2) + i1 = l1[np.argmax(filtration[l1])] + i2 = l2[np.argmax(filtration[l2])] + indices.append(i1) + indices.append(i2) + # Compute lifetime + pers.append(simplextree.filtration(s2)-simplextree.filtration(s1)) + + # Sort vertex pairs wrt lifetime + perm = np.argsort(pers) + indices = np.reshape(indices, [-1,2])[perm][::-1,:].flatten() + + return np.array(indices, dtype=np.int32) + +class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): + """ + TensorFlow layer for computing lower-star persistence out of a simplex tree + + Attributes: + simplextree (gudhi.SimplexTree()): underlying simplex tree + dimension (int): homology dimension + """ + def __init__(self, simplextree, dimension=0, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.dimension = dimension + self.simplextree = simplextree + + def build(self): + super.build() + + def call(self, filtration): + """ + Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree + + Parameters: + F (TensorFlow variable): filter function values over the vertices of the simplex tree + """ + # Don't try to compute gradients for the vertex pairs + indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) + # Get persistence diagram + self.dgm = tf.reshape(tf.gather(filtration, indices), [-1,2]) + return self.dgm + diff --git a/src/python/gudhi/tensorflow/RipsLayer.py b/src/python/gudhi/tensorflow/RipsLayer.py new file mode 100644 index 00000000..373e021e --- /dev/null +++ b/src/python/gudhi/tensorflow/RipsLayer.py @@ -0,0 +1,75 @@ +import numpy as np +import tensorflow as tf +from ..rips_complex import RipsComplex + +############################ +# Vietoris-Rips filtration # +############################ + +# The parameters of the model are the point coordinates. 
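+
+# Editorial note (not part of the original patch): the helper below returns,
+# for each finite persistence pair in the requested dimension, four vertex
+# indices (i, j, k, l) such that the birth value is DX[i, j] and the death
+# value is DX[k, l]. The layer then gathers these entries from the
+# differentiable distance matrix, so gradients flow back to the point cloud.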
+ +def _Rips(DX, max_edge, dimension): + # Parameters: DX (distance matrix), + # max_edge (maximum edge length for Rips filtration), + # dimension (homology dimension) + + # Compute the persistence pairs with Gudhi + rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) + st = rc.create_simplex_tree(max_dimension=dimension+1) + dgm = st.persistence() + pairs = st.persistence_pairs() + + # Retrieve vertices v_a and v_b by picking the ones achieving the maximal + # distance among all pairwise distances between the simplex vertices + indices, pers = [], [] + for s1, s2 in pairs: + if len(s1) == dimension+1 and len(s2) > 0: + l1, l2 = np.array(s1), np.array(s2) + i1 = [l1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(l1), len(l1)])] + i2 = [l2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(l2), len(l2)])] + indices.append(i1) + indices.append(i2) + pers.append(st.filtration(s2)-st.filtration(s1)) + + # Sort points with distance-to-diagonal + perm = np.argsort(pers) + indices = np.reshape(indices, [-1,4])[perm][::-1,:].flatten() + + return np.array(indices, dtype=np.int32) + +class RipsLayer(tf.keras.layers.Layer): + """ + TensorFlow layer for computing Rips persistence out of a point cloud + + Attributes: + maximum_edge_length (float): maximum edge length for the Rips complex + dimension (int): homology dimension + """ + def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.max_edge = maximum_edge_length + self.dimension = dimension + + def build(self): + super.build() + + def call(self, X): + """ + Compute Rips persistence diagram associated to a point cloud + + Parameters: + X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] + """ + # Compute distance matrix + DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) + # Compute vertices associated to positive and negative simplices + # Don't compute gradient for this operation + indices = tf.stop_gradient(_Rips(DX.numpy(), self.max_edge, self.dimension)) + # Get persistence diagram by simply picking the corresponding entries in the distance matrix + if self.dimension > 0: + dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) + else: + indices = tf.reshape(indices, [-1,2])[1::2,:] + dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) + return dgm + diff --git a/src/python/gudhi/tensorflow/__init__.py b/src/python/gudhi/tensorflow/__init__.py new file mode 100644 index 00000000..47335a25 --- /dev/null +++ b/src/python/gudhi/tensorflow/__init__.py @@ -0,0 +1,5 @@ +from .CubicalLayer import CubicalLayer +from .LowerStarSimplexTreeLayer import LowerStarSimplexTreeLayer +from .RipsLayer import RipsLayer + +__all__ = ["LowerStarSimplexTreeLayer", "RipsLayer", "CubicalLayer"] diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index 129b9f03..73a03697 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -1,4 +1,4 @@ -from gudhi.differentiation import * +from gudhi.tensorflow import * import numpy as np import tensorflow as tf import gudhi as gd -- cgit v1.2.3 From fc1c33d19c7d50d01bacb61529badbde8217ce7e Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 19 Oct 2021 12:27:06 +0200 Subject: update on python documentation --- src/python/doc/cubical_complex_sum.inc | 25 ++++++----- src/python/doc/cubical_complex_tflow_itf_ref.rst | 33 ++++++++++++++ 
src/python/doc/differentiation.rst | 18 -------- src/python/doc/differentiation_sum.inc | 22 ++++----- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 57 ++++++++++++++++++++++++ src/python/doc/rips_complex_sum.inc | 5 ++- src/python/doc/rips_complex_tflow_itf_ref.rst | 33 ++++++++++++++ src/python/doc/simplex_tree_sum.inc | 23 +++++----- 8 files changed, 163 insertions(+), 53 deletions(-) create mode 100644 src/python/doc/cubical_complex_tflow_itf_ref.rst delete mode 100644 src/python/doc/differentiation.rst create mode 100644 src/python/doc/ls_simplex_tree_tflow_itf_ref.rst create mode 100644 src/python/doc/rips_complex_tflow_itf_ref.rst diff --git a/src/python/doc/cubical_complex_sum.inc b/src/python/doc/cubical_complex_sum.inc index 87db184d..90ec9fc2 100644 --- a/src/python/doc/cubical_complex_sum.inc +++ b/src/python/doc/cubical_complex_sum.inc @@ -1,14 +1,17 @@ .. table:: :widths: 30 40 30 - +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ - | .. figure:: | The cubical complex represents a grid as a cell complex with | :Author: Pawel Dlotko | - | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | cells of all dimensions. | | - | :alt: Cubical complex representation | | :Since: GUDHI 2.0.0 | - | :figclass: align-center | | | - | | | :License: MIT | - | | | | - +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+ - | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | - | | * :doc:`periodic_cubical_complex_ref` | - +--------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ + +--------------------------------------------------------------------------+----------------------------------------------------------------------+---------------------------------------------------------+ + | .. figure:: | The cubical complex represents a grid as a cell complex with | :Author: Pawel Dlotko | + | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | cells of all dimensions. | | + | :alt: Cubical complex representation | | :Since: GUDHI 2.0.0 | + | :figclass: align-center | | | + | | | :License: MIT | + | | | | + +--------------------------------------------------------------------------+----------------------------------------------------------------------+---------------------------------------------------------+ + | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` | + | | * :doc:`periodic_cubical_complex_ref` | + +--------------------------------------------------------------------------+----------------------------------------------------------------------+---------------------------------------------------------+ + | | * :doc:`cubical_complex_tflow_itf_ref` | :requires: `TensorFlow `_ | + | | | | + +--------------------------------------------------------------------------+----------------------------------------------------------------------+---------------------------------------------------------+ diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst new file mode 100644 index 00000000..8991b638 --- /dev/null +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -0,0 +1,33 @@ +:orphan: + +.. 
To get rid of WARNING: document isn't included in any toctree + +TensorFlow layer for cubical persistence +######################################## + +.. include:: differentiation_sum.inc + +Example of gradient computed from cubical persistence +----------------------------------------------------- + +.. code-block:: python + from gudhi.tensorflow import * + import numpy as np + import tensorflow as tf + + Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) + X = tf.Variable(initial_value=Xinit, trainable=True) + cl = CubicalLayer(dimension=0) + + with tf.GradientTape() as tape: + dgm = cl.call(X) + loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) + +Documentation for CubicalLayer +------------------------------ + +.. autoclass:: gudhi.tensorflow.CubicalLayer + :members: + :special-members: __init__ + :show-inheritance: diff --git a/src/python/doc/differentiation.rst b/src/python/doc/differentiation.rst deleted file mode 100644 index 906a9965..00000000 --- a/src/python/doc/differentiation.rst +++ /dev/null @@ -1,18 +0,0 @@ -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -====================== -Differentiation manual -====================== - -.. include:: differentiation_sum.inc - -In this module, we provide neural network models for computing persistent homology. In particular, we provide TensorFlow 2 models that allow to compute persistence diagrams from complexes available in the Gudhi library, including simplex trees, cubical complexes and Vietoris-Rips complexes. These models can be incorporated at each step of a given neural network architecture, and can be used in addition to `PersLay `_ to produce topological features. - -TensorFlow models ------------------ -.. automodule:: gudhi.differentiation - :members: - :special-members: - :show-inheritance: diff --git a/src/python/doc/differentiation_sum.inc b/src/python/doc/differentiation_sum.inc index 30188e0b..7340d24d 100644 --- a/src/python/doc/differentiation_sum.inc +++ b/src/python/doc/differentiation_sum.inc @@ -1,14 +1,10 @@ -.. table:: - :widths: 30 40 30 +. list-table:: + :widths: 40 30 30 + :header-rows: 0 - +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+ - | .. figure:: | Deep learning models for differentiating persistence diagrams. | :Author: Mathieu Carrière | - | img/ripsTF.png | | | - | | | :Since: GUDHI 3.1.0 | - | | | | - | | | :License: MIT | - | | | | - | | | :Requires: `TensorFlow 2 `_ | - +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+ - | * :doc:`differentiation` | - +------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------+ + * - :Since: GUDHI 3.5.0 + - :License: MIT + - :Requires: `TensorFlow `_ + +We provide TensorFlow 2 models that can handle automatic differentiation for the computation of persistence diagrams from complexes available in the Gudhi library. +This includes simplex trees, cubical complexes and Vietoris-Rips complexes. 
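A rough end-to-end sketch of how these layers are meant to be driven (editorial
illustration, not part of the patch; the optimizer and its parameters are
arbitrary): a standard TensorFlow training loop can minimize any differentiable
function of the diagram, e.g. the total squared persistence used in the
examples of this series.

.. code-block:: python

    import numpy as np
    import tensorflow as tf
    from gudhi.tensorflow import CubicalLayer

    X = tf.Variable(np.random.uniform(size=[10, 10]).astype(np.float32), trainable=True)
    layer = CubicalLayer(dimension=0)
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)
    for _ in range(10):
        with tf.GradientTape() as tape:
            dgm = layer.call(X)
            # total squared persistence, as in the documentation examples
            loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))
        gradients = tape.gradient(loss, [X])
        optimizer.apply_gradients(zip(gradients, [X]))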
diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst new file mode 100644 index 00000000..bb9c61c6 --- /dev/null +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -0,0 +1,57 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +TensorFlow layer for lower-star persistence on simplex trees +############################################################ + +.. include:: differentiation_sum.inc + +Example of gradient computed from lower-star filtration of a simplex tree +------------------------------------------------------------------------- + +.. code-block:: python + from gudhi.tensorflow import * + import numpy as np + import tensorflow as tf + import gudhi as gd + + st = gd.SimplexTree() + st.insert([0]) + st.insert([1]) + st.insert([2]) + st.insert([3]) + st.insert([4]) + st.insert([5]) + st.insert([6]) + st.insert([7]) + st.insert([8]) + st.insert([9]) + st.insert([10]) + st.insert([0, 1]) + st.insert([1, 2]) + st.insert([2, 3]) + st.insert([3, 4]) + st.insert([4, 5]) + st.insert([5, 6]) + st.insert([6, 7]) + st.insert([7, 8]) + st.insert([8, 9]) + st.insert([9, 10]) + + Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) + F = tf.Variable(initial_value=Finit, trainable=True) + sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0) + + with tf.GradientTape() as tape: + dgm = sl.call(F) + loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [F]) + +Documentation for LowerStarSimplexTreeLayer +------------------------------------------- + +.. autoclass:: gudhi.tensorflow.LowerStarSimplexTreeLayer + :members: + :special-members: __init__ + :show-inheritance: diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc index 2cb24990..6931ebee 100644 --- a/src/python/doc/rips_complex_sum.inc +++ b/src/python/doc/rips_complex_sum.inc @@ -11,4 +11,7 @@ | | | | +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+ | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` | - +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+ + | | * :doc:`rips_complex_tflow_itf_ref` | :requires: `TensorFlow `_ | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+ diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst new file mode 100644 index 00000000..db8c64ff --- /dev/null +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -0,0 +1,33 @@ +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +TensorFlow layer for Vietoris-Rips persistence +############################################## + +.. 
include:: differentiation_sum.inc + +Example of gradient computed from Vietoris-Rips persistence +----------------------------------------------------------- + +.. code-block:: python + from gudhi.tensorflow import * + import numpy as np + import tensorflow as tf + + Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32) + X = tf.Variable(initial_value=Xinit, trainable=True) + rl = RipsLayer(maximum_edge_length=2., dimension=0) + + with tf.GradientTape() as tape: + dgm = rl.call(X) + loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) + +Documentation for RipsLayer +--------------------------- + +.. autoclass:: gudhi.tensorflow.RipsLayer + :members: + :special-members: __init__ + :show-inheritance: diff --git a/src/python/doc/simplex_tree_sum.inc b/src/python/doc/simplex_tree_sum.inc index a8858f16..3ad1292c 100644 --- a/src/python/doc/simplex_tree_sum.inc +++ b/src/python/doc/simplex_tree_sum.inc @@ -1,13 +1,16 @@ .. table:: :widths: 30 40 30 - +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ - | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria | - | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | | - | :alt: Simplex tree representation | | :Since: GUDHI 2.0.0 | - | :figclass: align-center | The data structure is described in | | - | | :cite:`boissonnatmariasimplextreealgorithmica` | :License: MIT | - | | | | - +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+ - | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` | - +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+ + +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+ + | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria | + | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. 
| | + | :alt: Simplex tree representation | | :Since: GUDHI 2.0.0 | + | :figclass: align-center | The data structure is described in | | + | | :cite:`boissonnatmariasimplextreealgorithmica` | :License: MIT | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+ + | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` | + +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+ + | | * :doc:`ls_simplex_tree_tflow_itf_ref` | :requires: `TensorFlow `_ | + | | | | + +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+ -- cgit v1.2.3 From 423c4be21968fd42c5470a9132d0e332c73ec2b9 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 19 Oct 2021 14:37:39 +0200 Subject: fix python documentation --- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 ++ src/python/doc/differentiation_sum.inc | 5 +++-- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 2 ++ src/python/doc/rips_complex_tflow_itf_ref.rst | 2 ++ 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 8991b638..e85cfdc6 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -11,6 +11,7 @@ Example of gradient computed from cubical persistence ----------------------------------------------------- .. code-block:: python + from gudhi.tensorflow import * import numpy as np import tensorflow as tf @@ -23,6 +24,7 @@ Example of gradient computed from cubical persistence dgm = cl.call(X) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) + print(grads[0].numpy()) Documentation for CubicalLayer ------------------------------ diff --git a/src/python/doc/differentiation_sum.inc b/src/python/doc/differentiation_sum.inc index 7340d24d..3dd8e59c 100644 --- a/src/python/doc/differentiation_sum.inc +++ b/src/python/doc/differentiation_sum.inc @@ -1,4 +1,4 @@ -. list-table:: +.. list-table:: :widths: 40 30 30 :header-rows: 0 @@ -7,4 +7,5 @@ - :Requires: `TensorFlow `_ We provide TensorFlow 2 models that can handle automatic differentiation for the computation of persistence diagrams from complexes available in the Gudhi library. -This includes simplex trees, cubical complexes and Vietoris-Rips complexes. +This includes simplex trees, cubical complexes and Vietoris-Rips complexes. Detailed example on how to use these layers in practice are available +in the following `notebook `_. diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index bb9c61c6..7baf611c 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -11,6 +11,7 @@ Example of gradient computed from lower-star filtration of a simplex tree ------------------------------------------------------------------------- .. 
code-block:: python + from gudhi.tensorflow import * import numpy as np import tensorflow as tf @@ -47,6 +48,7 @@ Example of gradient computed from lower-star filtration of a simplex tree dgm = sl.call(F) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [F]) + print(grads[0].numpy()) Documentation for LowerStarSimplexTreeLayer ------------------------------------------- diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index db8c64ff..15ba4c8e 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -11,6 +11,7 @@ Example of gradient computed from Vietoris-Rips persistence ----------------------------------------------------------- .. code-block:: python + from gudhi.tensorflow import * import numpy as np import tensorflow as tf @@ -23,6 +24,7 @@ Example of gradient computed from Vietoris-Rips persistence dgm = rl.call(X) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) + print(grads[0].numpy()) Documentation for RipsLayer --------------------------- -- cgit v1.2.3 From 10be82856aee6eb7f4e704757b70c9dab6fe28b8 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 19 Oct 2021 16:09:08 +0200 Subject: cleanup --- src/python/doc/img/ripsTF.png | Bin 38696 -> 0 bytes src/python/gudhi/tensorflow/CubicalLayer.py | 66 ------------------ .../gudhi/tensorflow/LowerStarSimplexTreeLayer.py | 77 --------------------- src/python/gudhi/tensorflow/RipsLayer.py | 75 -------------------- src/python/gudhi/tensorflow/__init__.py | 6 +- src/python/gudhi/tensorflow/cubical_layer.py | 66 ++++++++++++++++++ .../tensorflow/lower_star_simplex_tree_layer.py | 77 +++++++++++++++++++++ src/python/gudhi/tensorflow/rips_layer.py | 75 ++++++++++++++++++++ 8 files changed, 221 insertions(+), 221 deletions(-) delete mode 100644 src/python/doc/img/ripsTF.png delete mode 100644 src/python/gudhi/tensorflow/CubicalLayer.py delete mode 100644 src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py delete mode 100644 src/python/gudhi/tensorflow/RipsLayer.py create mode 100644 src/python/gudhi/tensorflow/cubical_layer.py create mode 100644 src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py create mode 100644 src/python/gudhi/tensorflow/rips_layer.py diff --git a/src/python/doc/img/ripsTF.png b/src/python/doc/img/ripsTF.png deleted file mode 100644 index 3c5c77a7..00000000 Binary files a/src/python/doc/img/ripsTF.png and /dev/null differ diff --git a/src/python/gudhi/tensorflow/CubicalLayer.py b/src/python/gudhi/tensorflow/CubicalLayer.py deleted file mode 100644 index e36adec5..00000000 --- a/src/python/gudhi/tensorflow/CubicalLayer.py +++ /dev/null @@ -1,66 +0,0 @@ -import numpy as np -import tensorflow as tf -from ..cubical_complex import CubicalComplex - -###################### -# Cubical filtration # -###################### - -# The parameters of the model are the pixel values. 
- -def _Cubical(X, dimension): - # Parameters: X (image), - # dimension (homology dimension) - - # Compute the persistence pairs with Gudhi - cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) - cc.persistence() - try: - cof = cc.cofaces_of_persistence_pairs()[0][dimension] - except IndexError: - cof = np.array([]) - - if len(cof) > 0: - # Sort points with distance-to-diagonal - Xs = X.shape - pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] - perm = np.argsort(pers) - cof = cof[perm[::-1]] - - # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices - D = len(Xs) if len(cof) > 0 else 1 - ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) - count = 0 - for idx in range(0,2*cof.shape[0],2): - ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) - ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) - count += 1 - return np.array(ocof, dtype=np.int32) - -class CubicalLayer(tf.keras.layers.Layer): - """ - TensorFlow layer for computing cubical persistence out of a cubical complex - - Attributes: - dimension (int): homology dimension - """ - def __init__(self, dimension=1, **kwargs): - super().__init__(dynamic=True, **kwargs) - self.dimension = dimension - - def build(self): - super.build() - - def call(self, X): - """ - Compute persistence diagram associated to a cubical complex filtered by some pixel values - - Parameters: - X (TensorFlow variable): pixel values of the cubical complex - """ - # Compute pixels associated to positive and negative simplices - # Don't compute gradient for this operation - indices = tf.stop_gradient(_Cubical(X.numpy(), self.dimension)) - # Get persistence diagram by simply picking the corresponding entries in the image - dgm = tf.reshape(tf.gather_nd(X, tf.reshape(indices, [-1,len(X.shape)])), [-1,2]) - return dgm diff --git a/src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py b/src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py deleted file mode 100644 index fc963d2f..00000000 --- a/src/python/gudhi/tensorflow/LowerStarSimplexTreeLayer.py +++ /dev/null @@ -1,77 +0,0 @@ -import numpy as np -import tensorflow as tf - -######################################### -# Lower star filtration on simplex tree # -######################################### - -# The parameters of the model are the vertex function values of the simplex tree. - -def _LowerStarSimplexTree(simplextree, filtration, dimension): - # Parameters: simplextree (simplex tree on which to compute persistence) - # filtration (function values on the vertices of st), - # dimension (homology dimension), - - for s,_ in simplextree.get_filtration(): - simplextree.assign_filtration(s, -1e10) - - # Assign new filtration values - for i in range(simplextree.num_vertices()): - simplextree.assign_filtration([i], filtration[i]) - simplextree.make_filtration_non_decreasing() - - # Compute persistence diagram - dgm = simplextree.persistence() - - # Get vertex pairs for optimization. 
First, get all simplex pairs - pairs = simplextree.persistence_pairs() - - # Then, loop over all simplex pairs - indices, pers = [], [] - for s1, s2 in pairs: - # Select pairs with good homological dimension and finite lifetime - if len(s1) == dimension+1 and len(s2) > 0: - # Get IDs of the vertices corresponding to the filtration values of the simplices - l1, l2 = np.array(s1), np.array(s2) - i1 = l1[np.argmax(filtration[l1])] - i2 = l2[np.argmax(filtration[l2])] - indices.append(i1) - indices.append(i2) - # Compute lifetime - pers.append(simplextree.filtration(s2)-simplextree.filtration(s1)) - - # Sort vertex pairs wrt lifetime - perm = np.argsort(pers) - indices = np.reshape(indices, [-1,2])[perm][::-1,:].flatten() - - return np.array(indices, dtype=np.int32) - -class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): - """ - TensorFlow layer for computing lower-star persistence out of a simplex tree - - Attributes: - simplextree (gudhi.SimplexTree()): underlying simplex tree - dimension (int): homology dimension - """ - def __init__(self, simplextree, dimension=0, **kwargs): - super().__init__(dynamic=True, **kwargs) - self.dimension = dimension - self.simplextree = simplextree - - def build(self): - super.build() - - def call(self, filtration): - """ - Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree - - Parameters: - F (TensorFlow variable): filter function values over the vertices of the simplex tree - """ - # Don't try to compute gradients for the vertex pairs - indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) - # Get persistence diagram - self.dgm = tf.reshape(tf.gather(filtration, indices), [-1,2]) - return self.dgm - diff --git a/src/python/gudhi/tensorflow/RipsLayer.py b/src/python/gudhi/tensorflow/RipsLayer.py deleted file mode 100644 index 373e021e..00000000 --- a/src/python/gudhi/tensorflow/RipsLayer.py +++ /dev/null @@ -1,75 +0,0 @@ -import numpy as np -import tensorflow as tf -from ..rips_complex import RipsComplex - -############################ -# Vietoris-Rips filtration # -############################ - -# The parameters of the model are the point coordinates. 
- -def _Rips(DX, max_edge, dimension): - # Parameters: DX (distance matrix), - # max_edge (maximum edge length for Rips filtration), - # dimension (homology dimension) - - # Compute the persistence pairs with Gudhi - rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) - st = rc.create_simplex_tree(max_dimension=dimension+1) - dgm = st.persistence() - pairs = st.persistence_pairs() - - # Retrieve vertices v_a and v_b by picking the ones achieving the maximal - # distance among all pairwise distances between the simplex vertices - indices, pers = [], [] - for s1, s2 in pairs: - if len(s1) == dimension+1 and len(s2) > 0: - l1, l2 = np.array(s1), np.array(s2) - i1 = [l1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(l1), len(l1)])] - i2 = [l2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(l2), len(l2)])] - indices.append(i1) - indices.append(i2) - pers.append(st.filtration(s2)-st.filtration(s1)) - - # Sort points with distance-to-diagonal - perm = np.argsort(pers) - indices = np.reshape(indices, [-1,4])[perm][::-1,:].flatten() - - return np.array(indices, dtype=np.int32) - -class RipsLayer(tf.keras.layers.Layer): - """ - TensorFlow layer for computing Rips persistence out of a point cloud - - Attributes: - maximum_edge_length (float): maximum edge length for the Rips complex - dimension (int): homology dimension - """ - def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): - super().__init__(dynamic=True, **kwargs) - self.max_edge = maximum_edge_length - self.dimension = dimension - - def build(self): - super.build() - - def call(self, X): - """ - Compute Rips persistence diagram associated to a point cloud - - Parameters: - X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] - """ - # Compute distance matrix - DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) - # Compute vertices associated to positive and negative simplices - # Don't compute gradient for this operation - indices = tf.stop_gradient(_Rips(DX.numpy(), self.max_edge, self.dimension)) - # Get persistence diagram by simply picking the corresponding entries in the distance matrix - if self.dimension > 0: - dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) - else: - indices = tf.reshape(indices, [-1,2])[1::2,:] - dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) - return dgm - diff --git a/src/python/gudhi/tensorflow/__init__.py b/src/python/gudhi/tensorflow/__init__.py index 47335a25..1599cf52 100644 --- a/src/python/gudhi/tensorflow/__init__.py +++ b/src/python/gudhi/tensorflow/__init__.py @@ -1,5 +1,5 @@ -from .CubicalLayer import CubicalLayer -from .LowerStarSimplexTreeLayer import LowerStarSimplexTreeLayer -from .RipsLayer import RipsLayer +from .cubical_layer import CubicalLayer +from .lower_star_simplex_tree_layer import LowerStarSimplexTreeLayer +from .rips_layer import RipsLayer __all__ = ["LowerStarSimplexTreeLayer", "RipsLayer", "CubicalLayer"] diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py new file mode 100644 index 00000000..e36adec5 --- /dev/null +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -0,0 +1,66 @@ +import numpy as np +import tensorflow as tf +from ..cubical_complex import CubicalComplex + +###################### +# Cubical filtration # +###################### + +# The parameters of the model are the pixel values. 
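+
+# Editorial note (not part of the original patch): the helper below encodes,
+# for each finite persistence pair in the requested dimension, the
+# multi-dimensional pixel indices of its birth and death cells (D integers
+# each, obtained with np.unravel_index). The layer gathers the corresponding
+# pixel values from X with tf.gather_nd, so gradients reach the image.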
+ +def _Cubical(X, dimension): + # Parameters: X (image), + # dimension (homology dimension) + + # Compute the persistence pairs with Gudhi + cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) + cc.persistence() + try: + cof = cc.cofaces_of_persistence_pairs()[0][dimension] + except IndexError: + cof = np.array([]) + + if len(cof) > 0: + # Sort points with distance-to-diagonal + Xs = X.shape + pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] + perm = np.argsort(pers) + cof = cof[perm[::-1]] + + # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices + D = len(Xs) if len(cof) > 0 else 1 + ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) + count = 0 + for idx in range(0,2*cof.shape[0],2): + ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) + ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) + count += 1 + return np.array(ocof, dtype=np.int32) + +class CubicalLayer(tf.keras.layers.Layer): + """ + TensorFlow layer for computing cubical persistence out of a cubical complex + + Attributes: + dimension (int): homology dimension + """ + def __init__(self, dimension=1, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.dimension = dimension + + def build(self): + super.build() + + def call(self, X): + """ + Compute persistence diagram associated to a cubical complex filtered by some pixel values + + Parameters: + X (TensorFlow variable): pixel values of the cubical complex + """ + # Compute pixels associated to positive and negative simplices + # Don't compute gradient for this operation + indices = tf.stop_gradient(_Cubical(X.numpy(), self.dimension)) + # Get persistence diagram by simply picking the corresponding entries in the image + dgm = tf.reshape(tf.gather_nd(X, tf.reshape(indices, [-1,len(X.shape)])), [-1,2]) + return dgm diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py new file mode 100644 index 00000000..fc963d2f --- /dev/null +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -0,0 +1,77 @@ +import numpy as np +import tensorflow as tf + +######################################### +# Lower star filtration on simplex tree # +######################################### + +# The parameters of the model are the vertex function values of the simplex tree. + +def _LowerStarSimplexTree(simplextree, filtration, dimension): + # Parameters: simplextree (simplex tree on which to compute persistence) + # filtration (function values on the vertices of st), + # dimension (homology dimension), + + for s,_ in simplextree.get_filtration(): + simplextree.assign_filtration(s, -1e10) + + # Assign new filtration values + for i in range(simplextree.num_vertices()): + simplextree.assign_filtration([i], filtration[i]) + simplextree.make_filtration_non_decreasing() + + # Compute persistence diagram + dgm = simplextree.persistence() + + # Get vertex pairs for optimization. 
First, get all simplex pairs + pairs = simplextree.persistence_pairs() + + # Then, loop over all simplex pairs + indices, pers = [], [] + for s1, s2 in pairs: + # Select pairs with good homological dimension and finite lifetime + if len(s1) == dimension+1 and len(s2) > 0: + # Get IDs of the vertices corresponding to the filtration values of the simplices + l1, l2 = np.array(s1), np.array(s2) + i1 = l1[np.argmax(filtration[l1])] + i2 = l2[np.argmax(filtration[l2])] + indices.append(i1) + indices.append(i2) + # Compute lifetime + pers.append(simplextree.filtration(s2)-simplextree.filtration(s1)) + + # Sort vertex pairs wrt lifetime + perm = np.argsort(pers) + indices = np.reshape(indices, [-1,2])[perm][::-1,:].flatten() + + return np.array(indices, dtype=np.int32) + +class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): + """ + TensorFlow layer for computing lower-star persistence out of a simplex tree + + Attributes: + simplextree (gudhi.SimplexTree()): underlying simplex tree + dimension (int): homology dimension + """ + def __init__(self, simplextree, dimension=0, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.dimension = dimension + self.simplextree = simplextree + + def build(self): + super.build() + + def call(self, filtration): + """ + Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree + + Parameters: + F (TensorFlow variable): filter function values over the vertices of the simplex tree + """ + # Don't try to compute gradients for the vertex pairs + indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) + # Get persistence diagram + self.dgm = tf.reshape(tf.gather(filtration, indices), [-1,2]) + return self.dgm + diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py new file mode 100644 index 00000000..373e021e --- /dev/null +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -0,0 +1,75 @@ +import numpy as np +import tensorflow as tf +from ..rips_complex import RipsComplex + +############################ +# Vietoris-Rips filtration # +############################ + +# The parameters of the model are the point coordinates. 
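
The same calling convention applies here, with the point cloud itself as the trainable variable. A minimal sketch, taken from the test added in test_diff.py further down this series (maximum_edge_length=2. and dimension=0 are illustrative):

    import tensorflow as tf
    from gudhi.tensorflow import RipsLayer

    # Point coordinates are the trainable parameters of the model
    X = tf.Variable([[1.,1.],[2.,2.]], dtype=tf.float32, trainable=True)
    rl = RipsLayer(maximum_edge_length=2., dimension=0)

    with tf.GradientTape() as tape:
        dgm = rl.call(X)
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
    grads = tape.gradient(loss, [X])  # equals [[-.5,-.5],[.5,.5]] for this input, see test_diff.py
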
+ +def _Rips(DX, max_edge, dimension): + # Parameters: DX (distance matrix), + # max_edge (maximum edge length for Rips filtration), + # dimension (homology dimension) + + # Compute the persistence pairs with Gudhi + rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) + st = rc.create_simplex_tree(max_dimension=dimension+1) + dgm = st.persistence() + pairs = st.persistence_pairs() + + # Retrieve vertices v_a and v_b by picking the ones achieving the maximal + # distance among all pairwise distances between the simplex vertices + indices, pers = [], [] + for s1, s2 in pairs: + if len(s1) == dimension+1 and len(s2) > 0: + l1, l2 = np.array(s1), np.array(s2) + i1 = [l1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(l1), len(l1)])] + i2 = [l2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(l2), len(l2)])] + indices.append(i1) + indices.append(i2) + pers.append(st.filtration(s2)-st.filtration(s1)) + + # Sort points with distance-to-diagonal + perm = np.argsort(pers) + indices = np.reshape(indices, [-1,4])[perm][::-1,:].flatten() + + return np.array(indices, dtype=np.int32) + +class RipsLayer(tf.keras.layers.Layer): + """ + TensorFlow layer for computing Rips persistence out of a point cloud + + Attributes: + maximum_edge_length (float): maximum edge length for the Rips complex + dimension (int): homology dimension + """ + def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): + super().__init__(dynamic=True, **kwargs) + self.max_edge = maximum_edge_length + self.dimension = dimension + + def build(self): + super.build() + + def call(self, X): + """ + Compute Rips persistence diagram associated to a point cloud + + Parameters: + X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] + """ + # Compute distance matrix + DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) + # Compute vertices associated to positive and negative simplices + # Don't compute gradient for this operation + indices = tf.stop_gradient(_Rips(DX.numpy(), self.max_edge, self.dimension)) + # Get persistence diagram by simply picking the corresponding entries in the distance matrix + if self.dimension > 0: + dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) + else: + indices = tf.reshape(indices, [-1,2])[1::2,:] + dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) + return dgm + -- cgit v1.2.3 From 7d3d9e57c7c72b0762db910c2638b08e596199df Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 19 Oct 2021 16:16:21 +0200 Subject: Make Cech benchmark work --- .../benchmark/cech_complex_benchmark.cpp | 24 ++++++++++++++-------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index cfeb0725..06d90757 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -9,7 +9,7 @@ */ #include -#include +#include #include #include #include @@ -33,7 +33,10 @@ using Points_off_reader = Gudhi::Points_off_reader; using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; using Kernel = CGAL::Epeck_d; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Point_cgal = typename Kernel::Point_d; +using Point_cloud_cgal = std::vector; +using Points_off_reader_cgal = Gudhi::Points_off_reader; +using Cech_complex = 
Gudhi::cech_complex::Cech_complex; class Minimal_enclosing_ball_radius { public: @@ -65,6 +68,7 @@ int main(int argc, char* argv[]) { // Extract the points from the file filepoints Points_off_reader off_reader(off_file_points); + Points_off_reader_cgal off_reader_cgal(off_file_points); Gudhi::Clock euclidean_clock("Gudhi::Euclidean_distance"); // Compute the proximity graph of the points @@ -79,16 +83,16 @@ int main(int argc, char* argv[]) { off_reader.get_point_cloud(), threshold, Minimal_enclosing_ball_radius()); std::clog << miniball_clock << std::endl; - Gudhi::Clock common_miniball_clock("Gudhi::Minimal_enclosing_ball_radius()"); + Gudhi::Clock cgal_miniball_clock("Gudhi::Minimal_enclosing_ball_radius_cgal()"); // Compute the proximity graph of the points - Proximity_graph common_miniball_prox_graph = Gudhi::compute_proximity_graph( - off_reader.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius()); - std::clog << common_miniball_clock << std::endl; + Proximity_graph cgal_miniball_prox_graph = Gudhi::compute_proximity_graph( + off_reader_cgal.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius()); + std::clog << cgal_miniball_clock << std::endl; boost::filesystem::path full_path(boost::filesystem::current_path()); std::clog << "Current path is : " << full_path << std::endl; - std::clog << "File name;Radius;Rips time;Cech time; Ratio Rips/Cech time;Rips nb simplices;Cech nb simplices;" + std::clog << "File name; Radius; Rips time; Cech time; Ratio Rips/Cech time; Rips nb simplices; Cech nb simplices;" << std::endl; boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) { @@ -96,13 +100,15 @@ int main(int argc, char* argv[]) { if (itr->path().extension() == ".off") // see below { Points_off_reader off_reader(itr->path().string()); + Points_off_reader_cgal off_reader_cgal(itr->path().string()); + Point p0 = off_reader.get_point_cloud()[0]; for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) { std::clog << itr->path().stem() << ";"; std::clog << radius << ";"; Gudhi::Clock rips_clock("Rips computation"); - Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius, + Rips_complex rips_complex_from_points(off_reader_cgal.get_point_cloud(), radius, Gudhi::Minimal_enclosing_ball_radius()); Simplex_tree rips_stree; rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); @@ -113,7 +119,7 @@ int main(int argc, char* argv[]) { std::clog << rips_sec << ";"; Gudhi::Clock cech_clock("Cech computation"); - Cech_complex cech_complex_from_points(off_reader.get_point_cloud(), radius); + Cech_complex cech_complex_from_points(off_reader_cgal.get_point_cloud(), radius); Simplex_tree cech_stree; cech_complex_from_points.create_complex(cech_stree, p0.size() - 1); // ------------------------------------------ -- cgit v1.2.3 From c9d6439fb9a6e65d7aa9f18bce675de65e901a0d Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 25 Oct 2021 11:43:09 +0200 Subject: Rename WITH_NETWORK option to WITH_GUDHI_REMOTE_TEST --- .appveyor.yml | 2 +- .circleci/config.yml | 2 +- .github/for_maintainers/tests_strategy.md | 2 +- src/cmake/modules/GUDHI_modules.cmake | 2 +- src/common/doc/installation.h | 4 ++-- src/python/CMakeLists.txt | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 521ec42d..ee6067e0 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -30,7 +30,7 @@ 
environment: CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON - target: PythonTestsWithNetwork - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_NETWORK=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON cache: diff --git a/.circleci/config.yml b/.circleci/config.yml index 85e42f8a..262e124b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -89,7 +89,7 @@ jobs: git submodule update mkdir build cd build - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_TEST=ON -DWITH_NETWORK=ON .. + cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON .. cd src/python python3 setup.py build_ext --inplace ctest --output-on-failure diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md index 8fd7ac0d..610e1749 100644 --- a/.github/for_maintainers/tests_strategy.md +++ b/.github/for_maintainers/tests_strategy.md @@ -15,7 +15,7 @@ As all the third parties are already installed (thanks to docker), the compilati * utils (C++) * doxygen (C++ documentation that is available in the artefacts) * python (including documentation and code coverage that are available in the artefacts) -* python_tests_with_network (includes previous python with WITH_NETWORK option enabled which adds datasets fetching test) +* python_tests_with_network (includes previous python with WITH_GUDHI_REMOTE_TEST option enabled which adds datasets fetching test) (cf. `.circleci/config.yml`) diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index 9cc1a8f5..7cdce307 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -19,7 +19,7 @@ endfunction(add_gudhi_module) option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF) option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installation" OFF) -option(WITH_NETWORK "Activate/deactivate datasets fetching test which uses the Internet" OFF) +option(WITH_GUDHI_REMOTE_TEST "Activate/deactivate datasets fetching test which uses the Internet" OFF) option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) option(WITH_GUDHI_TEST "Activate/deactivate examples compilation and installation" ON) option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 72d4b1e5..b0fbdf20 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -40,8 +40,8 @@ make \endverbatim * `make test` is using Ctest (CMake test driver * program). If some of the tests are failing, please send us the result of the following command: * \verbatim ctest --output-on-failure \endverbatim - * Testing fetching datasets feature requires the use of the internet and is disabled by default. If you want to include this test, set WITH_NETWORK to ON when building in the previous step (note that this test is included in the python module): - * \verbatim cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_TEST=ON -DWITH_NETWORK=ON --DWITH_GUDHI_PYTHON=ON .. 
\endverbatim + * Testing fetching datasets feature requires the use of the internet and is disabled by default. If you want to include this test, set WITH_GUDHI_REMOTE_TEST to ON when building in the previous step (note that this test is included in the python module): + * \verbatim cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON --DWITH_GUDHI_PYTHON=ON .. \endverbatim * * \subsection documentationgeneration Documentation * To generate the documentation, Doxygen is required. diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 6c8dfe32..ddb5c9c2 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -543,7 +543,7 @@ if(PYTHONINTERP_FOUND) endif() # Fetch remote datasets - if(WITH_NETWORK) + if(WITH_GUDHI_REMOTE_TEST) add_gudhi_py_test(test_remote_datasets) endif() -- cgit v1.2.3 From 5db7ab2b55262a88c0ceecbb9c7ea004d9ed087e Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 25 Oct 2021 15:34:03 +0200 Subject: Enable WITH_GUDHI_REMOTE_TEST option for python target in CI platforms --- .appveyor.yml | 11 ++++------- .circleci/config.yml | 20 +------------------- .github/for_maintainers/tests_strategy.md | 12 +++++------- azure-pipelines.yml | 2 +- src/cmake/modules/GUDHI_modules.cmake | 6 +++--- 5 files changed, 14 insertions(+), 37 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index ee6067e0..e90f1b83 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -27,10 +27,7 @@ environment: CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - target: Python - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON - - - target: PythonTestsWithNetwork - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DWITH_GUDHI_REMOTE_TEST=ON cache: @@ -59,12 +56,12 @@ build_script: - mkdir build - cd build - cmake -G "Visual Studio 15 2017 Win64" %CMAKE_FLAGS% %CMAKE_GMP_FLAGS% %CMAKE_MPFR_FLAGS% %CMAKE_VCPKG_FLAGS% .. - - if or ([%target%]==[Python]) ([%target%]==[PythonTestsWithNetwork]) { + - if [%target%]==[Python] ( cd src/python & type setup.py & MSBuild Cython.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 --output-on-failure -C Release - } else { + ) else ( MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 & ctest -j 1 --output-on-failure -C Release -E diff_files - } + ) diff --git a/.circleci/config.yml b/.circleci/config.yml index 262e124b..90737006 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -61,7 +61,7 @@ jobs: cmake -DUSER_VERSION_DIR=version .. make user_version cd version - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 . + cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_REMOTE_TEST=ON . 
cd python python3 setup.py build_ext --inplace make sphinx @@ -77,23 +77,6 @@ jobs: path: /tmp/htmlcov destination: htmlcov - python_tests_with_network: - docker: - - image: gudhi/ci_for_gudhi:latest - steps: - - checkout - - run: - name: Build and test python module with network - command: | - git submodule init - git submodule update - mkdir build - cd build - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON .. - cd src/python - python3 setup.py build_ext --inplace - ctest --output-on-failure - doxygen: docker: - image: gudhi/ci_for_gudhi:latest @@ -262,5 +245,4 @@ workflows: - tests - utils - python - - python_tests_with_network - doxygen diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md index 610e1749..2bba3f42 100644 --- a/.github/for_maintainers/tests_strategy.md +++ b/.github/for_maintainers/tests_strategy.md @@ -14,8 +14,7 @@ As all the third parties are already installed (thanks to docker), the compilati * tests (C++) * utils (C++) * doxygen (C++ documentation that is available in the artefacts) -* python (including documentation and code coverage that are available in the artefacts) -* python_tests_with_network (includes previous python with WITH_GUDHI_REMOTE_TEST option enabled which adds datasets fetching test) +* python (including documentation and code coverage that are available in the artefacts; here the WITH_GUDHI_REMOTE_TEST option is enabled which adds datasets fetching test) (cf. `.circleci/config.yml`) @@ -40,13 +39,12 @@ docker push gudhi/ci_for_gudhi_wo_cgal:latest ### Windows -The compilations has been seperated by categories to be parallelized, but I don't know why builds are not run in parallel: +The compilations has been separated by categories to be parallelized, but I don't know why builds are not run in parallel: * examples (C++) * tests (C++) * utils (C++) -* python -* python tests with network +* python (here the WITH_GUDHI_REMOTE_TEST option is enabled which adds datasets fetching test) Doxygen (C++) is not tested. (cf. `.appveyor.yml`) @@ -56,12 +54,12 @@ In case of installation issue, check in [vcpkg issues](https://github.com/micros ### OSx -The compilations has been seperated by categories to be parallelized: +The compilations has been separated by categories to be parallelized: * examples (C++) * tests (C++) * utils (C++) -* python +* python (here the WITH_GUDHI_REMOTE_TEST option is enabled which adds datasets fetching test) * Doxygen (C++) (cf. `azure-pipelines.yml`) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 6c194f2a..6e102b83 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -30,7 +30,7 @@ jobs: source activate gudhi_build_env mkdir build cd build - cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. + cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_REMOTE_TEST=ON .. 
make -j 4 make doxygen ctest -j 4 --output-on-failure # -E sphinx remove sphinx build as it fails diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index 7cdce307..cbed6351 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -18,11 +18,11 @@ function(add_gudhi_module file_path) endfunction(add_gudhi_module) option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installation" OFF) +option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation" OFF) option(WITH_GUDHI_REMOTE_TEST "Activate/deactivate datasets fetching test which uses the Internet" OFF) option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) -option(WITH_GUDHI_TEST "Activate/deactivate examples compilation and installation" ON) -option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) +option(WITH_GUDHI_TEST "Activate/deactivate tests compilation" ON) +option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation" ON) if (WITH_GUDHI_BENCHMARK) set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};benchmark") -- cgit v1.2.3 From c4269eef025d4e6c7a763cd99b5dada647693c1d Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 1 Nov 2021 14:36:11 +0100 Subject: fix doc --- src/python/doc/cubical_complex_tflow_itf_ref.rst | 9 ++++++++- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 11 +++++++++-- src/python/doc/rips_complex_tflow_itf_ref.rst | 8 +++++++- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index e85cfdc6..a907dfce 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -10,7 +10,7 @@ TensorFlow layer for cubical persistence Example of gradient computed from cubical persistence ----------------------------------------------------- -.. code-block:: python +.. testcode:: from gudhi.tensorflow import * import numpy as np @@ -23,9 +23,16 @@ Example of gradient computed from cubical persistence with tf.GradientTape() as tape: dgm = cl.call(X) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) print(grads[0].numpy()) +.. testoutput:: + + [[ 0. 0. 0. ] + [ 0. 0.5 0. ] + [ 0. 0. -0.5]] + Documentation for CubicalLayer ------------------------------ diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index 7baf611c..26cf1ff2 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -10,7 +10,7 @@ TensorFlow layer for lower-star persistence on simplex trees Example of gradient computed from lower-star filtration of a simplex tree ------------------------------------------------------------------------- -.. code-block:: python +.. testcode:: from gudhi.tensorflow import * import numpy as np @@ -47,8 +47,15 @@ Example of gradient computed from lower-star filtration of a simplex tree with tf.GradientTape() as tape: dgm = sl.call(F) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [F]) - print(grads[0].numpy()) + print(grads[0].indices.numpy()) + print(grads[0].values.numpy()) + +.. testoutput:: + + [2 4] + [-1. 1.] 
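
Note that the gradient with respect to F comes back as a tf.IndexedSlices rather than a dense tensor, because the layer reads F through tf.gather: only vertices 2 and 4 receive a gradient here. If a dense vector is preferred, it can be recovered as follows (a sketch, reusing the F and grads from the example above):

    dense_grad = tf.convert_to_tensor(grads[0])  # shape [11]; zero everywhere except entries 2 and 4
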
Documentation for LowerStarSimplexTreeLayer ------------------------------------------- diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 15ba4c8e..7300eba0 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -10,7 +10,7 @@ TensorFlow layer for Vietoris-Rips persistence Example of gradient computed from Vietoris-Rips persistence ----------------------------------------------------------- -.. code-block:: python +.. testcode:: from gudhi.tensorflow import * import numpy as np @@ -23,9 +23,15 @@ Example of gradient computed from Vietoris-Rips persistence with tf.GradientTape() as tape: dgm = rl.call(X) loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) print(grads[0].numpy()) +.. testoutput:: + + [[-0.5 -0.5] + [ 0.5 0.5]] + Documentation for RipsLayer --------------------------- -- cgit v1.2.3 From 6b16678c71daa2b9b56cc8fa79a18cde080298cc Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 1 Nov 2021 15:38:41 +0100 Subject: fix installation doc --- src/python/doc/installation.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index 35c344e3..25eb7a90 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -393,7 +393,11 @@ mathematics, science, and engineering. TensorFlow ---------- -`TensorFlow `_ is currently only used in some automatic differentiation tests. +The :doc:`cubical complex `, :doc:`simplex tree ` +and :doc:`Rips complex ` modules require `TensorFlow `_ +for incorporating them in neural nets. + +`TensorFlow `_ is also used in some automatic differentiation tests. Bug reports and contributions ***************************** -- cgit v1.2.3 From 734622d5a8816cfdaaed2aaa4b9b3212fb6a259c Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 5 Nov 2021 00:35:51 +0100 Subject: update doc and pieces of code --- src/python/doc/cubical_complex_tflow_itf_ref.rst | 4 +-- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 4 +-- src/python/doc/rips_complex_tflow_itf_ref.rst | 4 +-- src/python/gudhi/tensorflow/cubical_layer.py | 19 +++++------ .../tensorflow/lower_star_simplex_tree_layer.py | 25 +++++++------- src/python/gudhi/tensorflow/rips_layer.py | 39 ++++++++++------------ 6 files changed, 42 insertions(+), 53 deletions(-) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index a907dfce..582e0551 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -13,11 +13,9 @@ Example of gradient computed from cubical persistence .. testcode:: from gudhi.tensorflow import * - import numpy as np import tensorflow as tf - Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) - X = tf.Variable(initial_value=Xinit, trainable=True) + X = tf.Variable([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=tf.float32, trainable=True) cl = CubicalLayer(dimension=0) with tf.GradientTape() as tape: diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index 26cf1ff2..6c8b5a08 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -13,7 +13,6 @@ Example of gradient computed from lower-star filtration of a simplex tree .. 
testcode:: from gudhi.tensorflow import * - import numpy as np import tensorflow as tf import gudhi as gd @@ -40,8 +39,7 @@ Example of gradient computed from lower-star filtration of a simplex tree st.insert([8, 9]) st.insert([9, 10]) - Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) - F = tf.Variable(initial_value=Finit, trainable=True) + F = tf.Variable([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=tf.float32, trainable=True) sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0) with tf.GradientTape() as tape: diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 7300eba0..bd9c5da5 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -13,11 +13,9 @@ Example of gradient computed from Vietoris-Rips persistence .. testcode:: from gudhi.tensorflow import * - import numpy as np import tensorflow as tf - Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32) - X = tf.Variable(initial_value=Xinit, trainable=True) + X = tf.Variable([[1.,1.],[2.,2.]], dtype=tf.float32, trainable=True) rl = RipsLayer(maximum_edge_length=2., dimension=0) with tf.GradientTape() as tape: diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index e36adec5..b4ff2598 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -20,13 +20,6 @@ def _Cubical(X, dimension): except IndexError: cof = np.array([]) - if len(cof) > 0: - # Sort points with distance-to-diagonal - Xs = X.shape - pers = [X[np.unravel_index(cof[idx,1], Xs)] - X[np.unravel_index(cof[idx,0], Xs)] for idx in range(len(cof))] - perm = np.argsort(pers) - cof = cof[perm[::-1]] - # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices D = len(Xs) if len(cof) > 0 else 1 ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) @@ -40,11 +33,14 @@ def _Cubical(X, dimension): class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing cubical persistence out of a cubical complex - - Attributes: - dimension (int): homology dimension """ def __init__(self, dimension=1, **kwargs): + """ + Constructor for the CubicalLayer class + + Parameters: + dimension (int): homology dimension + """ super().__init__(dynamic=True, **kwargs) self.dimension = dimension @@ -57,6 +53,9 @@ class CubicalLayer(tf.keras.layers.Layer): Parameters: X (TensorFlow variable): pixel values of the cubical complex + + Returns: + dgm (TensorFlow variable): cubical persistence diagram with shape [num_points, 2] """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index fc963d2f..4f515386 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -21,7 +21,7 @@ def _LowerStarSimplexTree(simplextree, filtration, dimension): simplextree.make_filtration_non_decreasing() # Compute persistence diagram - dgm = simplextree.persistence() + dgm = simplextree.compute_persistence() # Get vertex pairs for optimization. 
First, get all simplex pairs pairs = simplextree.persistence_pairs() @@ -37,24 +37,22 @@ def _LowerStarSimplexTree(simplextree, filtration, dimension): i2 = l2[np.argmax(filtration[l2])] indices.append(i1) indices.append(i2) - # Compute lifetime - pers.append(simplextree.filtration(s2)-simplextree.filtration(s1)) - - # Sort vertex pairs wrt lifetime - perm = np.argsort(pers) - indices = np.reshape(indices, [-1,2])[perm][::-1,:].flatten() + indices = np.reshape(indices, [-1,2]).flatten() return np.array(indices, dtype=np.int32) class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree - - Attributes: - simplextree (gudhi.SimplexTree()): underlying simplex tree - dimension (int): homology dimension """ def __init__(self, simplextree, dimension=0, **kwargs): + """ + Constructor for the LowerStarSimplexTreeLayer class + + Parameters: + simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices + dimension (int): homology dimension + """ super().__init__(dynamic=True, **kwargs) self.dimension = dimension self.simplextree = simplextree @@ -67,7 +65,10 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree Parameters: - F (TensorFlow variable): filter function values over the vertices of the simplex tree + F (TensorFlow variable): filter function values over the vertices of the simplex tree. The ith entry of F corresponds to vertex i in self.simplextree + + Returns: + dgm (TensorFlow variable): lower-star persistence diagram with shape [num_points, 2] """ # Don't try to compute gradients for the vertex pairs indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 373e021e..6d54871c 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -17,35 +17,26 @@ def _Rips(DX, max_edge, dimension): rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) st = rc.create_simplex_tree(max_dimension=dimension+1) dgm = st.persistence() - pairs = st.persistence_pairs() - - # Retrieve vertices v_a and v_b by picking the ones achieving the maximal - # distance among all pairwise distances between the simplex vertices - indices, pers = [], [] - for s1, s2 in pairs: - if len(s1) == dimension+1 and len(s2) > 0: - l1, l2 = np.array(s1), np.array(s2) - i1 = [l1[v] for v in np.unravel_index(np.argmax(DX[l1,:][:,l1]),[len(l1), len(l1)])] - i2 = [l2[v] for v in np.unravel_index(np.argmax(DX[l2,:][:,l2]),[len(l2), len(l2)])] - indices.append(i1) - indices.append(i2) - pers.append(st.filtration(s2)-st.filtration(s1)) - - # Sort points with distance-to-diagonal - perm = np.argsort(pers) - indices = np.reshape(indices, [-1,4])[perm][::-1,:].flatten() + if dimension == 0: + pairs = st.flag_persistence_generators()[0] + else: + pairs = st.flag_persistence_generators()[1][dimension-1] + indices = pairs.flatten() return np.array(indices, dtype=np.int32) class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud - - Attributes: - maximum_edge_length (float): maximum edge length for the Rips complex - dimension (int): homology dimension """ def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): + """ + 
Constructor for the RipsLayer class + + Parameters: + maximum_edge_length (float): maximum edge length for the Rips complex + dimension (int): homology dimension + """ super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length self.dimension = dimension @@ -59,6 +50,9 @@ class RipsLayer(tf.keras.layers.Layer): Parameters: X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] + + Returns: + dgm (TensorFlow variable): Rips persistence diagram with shape [num_points, 2] with points sorted by """ # Compute distance matrix DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) @@ -69,7 +63,8 @@ class RipsLayer(tf.keras.layers.Layer): if self.dimension > 0: dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) else: - indices = tf.reshape(indices, [-1,2])[1::2,:] + #indices = tf.reshape(indices, [-1,2])[1::2,:] + indices = indices[:,1:] dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) return dgm -- cgit v1.2.3 From bd7134d71628958e4e281817f746b0ad7ad83d00 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 5 Nov 2021 19:21:54 +0100 Subject: modified API for multiple dimensions and finite + essential --- src/python/gudhi/tensorflow/cubical_layer.py | 53 ++++++++++-------- .../tensorflow/lower_star_simplex_tree_layer.py | 52 +++++++++--------- src/python/gudhi/tensorflow/rips_layer.py | 62 ++++++++++++++-------- src/python/test/test_diff.py | 12 ++--- 4 files changed, 102 insertions(+), 77 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index b4ff2598..d8177864 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -8,41 +8,48 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the pixel values. 
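
The patch below switches all three layers from a single `dimension` argument to a list of `dimensions`, and call() now returns one diagram (or one pair of diagrams) per requested dimension. A sketch of the resulting calling convention for the cubical layer, taken from the updated tests in test_diff.py (X is a trainable pixel tensor as before):

    cl = CubicalLayer(dimensions=[0])
    dgms = cl.call(X)  # a list with one entry per element of `dimensions`
    dgm0 = dgms[0]     # finite persistence diagram in dimension 0, shape [num_points, 2]
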
-def _Cubical(X, dimension): +def _Cubical(X, dimensions): # Parameters: X (image), - # dimension (homology dimension) + # dimensions (homology dimensions) # Compute the persistence pairs with Gudhi - cc = CubicalComplex(dimensions=X.shape, top_dimensional_cells=X.flatten()) + Xs = X.shape + cc = CubicalComplex(dimensions=Xs, top_dimensional_cells=X.flatten()) cc.persistence() - try: - cof = cc.cofaces_of_persistence_pairs()[0][dimension] - except IndexError: - cof = np.array([]) - # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices - D = len(Xs) if len(cof) > 0 else 1 - ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) - count = 0 - for idx in range(0,2*cof.shape[0],2): - ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) - ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) - count += 1 - return np.array(ocof, dtype=np.int32) + L_cofs = [] + for dim in dimensions: + + try: + cof = cc.cofaces_of_persistence_pairs()[0][dim] + except IndexError: + cof = np.array([]) + + # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices + D = len(Xs) if len(cof) > 0 else 1 + ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) + count = 0 + for idx in range(0,2*cof.shape[0],2): + ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) + ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) + count += 1 + L_cofs.append(np.array(ocof, dtype=np.int32)) + + return L_cofs class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing cubical persistence out of a cubical complex """ - def __init__(self, dimension=1, **kwargs): + def __init__(self, dimensions=[1], **kwargs): """ Constructor for the CubicalLayer class Parameters: - dimension (int): homology dimension + dimensions (list of int): homology dimensions """ super().__init__(dynamic=True, **kwargs) - self.dimension = dimension + self.dimensions = dimensions def build(self): super.build() @@ -55,11 +62,11 @@ class CubicalLayer(tf.keras.layers.Layer): X (TensorFlow variable): pixel values of the cubical complex Returns: - dgm (TensorFlow variable): cubical persistence diagram with shape [num_points, 2] + dgms (list of TensorFlow variables): list of cubical persistence diagrams of length self.dimensions, where each element contains a finite persistence diagram of shape [num_finite_points, 2] """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation - indices = tf.stop_gradient(_Cubical(X.numpy(), self.dimension)) + indices = _Cubical(X.numpy(), self.dimensions) # Get persistence diagram by simply picking the corresponding entries in the image - dgm = tf.reshape(tf.gather_nd(X, tf.reshape(indices, [-1,len(X.shape)])), [-1,2]) - return dgm + self.dgms = [tf.reshape(tf.gather_nd(X, tf.reshape(indice, [-1,len(X.shape)])), [-1,2]) for indice in indices] + return self.dgms diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index 4f515386..c509c456 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -7,10 +7,10 @@ import tensorflow as tf # The parameters of the model are the vertex function values of the simplex tree. 
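
For the simplex tree layer, each requested dimension now yields a pair of diagrams, finite and essential. A sketch of the convention, matching the updated docstring and test_st_diff below (st and F are the simplex tree and the trainable filtration values from the documentation example):

    sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0])
    finite_dgm, essential_dgm = sl.call(F)[0]  # shapes [num_finite_points, 2] and [num_essential_points, 1]
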
-def _LowerStarSimplexTree(simplextree, filtration, dimension): +def _LowerStarSimplexTree(simplextree, filtration, dimensions): # Parameters: simplextree (simplex tree on which to compute persistence) # filtration (function values on the vertices of st), - # dimension (homology dimension), + # dimensions (homology dimensions), for s,_ in simplextree.get_filtration(): simplextree.assign_filtration(s, -1e10) @@ -21,40 +21,38 @@ def _LowerStarSimplexTree(simplextree, filtration, dimension): simplextree.make_filtration_non_decreasing() # Compute persistence diagram - dgm = simplextree.compute_persistence() + simplextree.compute_persistence() # Get vertex pairs for optimization. First, get all simplex pairs - pairs = simplextree.persistence_pairs() + pairs = simplextree.lower_star_persistence_generators() - # Then, loop over all simplex pairs - indices, pers = [], [] - for s1, s2 in pairs: - # Select pairs with good homological dimension and finite lifetime - if len(s1) == dimension+1 and len(s2) > 0: - # Get IDs of the vertices corresponding to the filtration values of the simplices - l1, l2 = np.array(s1), np.array(s2) - i1 = l1[np.argmax(filtration[l1])] - i2 = l2[np.argmax(filtration[l2])] - indices.append(i1) - indices.append(i2) + L_indices = [] + for dimension in dimensions: - indices = np.reshape(indices, [-1,2]).flatten() - return np.array(indices, dtype=np.int32) + finite_pairs = pairs[0][dimension] if len(pairs[0]) >= dimension+1 else np.empty(shape=[0,2]) + essential_pairs = pairs[1][dimension] if len(pairs[1]) >= dimension+1 else np.empty(shape=[0,1]) + + finite_indices = np.array(finite_pairs.flatten(), dtype=np.int32) + essential_indices = np.array(essential_pairs.flatten(), dtype=np.int32) + + L_indices.append((finite_indices, essential_indices)) + + return L_indices class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree """ - def __init__(self, simplextree, dimension=0, **kwargs): + def __init__(self, simplextree, dimensions=[0], **kwargs): """ Constructor for the LowerStarSimplexTreeLayer class Parameters: simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices - dimension (int): homology dimension + dimensions (int): homology dimensions """ super().__init__(dynamic=True, **kwargs) - self.dimension = dimension + self.dimensions = dimensions self.simplextree = simplextree def build(self): @@ -68,11 +66,15 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): F (TensorFlow variable): filter function values over the vertices of the simplex tree. 
The ith entry of F corresponds to vertex i in self.simplextree Returns: - dgm (TensorFlow variable): lower-star persistence diagram with shape [num_points, 2] + dgms (list of tuple of TensorFlow variables): list of lower-star persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Don't try to compute gradients for the vertex pairs - indices = tf.stop_gradient(_LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimension)) - # Get persistence diagram - self.dgm = tf.reshape(tf.gather(filtration, indices), [-1,2]) - return self.dgm + indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions) + # Get persistence diagrams + self.dgms = [] + for idx_dim, dimension in enumerate(self.dimensions): + finite_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][0]), [-1,2]) + essential_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][1]), [-1,1]) + self.dgms.append((finite_dgm, essential_dgm)) + return self.dgms diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 6d54871c..83387d21 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -8,38 +8,49 @@ from ..rips_complex import RipsComplex # The parameters of the model are the point coordinates. -def _Rips(DX, max_edge, dimension): +def _Rips(DX, max_edge, dimensions): # Parameters: DX (distance matrix), # max_edge (maximum edge length for Rips filtration), - # dimension (homology dimension) + # dimensions (homology dimensions) # Compute the persistence pairs with Gudhi rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) - st = rc.create_simplex_tree(max_dimension=dimension+1) - dgm = st.persistence() - if dimension == 0: - pairs = st.flag_persistence_generators()[0] - else: - pairs = st.flag_persistence_generators()[1][dimension-1] + st = rc.create_simplex_tree(max_dimension=max(dimensions)+1) + st.persistence() + pairs = st.flag_persistence_generators() - indices = pairs.flatten() - return np.array(indices, dtype=np.int32) + L_indices = [] + for dimension in dimensions: + + if dimension == 0: + finite_pairs = pairs[0] + essential_pairs = pairs[2] + else: + finite_pairs = pairs[1][dimension-1] if len(pairs[1]) >= dimension else np.empty(shape=[0,4]) + essential_pairs = pairs[3][dimension-1] if len(pairs[3]) >= dimension else np.empty(shape=[0,2]) + + finite_indices = np.array(finite_pairs.flatten(), dtype=np.int32) + essential_indices = np.array(essential_pairs.flatten(), dtype=np.int32) + + L_indices.append((finite_indices, essential_indices)) + + return L_indices class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, maximum_edge_length=12, dimension=1, **kwargs): + def __init__(self, maximum_edge_length=12, dimensions=[1], **kwargs): """ Constructor for the RipsLayer class Parameters: maximum_edge_length (float): maximum edge length for the Rips complex - dimension (int): homology dimension + dimensions (int): homology dimensions """ super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length - self.dimension = dimension + self.dimensions = dimensions def build(self): super.build() @@ -52,19 +63,24 @@ class RipsLayer(tf.keras.layers.Layer): X (TensorFlow variable): point cloud of shape [number of points, number 
of dimensions] Returns: - dgm (TensorFlow variable): Rips persistence diagram with shape [num_points, 2] with points sorted by + dgms (list of tuple of TensorFlow variables): list of Rips persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Compute distance matrix DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) # Compute vertices associated to positive and negative simplices # Don't compute gradient for this operation - indices = tf.stop_gradient(_Rips(DX.numpy(), self.max_edge, self.dimension)) - # Get persistence diagram by simply picking the corresponding entries in the distance matrix - if self.dimension > 0: - dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(indices, [-1,2])), [-1,2]) - else: - #indices = tf.reshape(indices, [-1,2])[1::2,:] - indices = indices[:,1:] - dgm = tf.concat([tf.zeros([indices.shape[0],1]), tf.reshape(tf.gather_nd(DX, indices), [-1,1])], axis=1) - return dgm + indices = _Rips(DX.numpy(), self.max_edge, self.dimensions) + # Get persistence diagrams by simply picking the corresponding entries in the distance matrix + self.dgms = [] + for idx_dim, dimension in enumerate(self.dimensions): + cur_idx = indices[idx_dim] + if dimension > 0: + finite_dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(cur_idx[0], [-1,2])), [-1,2]) + essential_dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(cur_idx[1], [-1,2])), [-1,1]) + else: + reshaped_cur_idx = tf.reshape(cur_idx[0], [-1,3]) + finite_dgm = tf.concat([tf.zeros([reshaped_cur_idx.shape[0],1]), tf.reshape(tf.gather_nd(DX, reshaped_cur_idx[:,1:]), [-1,1])], axis=1) + essential_dgm = tf.zeros([cur_idx[1].shape[0],1]) + self.dgms.append((finite_dgm, essential_dgm)) + return self.dgms diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index 73a03697..f49eff7b 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -7,10 +7,10 @@ def test_rips_diff(): Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - rl = RipsLayer(maximum_edge_length=2., dimension=0) + rl = RipsLayer(maximum_edge_length=2., dimensions=[0]) with tf.GradientTape() as tape: - dgm = rl.call(X) + dgm = rl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6 @@ -20,10 +20,10 @@ def test_cubical_diff(): Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - cl = CubicalLayer(dimension=0) + cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X) + dgm = cl.call(X)[0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 @@ -55,10 +55,10 @@ def test_st_diff(): Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) F = tf.Variable(initial_value=Finit, trainable=True) - sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0) + sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0]) with tf.GradientTape() as tape: - dgm = sl.call(F) + dgm = sl.call(F)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [F]) -- cgit v1.2.3 From 
dacc47d8aa5e96700600cd93532363d5dfa6cd8a Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Sun, 7 Nov 2021 20:54:24 +0100 Subject: fix cubical code --- src/python/gudhi/tensorflow/cubical_layer.py | 16 +++++++++------- src/python/gudhi/tensorflow/rips_layer.py | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index d8177864..55bd2685 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -14,24 +14,26 @@ def _Cubical(X, dimensions): # Compute the persistence pairs with Gudhi Xs = X.shape - cc = CubicalComplex(dimensions=Xs, top_dimensional_cells=X.flatten()) - cc.persistence() + cc = CubicalComplex(top_dimensional_cells=X) + cc.compute_persistence() + cof_pp = cc.cofaces_of_persistence_pairs() + L_cofs = [] for dim in dimensions: try: - cof = cc.cofaces_of_persistence_pairs()[0][dim] + cof = cof_pp[0][dim] except IndexError: cof = np.array([]) # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices D = len(Xs) if len(cof) > 0 else 1 - ocof = np.array([0 for _ in range(D*2*cof.shape[0])]) + ocof = np.zeros(D*2*cof.shape[0]) count = 0 for idx in range(0,2*cof.shape[0],2): - ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs) - ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs) + ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs, order='F') + ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs, order='F') count += 1 L_cofs.append(np.array(ocof, dtype=np.int32)) @@ -41,7 +43,7 @@ class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing cubical persistence out of a cubical complex """ - def __init__(self, dimensions=[1], **kwargs): + def __init__(self, dimensions=[0], **kwargs): """ Constructor for the CubicalLayer class diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 83387d21..7735db67 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -40,7 +40,7 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, maximum_edge_length=12, dimensions=[1], **kwargs): + def __init__(self, maximum_edge_length=12, dimensions=[0], **kwargs): """ Constructor for the RipsLayer class -- cgit v1.2.3 From 6ae793a8cad4503d1795e227d40d85d43954d1dd Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 12 Nov 2021 09:46:22 +0100 Subject: removed unraveling in cubical --- src/python/gudhi/tensorflow/cubical_layer.py | 15 ++++----------- .../gudhi/tensorflow/lower_star_simplex_tree_layer.py | 2 +- src/python/gudhi/tensorflow/rips_layer.py | 4 ++-- src/python/test/test_diff.py | 13 ++++++++++++- 4 files changed, 19 insertions(+), 15 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 55bd2685..70528f98 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -17,6 +17,7 @@ def _Cubical(X, dimensions): cc = CubicalComplex(top_dimensional_cells=X) cc.compute_persistence() + # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices cof_pp = cc.cofaces_of_persistence_pairs() L_cofs = [] @@ -27,15 +28,7 @@ def _Cubical(X, dimensions): except IndexError: cof = np.array([]) - # Retrieve and ouput image indices/pixels 
corresponding to positive and negative simplices - D = len(Xs) if len(cof) > 0 else 1 - ocof = np.zeros(D*2*cof.shape[0]) - count = 0 - for idx in range(0,2*cof.shape[0],2): - ocof[D*idx:D*(idx+1)] = np.unravel_index(cof[count,0], Xs, order='F') - ocof[D*(idx+1):D*(idx+2)] = np.unravel_index(cof[count,1], Xs, order='F') - count += 1 - L_cofs.append(np.array(ocof, dtype=np.int32)) + L_cofs.append(np.array(cof, dtype=np.int32)) return L_cofs @@ -43,7 +36,7 @@ class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing cubical persistence out of a cubical complex """ - def __init__(self, dimensions=[0], **kwargs): + def __init__(self, dimensions, **kwargs): """ Constructor for the CubicalLayer class @@ -70,5 +63,5 @@ class CubicalLayer(tf.keras.layers.Layer): # Don't compute gradient for this operation indices = _Cubical(X.numpy(), self.dimensions) # Get persistence diagram by simply picking the corresponding entries in the image - self.dgms = [tf.reshape(tf.gather_nd(X, tf.reshape(indice, [-1,len(X.shape)])), [-1,2]) for indice in indices] + self.dgms = [tf.reshape(tf.gather( tf.reshape(tf.transpose(X), [-1]), indice ), [-1,2]) for indice in indices] return self.dgms diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index c509c456..fd1698ea 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -43,7 +43,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree """ - def __init__(self, simplextree, dimensions=[0], **kwargs): + def __init__(self, simplextree, dimensions, **kwargs): """ Constructor for the LowerStarSimplexTreeLayer class diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 7735db67..a314229b 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -16,7 +16,7 @@ def _Rips(DX, max_edge, dimensions): # Compute the persistence pairs with Gudhi rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) st = rc.create_simplex_tree(max_dimension=max(dimensions)+1) - st.persistence() + st.compute_persistence() pairs = st.flag_persistence_generators() L_indices = [] @@ -40,7 +40,7 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, maximum_edge_length=12, dimensions=[0], **kwargs): + def __init__(self, maximum_edge_length=12, dimensions, **kwargs): """ Constructor for the RipsLayer class diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index f49eff7b..e0c99d07 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -15,7 +15,6 @@ def test_rips_diff(): grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6 - def test_cubical_diff(): Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) @@ -28,6 +27,18 @@ def test_cubical_diff(): grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 +def test_nonsquare_cubical_diff(): + + Xinit = np.array([[-1.,1.,0.],[1.,1.,1.]], dtype=np.float32) + X = tf.Variable(initial_value=Xinit, trainable=True) + cl = CubicalLayer(dimensions=[0]) + + with tf.GradientTape() as tape: + dgm = cl.call(X)[0] + loss = 
tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) + grads = tape.gradient(loss, [X]) + assert np.abs(grads[0].numpy()-np.array([[0.,0.5,-0.5],[0.,0.,0.]])).sum() <= 1e-6 + def test_st_diff(): st = gd.SimplexTree() -- cgit v1.2.3 From 74dfd101312a48272f2f91c3ddc401d1148deaec Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 12 Nov 2021 12:13:49 +0100 Subject: fix non-default vs default --- src/python/gudhi/tensorflow/rips_layer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index a314229b..da7087f6 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -40,7 +40,7 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, maximum_edge_length=12, dimensions, **kwargs): + def __init__(self, dimensions, maximum_edge_length=12, **kwargs): """ Constructor for the RipsLayer class -- cgit v1.2.3 From 1fd37bf29d665330f1eb242139bc0faf10a542c1 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 12 Nov 2021 12:34:43 +0100 Subject: avoid transpose --- src/python/gudhi/tensorflow/cubical_layer.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 70528f98..0971a446 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -8,13 +8,14 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the pixel values. -def _Cubical(X, dimensions): - # Parameters: X (image), +def _Cubical(Xflat, Xdim, dimensions): + # Parameters: Xflat (flattened image), + # Xdim (shape of non-flattened image) # dimensions (homology dimensions) # Compute the persistence pairs with Gudhi - Xs = X.shape - cc = CubicalComplex(top_dimensional_cells=X) + # We reverse the dimensions because CubicalComplex uses Fortran ordering + cc = CubicalComplex(dimensions=Xdim[::-1], top_dimensional_cells=Xflat) cc.compute_persistence() # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices @@ -61,7 +62,9 @@ class CubicalLayer(tf.keras.layers.Layer): """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation - indices = _Cubical(X.numpy(), self.dimensions) + Xflat = tf.reshape(X, [-1]) + Xdim = X.shape + indices = _Cubical(Xflat.numpy(), Xdim, self.dimensions) # Get persistence diagram by simply picking the corresponding entries in the image - self.dgms = [tf.reshape(tf.gather( tf.reshape(tf.transpose(X), [-1]), indice ), [-1,2]) for indice in indices] + self.dgms = [tf.reshape(tf.gather(Xflat, indice), [-1,2]) for indice in indices] return self.dgms -- cgit v1.2.3 From aa600c433e1f756bec4323e29e86786b937d9443 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 15 Nov 2021 11:12:27 +0100 Subject: Print files licenses when available Wrap bunny fetching Add corresponding tests --- src/python/gudhi/datasets/remote.py | 38 ++++++++++++++++++++++- src/python/test/test_remote_datasets.py | 55 ++++++++++++++++++++++++++------- 2 files changed, 81 insertions(+), 12 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index aef4b277..7e8f9ce7 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -39,7 +39,7 @@ def 
_checksum_sha256(file_path): sha256_hash.update(buffer) return sha256_hash.hexdigest() -def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): +def fetch(url, filename, dirname = "remote_datasets", file_checksum = None, accept_license = False): """ Fetch the wanted dataset from the given url and save it in file_path @@ -54,6 +54,9 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): file_checksum : string The file checksum using sha256 to check against the one computed on the downloaded file. Default is 'None'. + accept_license : boolean + Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False Returns ------- @@ -69,6 +72,7 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): if not exists(dirname): makedirs(dirname) + # Get the file urlretrieve(url, file_path) if file_checksum is not None: @@ -78,6 +82,13 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None): "different from expected : {}." "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, file_checksum)) + # Print license terms unless accept_license is set to True + if not accept_license: + license_file = join(dirname, "LICENSE") + if exists(license_file) and (file_path != license_file): + with open(license_file, 'r') as f: + print(f.read()) + return file_path def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): @@ -98,3 +109,28 @@ def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): """ return fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", filename, dirname, '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + +def fetch_bunny(filename = "bunny.off", dirname = "remote_datasets/bunny", accept_license = False): + """ + Fetch bunny.off remotely and its LICENSE file + + Parameters + ---------- + filename : string + The name to give to downloaded file. Default is "bunny.off" + dirname : string + The directory to save the file to. Default is "remote_datasets/bunny". + accept_license : boolean + Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False + + Returns + ------- + files_paths: list of strings + Full paths of the created file and its LICENSE. 
+ """ + + return [fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/LICENSE", "LICENSE", dirname, + 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956'), + fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/bunny.off", filename, dirname, + '11852d5e73e2d4bd7b86a2c5cc8a5884d0fbb72539493e8cec100ea922b19f5b', accept_license)] diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index e252980d..e777abc6 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -11,36 +11,69 @@ from gudhi.datasets import remote import re import os.path +import io +import sys import pytest -def check_dir_file_names(path_file_dw, filename, dirname): +def _check_dir_file_names(path_file_dw, filename, dirname): assert os.path.isfile(path_file_dw) names_dw = re.split(r' |/|\\', path_file_dw) - assert dirname == names_dw[0] - assert filename == names_dw[1] + # Case where inner directories are created in "remote_datasets/"; e.g: "remote_datasets/bunny" + if len(names_dw) >= 3: + for i in range(len(names_dw)-1): + assert re.split(r' |/|\\', dirname)[i] == names_dw[i] + assert filename == names_dw[i+1] + else: + assert dirname == names_dw[0] + assert filename == names_dw[1] -def check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): +def _check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): path_file_dw = remote.fetch(url, filename, dirname, file_checksum) - check_dir_file_names(path_file_dw, filename, dirname) + _check_dir_file_names(path_file_dw, filename, dirname) + +def _get_bunny_license_print(accept_license = False): + capturedOutput = io.StringIO() + # Redirect stdout + sys.stdout = capturedOutput + remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/bunny.off", "bunny.off", "remote_datasets/bunny", + '11852d5e73e2d4bd7b86a2c5cc8a5884d0fbb72539493e8cec100ea922b19f5b', accept_license) + # Reset redirect + sys.stdout = sys.__stdout__ + return capturedOutput def test_fetch_remote_datasets(): # Test fetch with a wrong checksum with pytest.raises(OSError): - check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = 'XXXXXXXXXX') + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = 'XXXXXXXXXX') # Test files download from given urls with checksums provided - check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') - check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') # Test files download from given urls without checksums - check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") - 
check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") # Test spiral_2d.csv wrapping function path_file_dw = remote.fetch_spiral_2d() - check_dir_file_names(path_file_dw, 'spiral_2d.csv', 'remote_datasets') + _check_dir_file_names(path_file_dw, 'spiral_2d.csv', 'remote_datasets') + + # Test printing existing LICENSE file when fetching bunny.off with accept_license = False (default) + # Fetch LICENSE file + remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/LICENSE", "LICENSE", "remote_datasets/bunny", + 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956') + with open("remote_datasets/bunny/LICENSE") as f: + assert f.read() == _get_bunny_license_print().getvalue().rstrip("\n") + + # Test not printing bunny.off LICENSE when accept_license = True + assert "" == _get_bunny_license_print(accept_license = True).getvalue() + + # Test fetch_bunny wrapping function + path_file_dw = remote.fetch_bunny() + _check_dir_file_names(path_file_dw[0], 'LICENSE', 'remote_datasets/bunny') + _check_dir_file_names(path_file_dw[1], 'bunny.off', 'remote_datasets/bunny') -- cgit v1.2.3 From f09d7da77c8a7bc1a16abde3f11f611a4fd7b6f5 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 15 Nov 2021 18:44:34 +0100 Subject: update doc --- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 +- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 2 +- src/python/doc/rips_complex_tflow_itf_ref.rst | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 582e0551..92727a75 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -12,7 +12,7 @@ Example of gradient computed from cubical persistence .. testcode:: - from gudhi.tensorflow import * + from gudhi.tensorflow import CubicalLayer import tensorflow as tf X = tf.Variable([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=tf.float32, trainable=True) diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index 6c8b5a08..0a6764fa 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -12,7 +12,7 @@ Example of gradient computed from lower-star filtration of a simplex tree .. testcode:: - from gudhi.tensorflow import * + from gudhi.tensorflow import LowerStarSimplexTreeLayer import tensorflow as tf import gudhi as gd diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index bd9c5da5..7aa77da6 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -12,7 +12,7 @@ Example of gradient computed from Vietoris-Rips persistence .. 
testcode:: - from gudhi.tensorflow import * + from gudhi.tensorflow import RipsLayer import tensorflow as tf X = tf.Variable([[1.,1.],[2.,2.]], dtype=tf.float32, trainable=True) -- cgit v1.2.3 From 7b83812e37986c9adf9cccaeab360f1d4ffa846f Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Wed, 17 Nov 2021 00:18:33 +0100 Subject: fix doc --- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 +- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 13 +------------ src/python/doc/rips_complex_tflow_itf_ref.rst | 2 +- src/python/gudhi/tensorflow/cubical_layer.py | 2 +- .../gudhi/tensorflow/lower_star_simplex_tree_layer.py | 2 +- src/python/gudhi/tensorflow/rips_layer.py | 2 +- 6 files changed, 6 insertions(+), 17 deletions(-) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 92727a75..692191ba 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -16,7 +16,7 @@ Example of gradient computed from cubical persistence import tensorflow as tf X = tf.Variable([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=tf.float32, trainable=True) - cl = CubicalLayer(dimension=0) + cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: dgm = cl.call(X) diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index 0a6764fa..3200b8e5 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -17,17 +17,6 @@ Example of gradient computed from lower-star filtration of a simplex tree import gudhi as gd st = gd.SimplexTree() - st.insert([0]) - st.insert([1]) - st.insert([2]) - st.insert([3]) - st.insert([4]) - st.insert([5]) - st.insert([6]) - st.insert([7]) - st.insert([8]) - st.insert([9]) - st.insert([10]) st.insert([0, 1]) st.insert([1, 2]) st.insert([2, 3]) @@ -40,7 +29,7 @@ Example of gradient computed from lower-star filtration of a simplex tree st.insert([9, 10]) F = tf.Variable([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=tf.float32, trainable=True) - sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0) + sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0]) with tf.GradientTape() as tape: dgm = sl.call(F) diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 7aa77da6..fc42e5c9 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -16,7 +16,7 @@ Example of gradient computed from Vietoris-Rips persistence import tensorflow as tf X = tf.Variable([[1.,1.],[2.,2.]], dtype=tf.float32, trainable=True) - rl = RipsLayer(maximum_edge_length=2., dimension=0) + rl = RipsLayer(maximum_edge_length=2., dimensions=[0]) with tf.GradientTape() as tape: dgm = rl.call(X) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 0971a446..d07a4cd8 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -42,7 +42,7 @@ class CubicalLayer(tf.keras.layers.Layer): Constructor for the CubicalLayer class Parameters: - dimensions (list of int): homology dimensions + dimensions (List[int]): homology dimensions """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index fd1698ea..aa55604a 100644 
--- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -49,7 +49,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): Parameters: simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices - dimensions (int): homology dimensions + dimensions (List[int]): homology dimensions """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index da7087f6..472a418b 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -46,7 +46,7 @@ class RipsLayer(tf.keras.layers.Layer): Parameters: maximum_edge_length (float): maximum edge length for the Rips complex - dimensions (int): homology dimensions + dimensions (List[int]): homology dimensions """ super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length -- cgit v1.2.3 From b966a15818fd7a397ed6edc2b17ee6e188df6df0 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 22 Nov 2021 23:58:49 +0100 Subject: small change on doc --- src/python/doc/differentiation_sum.inc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/python/doc/differentiation_sum.inc b/src/python/doc/differentiation_sum.inc index 3dd8e59c..3aec33df 100644 --- a/src/python/doc/differentiation_sum.inc +++ b/src/python/doc/differentiation_sum.inc @@ -8,4 +8,5 @@ We provide TensorFlow 2 models that can handle automatic differentiation for the computation of persistence diagrams from complexes available in the Gudhi library. This includes simplex trees, cubical complexes and Vietoris-Rips complexes. Detailed example on how to use these layers in practice are available -in the following `notebook `_. +in the following `notebook `_. Note that even if TensorFlow GPU is enabled, all +internal computations using Gudhi will be done on CPU. -- cgit v1.2.3 From 979d12e00b4ea71391d132589ee3304e378459b9 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Sat, 4 Dec 2021 12:41:59 +0100 Subject: added min persistence --- src/python/gudhi/tensorflow/cubical_layer.py | 12 +++++------- .../gudhi/tensorflow/lower_star_simplex_tree_layer.py | 14 ++++++-------- src/python/gudhi/tensorflow/rips_layer.py | 12 +++++------- 3 files changed, 16 insertions(+), 22 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index d07a4cd8..8fe9cff0 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -8,7 +8,7 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the pixel values. 
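# Hedged usage sketch for the scalar min_persistence this commit introduces
# (forwarded to cc.compute_persistence in the hunk below). Only the constructor
# signature comes from the patch; the input image and threshold are hypothetical.
import numpy as np
import tensorflow as tf
from gudhi.tensorflow import CubicalLayer

X = tf.Variable(np.random.rand(8, 8).astype(np.float32), trainable=True)
cl = CubicalLayer(dimensions=[0], min_persistence=0.01)
dgm = cl.call(X)[0]  # dimension-0 diagram; pairs with persistence < 0.01 are dropped by Gudhi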
-def _Cubical(Xflat, Xdim, dimensions): +def _Cubical(Xflat, Xdim, dimensions, min_persistence): # Parameters: Xflat (flattened image), # Xdim (shape of non-flattened image) # dimensions (homology dimensions) @@ -16,7 +16,7 @@ def _Cubical(Xflat, Xdim, dimensions): # Compute the persistence pairs with Gudhi # We reverse the dimensions because CubicalComplex uses Fortran ordering cc = CubicalComplex(dimensions=Xdim[::-1], top_dimensional_cells=Xflat) - cc.compute_persistence() + cc.compute_persistence(min_persistence=min_persistence) # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices cof_pp = cc.cofaces_of_persistence_pairs() @@ -37,7 +37,7 @@ class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing cubical persistence out of a cubical complex """ - def __init__(self, dimensions, **kwargs): + def __init__(self, dimensions, min_persistence=0., **kwargs): """ Constructor for the CubicalLayer class @@ -46,9 +46,7 @@ class CubicalLayer(tf.keras.layers.Layer): """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions - - def build(self): - super.build() + self.min_persistence = min_persistence def call(self, X): """ @@ -64,7 +62,7 @@ class CubicalLayer(tf.keras.layers.Layer): # Don't compute gradient for this operation Xflat = tf.reshape(X, [-1]) Xdim = X.shape - indices = _Cubical(Xflat.numpy(), Xdim, self.dimensions) + indices = _Cubical(Xflat.numpy(), Xdim, self.dimensions, self.min_persistence) # Get persistence diagram by simply picking the corresponding entries in the image self.dgms = [tf.reshape(tf.gather(Xflat, indice), [-1,2]) for indice in indices] return self.dgms diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index aa55604a..5902e4a1 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -7,7 +7,7 @@ import tensorflow as tf # The parameters of the model are the vertex function values of the simplex tree. -def _LowerStarSimplexTree(simplextree, filtration, dimensions): +def _LowerStarSimplexTree(simplextree, filtration, dimensions, min_persistence): # Parameters: simplextree (simplex tree on which to compute persistence) # filtration (function values on the vertices of st), # dimensions (homology dimensions), @@ -21,7 +21,7 @@ def _LowerStarSimplexTree(simplextree, filtration, dimensions): simplextree.make_filtration_non_decreasing() # Compute persistence diagram - simplextree.compute_persistence() + simplextree.compute_persistence(min_persistence=min_persistence) # Get vertex pairs for optimization. 
First, get all simplex pairs pairs = simplextree.lower_star_persistence_generators() @@ -43,7 +43,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree """ - def __init__(self, simplextree, dimensions, **kwargs): + def __init__(self, simplextree, dimensions, min_persistence=0., **kwargs): """ Constructor for the LowerStarSimplexTreeLayer class @@ -54,10 +54,8 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions self.simplextree = simplextree - - def build(self): - super.build() - + self.min_persistence = min_persistence + def call(self, filtration): """ Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree @@ -69,7 +67,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): dgms (list of tuple of TensorFlow variables): list of lower-star persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Don't try to compute gradients for the vertex pairs - indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions) + indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions, self.min_persistence) # Get persistence diagrams self.dgms = [] for idx_dim, dimension in enumerate(self.dimensions): diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 472a418b..97f28d74 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -8,7 +8,7 @@ from ..rips_complex import RipsComplex # The parameters of the model are the point coordinates. 
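# Companion sketch for RipsLayer with the new scalar min_persistence; the point
# cloud is hypothetical, while the signature and the (finite, essential) output
# pair are the ones defined in this patch.
import tensorflow as tf
from gudhi.tensorflow import RipsLayer

X = tf.Variable([[1., 1.], [2., 2.]], dtype=tf.float32, trainable=True)
rl = RipsLayer(dimensions=[0], maximum_edge_length=2., min_persistence=0.)
finite_dgm, essential_dgm = rl.call(X)[0]  # diagrams for homology dimension 0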
-def _Rips(DX, max_edge, dimensions): +def _Rips(DX, max_edge, dimensions, min_persistence): # Parameters: DX (distance matrix), # max_edge (maximum edge length for Rips filtration), # dimensions (homology dimensions) @@ -16,7 +16,7 @@ def _Rips(DX, max_edge, dimensions): # Compute the persistence pairs with Gudhi rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) st = rc.create_simplex_tree(max_dimension=max(dimensions)+1) - st.compute_persistence() + st.compute_persistence(min_persistence=min_persistence) pairs = st.flag_persistence_generators() L_indices = [] @@ -40,7 +40,7 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, dimensions, maximum_edge_length=12, **kwargs): + def __init__(self, dimensions, maximum_edge_length=12, min_persistence=0., **kwargs): """ Constructor for the RipsLayer class @@ -51,9 +51,7 @@ class RipsLayer(tf.keras.layers.Layer): super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length self.dimensions = dimensions - - def build(self): - super.build() + self.min_persistence = min_persistence def call(self, X): """ @@ -69,7 +67,7 @@ class RipsLayer(tf.keras.layers.Layer): DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) # Compute vertices associated to positive and negative simplices # Don't compute gradient for this operation - indices = _Rips(DX.numpy(), self.max_edge, self.dimensions) + indices = _Rips(DX.numpy(), self.max_edge, self.dimensions, self.min_persistence) # Get persistence diagrams by simply picking the corresponding entries in the distance matrix self.dgms = [] for idx_dim, dimension in enumerate(self.dimensions): -- cgit v1.2.3 From 96c7e5ce2f0146798f66c89421b0d23e98a2a390 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Sat, 4 Dec 2021 13:22:23 +0100 Subject: update code and doc --- src/python/gudhi/tensorflow/cubical_layer.py | 20 ++++++++++++++------ .../tensorflow/lower_star_simplex_tree_layer.py | 21 ++++++++++++++------- src/python/gudhi/tensorflow/rips_layer.py | 19 +++++++++++++------ 3 files changed, 41 insertions(+), 19 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 8fe9cff0..b16c512f 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -8,7 +8,7 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the pixel values. 
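# Sketch of the list-valued min_persistence this commit switches to: one
# threshold per entry of dimensions, applied after the persistence computation;
# a negative threshold skips the pruning for that dimension (see the
# `if min_pers >= 0` test in the hunk below). Input values are hypothetical.
import numpy as np
import tensorflow as tf
from gudhi.tensorflow import CubicalLayer

X = tf.Variable(np.random.rand(10, 10).astype(np.float32), trainable=True)
cl = CubicalLayer(dimensions=[0, 1], min_persistence=[0., 0.05])
dgms = cl.call(X)  # dgms[0]: dimension 0, unpruned; dgms[1]: dimension 1, pruned at 0.05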
-def _Cubical(Xflat, Xdim, dimensions, min_persistence): +def _Cubical(Xflat, Xdim, dimensions): # Parameters: Xflat (flattened image), # Xdim (shape of non-flattened image) # dimensions (homology dimensions) @@ -16,7 +16,7 @@ def _Cubical(Xflat, Xdim, dimensions, min_persistence): # Compute the persistence pairs with Gudhi # We reverse the dimensions because CubicalComplex uses Fortran ordering cc = CubicalComplex(dimensions=Xdim[::-1], top_dimensional_cells=Xflat) - cc.compute_persistence(min_persistence=min_persistence) + cc.compute_persistence() # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices cof_pp = cc.cofaces_of_persistence_pairs() @@ -37,17 +37,19 @@ class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing cubical persistence out of a cubical complex """ - def __init__(self, dimensions, min_persistence=0., **kwargs): + def __init__(self, dimensions, min_persistence=None, **kwargs): """ Constructor for the CubicalLayer class Parameters: dimensions (List[int]): homology dimensions + min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions - self.min_persistence = min_persistence - + self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + assert len(self.min_persistence) == len(self.dimensions) + def call(self, X): """ Compute persistence diagram associated to a cubical complex filtered by some pixel values @@ -62,7 +64,13 @@ class CubicalLayer(tf.keras.layers.Layer): # Don't compute gradient for this operation Xflat = tf.reshape(X, [-1]) Xdim = X.shape - indices = _Cubical(Xflat.numpy(), Xdim, self.dimensions, self.min_persistence) + indices = _Cubical(Xflat.numpy(), Xdim, self.dimensions) # Get persistence diagram by simply picking the corresponding entries in the image self.dgms = [tf.reshape(tf.gather(Xflat, indice), [-1,2]) for indice in indices] + for idx_dim in range(len(self.min_persistence)): + min_pers = self.min_persistence[idx_dim] + if min_pers >= 0: + finite_dgm = self.dgms[idx_dim] + persistent_indices = np.argwhere(np.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers).ravel() + self.dgms[idx_dim] = tf.gather(finite_dgm, indices=persistent_indices) return self.dgms diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index 5902e4a1..e1627944 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -7,7 +7,7 @@ import tensorflow as tf # The parameters of the model are the vertex function values of the simplex tree. -def _LowerStarSimplexTree(simplextree, filtration, dimensions, min_persistence): +def _LowerStarSimplexTree(simplextree, filtration, dimensions): # Parameters: simplextree (simplex tree on which to compute persistence) # filtration (function values on the vertices of st), # dimensions (homology dimensions), @@ -21,7 +21,7 @@ def _LowerStarSimplexTree(simplextree, filtration, dimensions, min_persistence): simplextree.make_filtration_non_decreasing() # Compute persistence diagram - simplextree.compute_persistence(min_persistence=min_persistence) + simplextree.compute_persistence() # Get vertex pairs for optimization. 
First, get all simplex pairs pairs = simplextree.lower_star_persistence_generators() @@ -43,19 +43,21 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree """ - def __init__(self, simplextree, dimensions, min_persistence=0., **kwargs): + def __init__(self, simplextree, dimensions, min_persistence=None, **kwargs): """ Constructor for the LowerStarSimplexTreeLayer class Parameters: simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices dimensions (List[int]): homology dimensions + min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions self.simplextree = simplextree - self.min_persistence = min_persistence - + self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + assert len(self.min_persistence) == len(self.dimensions) + def call(self, filtration): """ Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree @@ -67,12 +69,17 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): dgms (list of tuple of TensorFlow variables): list of lower-star persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Don't try to compute gradients for the vertex pairs - indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions, self.min_persistence) + indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions) # Get persistence diagrams self.dgms = [] for idx_dim, dimension in enumerate(self.dimensions): finite_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][0]), [-1,2]) essential_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][1]), [-1,1]) - self.dgms.append((finite_dgm, essential_dgm)) + min_pers = self.min_persistence[idx_dim] + if min_pers >= 0: + persistent_indices = np.argwhere(np.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers).ravel() + self.dgms.append((tf.gather(finite_dgm, indices=persistent_indices), essential_dgm)) + else: + self.dgms.append((finite_dgm, essential_dgm)) return self.dgms diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 97f28d74..a5f212e3 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -8,7 +8,7 @@ from ..rips_complex import RipsComplex # The parameters of the model are the point coordinates. 
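# End-to-end sketch of RipsLayer with the list-valued min_persistence,
# mirroring the documentation's testcode; the loss and inputs are hypothetical.
import tensorflow as tf
from gudhi.tensorflow import RipsLayer

X = tf.Variable([[1., 1.], [2., 2.]], dtype=tf.float32, trainable=True)
rl = RipsLayer(dimensions=[0], maximum_edge_length=2., min_persistence=[0.])
with tf.GradientTape() as tape:
    finite_dgm = rl.call(X)[0][0]  # finite part of the dimension-0 diagram
    loss = tf.math.reduce_sum(tf.square(.5 * (finite_dgm[:, 1] - finite_dgm[:, 0])))
grads = tape.gradient(loss, [X])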
-def _Rips(DX, max_edge, dimensions, min_persistence): +def _Rips(DX, max_edge, dimensions): # Parameters: DX (distance matrix), # max_edge (maximum edge length for Rips filtration), # dimensions (homology dimensions) @@ -16,7 +16,7 @@ def _Rips(DX, max_edge, dimensions, min_persistence): # Compute the persistence pairs with Gudhi rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) st = rc.create_simplex_tree(max_dimension=max(dimensions)+1) - st.compute_persistence(min_persistence=min_persistence) + st.compute_persistence() pairs = st.flag_persistence_generators() L_indices = [] @@ -40,18 +40,20 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, dimensions, maximum_edge_length=12, min_persistence=0., **kwargs): + def __init__(self, dimensions, maximum_edge_length=12, min_persistence=None, **kwargs): """ Constructor for the RipsLayer class Parameters: maximum_edge_length (float): maximum edge length for the Rips complex dimensions (List[int]): homology dimensions + min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) """ super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length self.dimensions = dimensions - self.min_persistence = min_persistence + self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + assert len(self.min_persistence) == len(self.dimensions) def call(self, X): """ @@ -67,7 +69,7 @@ class RipsLayer(tf.keras.layers.Layer): DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) # Compute vertices associated to positive and negative simplices # Don't compute gradient for this operation - indices = _Rips(DX.numpy(), self.max_edge, self.dimensions, self.min_persistence) + indices = _Rips(DX.numpy(), self.max_edge, self.dimensions) # Get persistence diagrams by simply picking the corresponding entries in the distance matrix self.dgms = [] for idx_dim, dimension in enumerate(self.dimensions): @@ -79,6 +81,11 @@ class RipsLayer(tf.keras.layers.Layer): reshaped_cur_idx = tf.reshape(cur_idx[0], [-1,3]) finite_dgm = tf.concat([tf.zeros([reshaped_cur_idx.shape[0],1]), tf.reshape(tf.gather_nd(DX, reshaped_cur_idx[:,1:]), [-1,1])], axis=1) essential_dgm = tf.zeros([cur_idx[1].shape[0],1]) - self.dgms.append((finite_dgm, essential_dgm)) + min_pers = self.min_persistence[idx_dim] + if min_pers >= 0: + persistent_indices = np.argwhere(np.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers).ravel() + self.dgms.append((tf.gather(finite_dgm, indices=persistent_indices), essential_dgm)) + else: + self.dgms.append((finite_dgm, essential_dgm)) return self.dgms -- cgit v1.2.3 From 9db268b5ecf056b87ee2f66c6d3f83de93a8681f Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 29 Dec 2021 15:38:59 +0100 Subject: Get min sphere without using a set Move face_points declaration Remove face_sh redundant variable --- .../include/gudhi/Cech_complex_blocker.h | 30 ++++++++-------------- 1 file changed, 11 insertions(+), 19 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 5edd005d..f7f86534 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -63,16 +63,6 @@ class Cech_blocker { return std::make_pair(std::move(c), std::move(r)); } 
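// Context for the hunk below: rather than collecting every face's enclosing
// sphere in a std::set ordered by radius (which required the CompareSpheresRadii
// comparator deleted here), the blocker now tracks a single running minimum,
// min_enclos_ball, initialized to an infinite radius and updated in place.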
- template <typename Sphere>
- class CompareSpheresRadii
- {
-  public:
-   CGAL::NT_converter<FT, double> cast_to_double;
-   bool operator()(const Sphere& firstSphere, const Sphere& secondSphere)
-   {
-     return cast_to_double(firstSphere.second) < cast_to_double(secondSphere.second);
-   }
- };

 /** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex
  * radius and returns if the simplex expansion must be blocked.
@@ -84,7 +74,10 @@ class Cech_blocker {
     Filtration_value radius = 0.;
     // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices
-    std::set<Sphere, CompareSpheresRadii<Sphere> > enclosing_ball_spheres;
+    Sphere min_enclos_ball;
+    CGAL::NT_converter<double, FT> cast_to_FT;
+    min_enclos_ball.second = cast_to_FT(std::numeric_limits<double>::max());
+    Point_cloud face_points;
     for (auto face : sc_ptr_->boundary_simplex_range(sh)) {
       // Find which vertex of sh is missing in face. We rely on the fact that simplex_vertex_range is sorted.
       auto longlist = sc_ptr_->simplex_vertex_range(sh);
@@ -96,7 +89,6 @@ class Cech_blocker {
       while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; }
       auto extra = *longiter;  // Vertex_handle
-      Point_cloud face_points;
       for (auto vertex : sc_ptr_->simplex_vertex_range(face)) {
         face_points.push_back(cc_ptr_->get_point(vertex));
#ifdef DEBUG_TRACES
#endif  // DEBUG_TRACES
       }
       Sphere sph;
-      auto face_sh = sc_ptr_->find(sc_ptr_->simplex_vertex_range(face));
-      auto k = sc_ptr_->key(face_sh);
+      auto k = sc_ptr_->key(face);
       if(k != sc_ptr_->null_key()) {
         sph = cc_ptr_->get_cache().at(k);
       }
       else {
         sph = get_sphere(face_points.cbegin(), face_points.cend());
       }
+      face_points.clear();
       if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) {
         radius = std::sqrt(cast_to_double(sph.second));
#ifdef DEBUG_TRACES
         std::clog << "circumcenter: " << sph.first << ", radius: " << radius << std::endl;
#endif  // DEBUG_TRACES
-        enclosing_ball_spheres.insert(sph);
+        if (cast_to_double(sph.second) < cast_to_double(min_enclos_ball.second))
+          min_enclos_ball = sph;
       }
     }
     // Get the minimal radius of all faces enclosing balls if exists
-    if (!enclosing_ball_spheres.empty()) {
-      Sphere sph_min = *enclosing_ball_spheres.begin();
-      radius = std::sqrt(cast_to_double(sph_min.second));
+    if(cast_to_double(min_enclos_ball.second) != std::numeric_limits<double>::max()) {
+      radius = std::sqrt(cast_to_double(min_enclos_ball.second));
       sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size());
-      cc_ptr_->get_cache().push_back(sph_min);
+      cc_ptr_->get_cache().push_back(min_enclos_ball);
     }
     if (radius == 0.)
{ // Spheres of each face don't contain the whole simplex -- cgit v1.2.3 From b1f40dd2c4397c1975533c54a54538160c727d55 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 6 Jan 2022 11:39:05 +0100 Subject: Make kernel a parameter of Minimal_enclosing_ball_radius class Use Epick in cech benchmark instead of Epeck --- src/Cech_complex/benchmark/cech_complex_benchmark.cpp | 8 ++++---- src/Cech_complex/include/gudhi/Cech_complex.h | 3 +-- .../include/gudhi/Cech_complex/Cech_kernel.h | 16 ++++++++-------- src/Cech_complex/test/test_cech_complex.cpp | 4 ++-- 4 files changed, 15 insertions(+), 16 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index 06d90757..e715b513 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -17,7 +17,7 @@ #include #include -#include // For EXACT or SAFE version +#include #include "boost/filesystem.hpp" // includes all needed Boost.Filesystem declarations @@ -32,7 +32,7 @@ using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; -using Kernel = CGAL::Epeck_d; +using Kernel = CGAL::Epick_d>; using Point_cgal = typename Kernel::Point_d; using Point_cloud_cgal = std::vector; using Points_off_reader_cgal = Gudhi::Points_off_reader; @@ -86,7 +86,7 @@ int main(int argc, char* argv[]) { Gudhi::Clock cgal_miniball_clock("Gudhi::Minimal_enclosing_ball_radius_cgal()"); // Compute the proximity graph of the points Proximity_graph cgal_miniball_prox_graph = Gudhi::compute_proximity_graph( - off_reader_cgal.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius()); + off_reader_cgal.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius()); std::clog << cgal_miniball_clock << std::endl; boost::filesystem::path full_path(boost::filesystem::current_path()); @@ -109,7 +109,7 @@ int main(int argc, char* argv[]) { std::clog << radius << ";"; Gudhi::Clock rips_clock("Rips computation"); Rips_complex rips_complex_from_points(off_reader_cgal.get_point_cloud(), radius, - Gudhi::Minimal_enclosing_ball_radius()); + Gudhi::Minimal_enclosing_ball_radius()); Simplex_tree rips_stree; rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); // ------------------------------------------ diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 7bbf97d1..0031d861 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -18,7 +18,6 @@ #include #include // for exception management -#include namespace Gudhi { @@ -78,7 +77,7 @@ class Cech_complex { point_cloud_.assign(points.begin(), points.end()); cech_skeleton_graph_ = Gudhi::compute_proximity_graph( - point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius()); + point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius()); } /** \brief Initializes the simplicial complex from the proximity graph and expands it until a given maximal diff --git a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h index 348bb57d..89012206 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h +++ b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h @@ -11,9 +11,10 @@ #ifndef CECH_KERNEL_H_ #define CECH_KERNEL_H_ -#include +#include // 
for #include #include // for std::sqrt +#include namespace Gudhi { @@ -21,8 +22,14 @@ namespace Gudhi { /** @brief Compute the radius of the minimal enclosing ball between Points given by a range of coordinates. * The points are assumed to have the same dimension. */ +template class Minimal_enclosing_ball_radius { + private: + Kernel kernel_; public: + using Point = typename Kernel::Point_d; + using Point_cloud = typename std::vector; + /** \brief Enclosing ball radius from two points using CGAL. * * @param[in] point_1 @@ -31,10 +38,7 @@ class Minimal_enclosing_ball_radius { * \tparam Point must be a Kernel::Point_d from CGAL. * */ - template< typename Kernel = CGAL::Epeck_d, - typename Point= typename Kernel::Point_d> double operator()(const Point& point_1, const Point& point_2) const { - Kernel kernel_; return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; } @@ -46,11 +50,7 @@ class Minimal_enclosing_ball_radius { * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. * */ - template< typename Kernel = CGAL::Epeck_d, - typename Point= typename Kernel::Point_d, - typename Point_cloud = std::vector> double operator()(const Point_cloud& point_cloud) const { - Kernel kernel_; return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); } diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 7d8c3c22..ca7a9778 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -108,11 +108,11 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { std::clog << vertex << ","; vp.push_back(points.at(vertex)); } - std::clog << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1)) + std::clog << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1)) << " - filtration =" << st.filtration(f_simplex) << std::endl; BOOST_CHECK(vp.size() == 2); GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), - Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))); + Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))); } } -- cgit v1.2.3 From beb431316a5181caf0eec5c0940601457340cc58 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 6 Jan 2022 13:40:33 +0100 Subject: Add left out kernel to Minimal_enclosing_ball_radius class in cech example --- src/Cech_complex/example/cech_complex_step_by_step.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index 2d8321b1..c8dd1585 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -52,7 +52,7 @@ class Cech_blocker { std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } - Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points); + Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points); #ifdef DEBUG_TRACES std::clog << "radius = " << radius << " - " << (radius > max_radius_) << std::endl; #endif // DEBUG_TRACES @@ -83,7 +83,7 @@ int main(int argc, char* argv[]) { // Compute the proximity graph of the points Proximity_graph prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), max_radius, - Gudhi::Minimal_enclosing_ball_radius()); + Gudhi::Minimal_enclosing_ball_radius()); // Construct the Cech complex in a Simplex Tree 
Simplex_tree st; -- cgit v1.2.3 From d941ebc854880a06707999f677137a9d6ff7473f Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 26 Jan 2022 15:21:20 +0100 Subject: Add datasets remote fetching module to doc --- src/python/doc/datasets.inc | 14 ++++ src/python/doc/datasets.rst | 118 +++++++++++++++++++++++++++++++++ src/python/doc/datasets_generators.inc | 14 ---- src/python/doc/datasets_generators.rst | 105 ----------------------------- src/python/doc/index.rst | 6 +- 5 files changed, 135 insertions(+), 122 deletions(-) create mode 100644 src/python/doc/datasets.inc create mode 100644 src/python/doc/datasets.rst delete mode 100644 src/python/doc/datasets_generators.inc delete mode 100644 src/python/doc/datasets_generators.rst diff --git a/src/python/doc/datasets.inc b/src/python/doc/datasets.inc new file mode 100644 index 00000000..95a87678 --- /dev/null +++ b/src/python/doc/datasets.inc @@ -0,0 +1,14 @@ +.. table:: + :widths: 30 40 30 + + +-----------------------------------+--------------------------------------------+--------------------------------------------------------------------------------------+ + | .. figure:: | Datasets either generated or fetched. | :Authors: Hind Montassif | + | img/sphere_3d.png | | | + | | | :Since: GUDHI 3.5.0 | + | | | | + | | | :License: MIT (`LGPL v3 `_) | + | | | | + | | | :Requires: `CGAL `_ | + +-----------------------------------+--------------------------------------------+--------------------------------------------------------------------------------------+ + | * :doc:`datasets` | + +-----------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/datasets.rst b/src/python/doc/datasets.rst new file mode 100644 index 00000000..4fa8a628 --- /dev/null +++ b/src/python/doc/datasets.rst @@ -0,0 +1,118 @@ + +:orphan: + +.. To get rid of WARNING: document isn't included in any toctree + +================ +Datasets manual +================ + +Datasets generators +=================== + +We provide the generation of different customizable datasets to use as inputs for Gudhi complexes and data structures. + +Points generators +------------------ + +The module **points** enables the generation of random points on a sphere, random points on a torus and as a grid. + +Points on sphere +^^^^^^^^^^^^^^^^ + +The function **sphere** enables the generation of random i.i.d. points uniformly on a (d-1)-sphere in :math:`R^d`. +The user should provide the number of points to be generated on the sphere :code:`n_samples` and the ambient dimension :code:`ambient_dim`. +The :code:`radius` of sphere is optional and is equal to **1** by default. +Only random points generation is currently available. + +The generated points are given as an array of shape :math:`(n\_samples, ambient\_dim)`. + +Example +""""""" + +.. code-block:: python + + from gudhi.datasets.generators import points + from gudhi import AlphaComplex + + # Generate 50 points on a sphere in R^2 + gen_points = points.sphere(n_samples = 50, ambient_dim = 2, radius = 1, sample = "random") + + # Create an alpha complex from the generated points + alpha_complex = AlphaComplex(points = gen_points) + +.. autofunction:: gudhi.datasets.generators.points.sphere + +Points on a flat torus +^^^^^^^^^^^^^^^^^^^^^^ + +You can also generate points on a torus. 
+ +Two functions are available and give the same output: the first one depends on **CGAL** and the second does not and consists of full python code. + +On another hand, two sample types are provided: you can either generate i.i.d. points on a d-torus in :math:`R^{2d}` *randomly* or on a *grid*. + +First function: **ctorus** +""""""""""""""""""""""""""" + +The user should provide the number of points to be generated on the torus :code:`n_samples`, and the dimension :code:`dim` of the torus on which points would be generated in :math:`R^{2dim}`. +The :code:`sample` argument is optional and is set to **'random'** by default. +In this case, the returned generated points would be an array of shape :math:`(n\_samples, 2*dim)`. +Otherwise, if set to **'grid'**, the points are generated on a grid and would be given as an array of shape: + +.. math:: + + ( ⌊n\_samples^{1 \over {dim}}⌋^{dim}, 2*dim ) + +**Note 1:** The output array first shape is rounded down to the closest perfect :math:`dim^{th}` power. + +**Note 2:** This version is recommended when the user wishes to use **'grid'** as sample type, or **'random'** with a relatively small number of samples (~ less than 150). + +Example +""""""" +.. code-block:: python + + from gudhi.datasets.generators import points + + # Generate 50 points randomly on a torus in R^6 + gen_points = points.ctorus(n_samples = 50, dim = 3) + + # Generate 27 points on a torus as a grid in R^6 + gen_points = points.ctorus(n_samples = 50, dim = 3, sample = 'grid') + +.. autofunction:: gudhi.datasets.generators.points.ctorus + +Second function: **torus** +""""""""""""""""""""""""""" + +The user should provide the number of points to be generated on the torus :code:`n_samples` and the dimension :code:`dim` of the torus on which points would be generated in :math:`R^{2dim}`. +The :code:`sample` argument is optional and is set to **'random'** by default. +The other allowed value of sample type is **'grid'**. + +**Note:** This version is recommended when the user wishes to use **'random'** as sample type with a great number of samples and a low dimension. + +Example +""""""" +.. code-block:: python + + from gudhi.datasets.generators import points + + # Generate 50 points randomly on a torus in R^6 + gen_points = points.torus(n_samples = 50, dim = 3) + + # Generate 27 points on a torus as a grid in R^6 + gen_points = points.torus(n_samples = 50, dim = 3, sample = 'grid') + + +.. autofunction:: gudhi.datasets.generators.points.torus + + +Fetching datasets +================= + +We provide some ready-to-use datasets that are not available by default when getting GUDHI, and need to be fetched explicitly. + +.. automodule:: gudhi.datasets.remote + :members: + :special-members: + :show-inheritance: diff --git a/src/python/doc/datasets_generators.inc b/src/python/doc/datasets_generators.inc deleted file mode 100644 index 8d169275..00000000 --- a/src/python/doc/datasets_generators.inc +++ /dev/null @@ -1,14 +0,0 @@ -.. table:: - :widths: 30 40 30 - - +-----------------------------------+--------------------------------------------+--------------------------------------------------------------------------------------+ - | .. figure:: | Datasets generators (points). 
| :Authors: Hind Montassif | - | img/sphere_3d.png | | | - | | | :Since: GUDHI 3.5.0 | - | | | | - | | | :License: MIT (`LGPL v3 `_) | - | | | | - | | | :Requires: `CGAL `_ | - +-----------------------------------+--------------------------------------------+--------------------------------------------------------------------------------------+ - | * :doc:`datasets_generators` | - +-----------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/datasets_generators.rst b/src/python/doc/datasets_generators.rst deleted file mode 100644 index 260c3882..00000000 --- a/src/python/doc/datasets_generators.rst +++ /dev/null @@ -1,105 +0,0 @@ - -:orphan: - -.. To get rid of WARNING: document isn't included in any toctree - -=========================== -Datasets generators manual -=========================== - -We provide the generation of different customizable datasets to use as inputs for Gudhi complexes and data structures. - - -Points generators ------------------- - -The module **points** enables the generation of random points on a sphere, random points on a torus and as a grid. - -Points on sphere -^^^^^^^^^^^^^^^^ - -The function **sphere** enables the generation of random i.i.d. points uniformly on a (d-1)-sphere in :math:`R^d`. -The user should provide the number of points to be generated on the sphere :code:`n_samples` and the ambient dimension :code:`ambient_dim`. -The :code:`radius` of sphere is optional and is equal to **1** by default. -Only random points generation is currently available. - -The generated points are given as an array of shape :math:`(n\_samples, ambient\_dim)`. - -Example -""""""" - -.. code-block:: python - - from gudhi.datasets.generators import points - from gudhi import AlphaComplex - - # Generate 50 points on a sphere in R^2 - gen_points = points.sphere(n_samples = 50, ambient_dim = 2, radius = 1, sample = "random") - - # Create an alpha complex from the generated points - alpha_complex = AlphaComplex(points = gen_points) - -.. autofunction:: gudhi.datasets.generators.points.sphere - -Points on a flat torus -^^^^^^^^^^^^^^^^^^^^^^ - -You can also generate points on a torus. - -Two functions are available and give the same output: the first one depends on **CGAL** and the second does not and consists of full python code. - -On another hand, two sample types are provided: you can either generate i.i.d. points on a d-torus in :math:`R^{2d}` *randomly* or on a *grid*. - -First function: **ctorus** -""""""""""""""""""""""""""" - -The user should provide the number of points to be generated on the torus :code:`n_samples`, and the dimension :code:`dim` of the torus on which points would be generated in :math:`R^{2dim}`. -The :code:`sample` argument is optional and is set to **'random'** by default. -In this case, the returned generated points would be an array of shape :math:`(n\_samples, 2*dim)`. -Otherwise, if set to **'grid'**, the points are generated on a grid and would be given as an array of shape: - -.. math:: - - ( ⌊n\_samples^{1 \over {dim}}⌋^{dim}, 2*dim ) - -**Note 1:** The output array first shape is rounded down to the closest perfect :math:`dim^{th}` power. - -**Note 2:** This version is recommended when the user wishes to use **'grid'** as sample type, or **'random'** with a relatively small number of samples (~ less than 150). - -Example -""""""" -.. 
code-block:: python - - from gudhi.datasets.generators import points - - # Generate 50 points randomly on a torus in R^6 - gen_points = points.ctorus(n_samples = 50, dim = 3) - - # Generate 27 points on a torus as a grid in R^6 - gen_points = points.ctorus(n_samples = 50, dim = 3, sample = 'grid') - -.. autofunction:: gudhi.datasets.generators.points.ctorus - -Second function: **torus** -""""""""""""""""""""""""""" - -The user should provide the number of points to be generated on the torus :code:`n_samples` and the dimension :code:`dim` of the torus on which points would be generated in :math:`R^{2dim}`. -The :code:`sample` argument is optional and is set to **'random'** by default. -The other allowed value of sample type is **'grid'**. - -**Note:** This version is recommended when the user wishes to use **'random'** as sample type with a great number of samples and a low dimension. - -Example -""""""" -.. code-block:: python - - from gudhi.datasets.generators import points - - # Generate 50 points randomly on a torus in R^6 - gen_points = points.torus(n_samples = 50, dim = 3) - - # Generate 27 points on a torus as a grid in R^6 - gen_points = points.torus(n_samples = 50, dim = 3, sample = 'grid') - - -.. autofunction:: gudhi.datasets.generators.points.torus diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst index 2d7921ae..35f4ba46 100644 --- a/src/python/doc/index.rst +++ b/src/python/doc/index.rst @@ -92,7 +92,7 @@ Clustering .. include:: clustering.inc -Datasets generators -******************* +Datasets +******** -.. include:: datasets_generators.inc +.. include:: datasets.inc -- cgit v1.2.3 From 8d1e7aeb3416194d00f45587d1ecea85ba218028 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 28 Jan 2022 16:21:33 +0100 Subject: Return arrays of points instead of files paths when fetching bunny.npy and spiral_2d.csv --- src/python/gudhi/datasets/remote.py | 83 +++++++++++++++++++++------------ src/python/test/test_remote_datasets.py | 33 +++++++------ 2 files changed, 72 insertions(+), 44 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 7e8f9ce7..ef797417 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -7,17 +7,17 @@ # Modification(s): # - YYYY/MM Author: Description of the modification -import hashlib - from os.path import join, exists from os import makedirs from urllib.request import urlretrieve +import hashlib +import numpy as np def _checksum_sha256(file_path): """ - Compute the file checksum using sha256 + Compute the file checksum using sha256. Parameters ---------- @@ -26,7 +26,7 @@ def _checksum_sha256(file_path): Returns ------- - The hex digest of file_path + The hex digest of file_path. """ sha256_hash = hashlib.sha256() chunk_size = 4096 @@ -39,9 +39,9 @@ def _checksum_sha256(file_path): sha256_hash.update(buffer) return sha256_hash.hexdigest() -def fetch(url, filename, dirname = "remote_datasets", file_checksum = None, accept_license = False): +def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = None, accept_license = False): """ - Fetch the wanted dataset from the given url and save it in file_path + Fetch the wanted dataset from the given url and save it in file_path. Parameters ---------- @@ -56,7 +56,7 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None, acce Default is 'None'. accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. 
- Default is False + Default is False. Returns ------- @@ -66,14 +66,8 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None, acce file_path = join(dirname, filename) - # Check for an already existing file at file_path - if not exists(file_path): - # Create directory if not existing - if not exists(dirname): - makedirs(dirname) - - # Get the file - urlretrieve(url, file_path) + # Get the file + urlretrieve(url, file_path) if file_checksum is not None: checksum = _checksum_sha256(file_path) @@ -93,44 +87,71 @@ def fetch(url, filename, dirname = "remote_datasets", file_checksum = None, acce def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): """ - Fetch spiral_2d.csv remotely + Fetch "spiral_2d.csv" remotely. Parameters ---------- filename : string - The name to give to downloaded file. Default is "spiral_2d.csv" + The name to give to downloaded file. Default is "spiral_2d.csv". dirname : string The directory to save the file to. Default is "remote_datasets". Returns ------- - file_path: string - Full path of the created file. + points: array + Array of points stored in "spiral_2d.csv". """ - return fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", filename, dirname, - '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv" + file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38' -def fetch_bunny(filename = "bunny.off", dirname = "remote_datasets/bunny", accept_license = False): + archive_path = join(dirname, filename) + + if not exists(archive_path): + # Create directory if not existing + if not exists(dirname): + makedirs(dirname) + + file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) + + return np.loadtxt(file_path_pkl) + else: + return np.loadtxt(archive_path) + +def fetch_bunny(filename = "bunny.npy", dirname = "remote_datasets/bunny", accept_license = False): """ - Fetch bunny.off remotely and its LICENSE file + Fetch "bunny.npy" remotely and its LICENSE file. Parameters ---------- filename : string - The name to give to downloaded file. Default is "bunny.off" + The name to give to downloaded file. Default is "bunny.npy". dirname : string The directory to save the file to. Default is "remote_datasets/bunny". accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. - Default is False + Default is False. Returns ------- - files_paths: list of strings - Full paths of the created file and its LICENSE. + points: array + Array of points stored in "bunny.npy". 
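    Examples
    --------
    A hypothetical quick check; the expected shape is the one asserted in the
    accompanying tests:

    >>> bunny = fetch_bunny(accept_license = True)
    >>> bunny.shape
    (35947, 3)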
""" - return [fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/LICENSE", "LICENSE", dirname, - 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956'), - fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/bunny.off", filename, dirname, - '11852d5e73e2d4bd7b86a2c5cc8a5884d0fbb72539493e8cec100ea922b19f5b', accept_license)] + file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy" + file_checksum = '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b' + license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" + license_checksum = 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956' + + archive_path = join(dirname, filename) + + if not exists(archive_path): + # Create directory if not existing + if not exists(dirname): + makedirs(dirname) + + license_path = _fetch_remote(license_url, "LICENSE", dirname, license_checksum) + file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum, accept_license) + + return np.load(file_path_pkl, mmap_mode='r') + else: + return np.load(archive_path, mmap_mode='r') diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index e777abc6..56a273b4 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -10,13 +10,14 @@ from gudhi.datasets import remote import re -import os.path +from os.path import isfile, exists +from os import makedirs import io import sys import pytest def _check_dir_file_names(path_file_dw, filename, dirname): - assert os.path.isfile(path_file_dw) + assert isfile(path_file_dw) names_dw = re.split(r' |/|\\', path_file_dw) # Case where inner directories are created in "remote_datasets/"; e.g: "remote_datasets/bunny" @@ -29,15 +30,20 @@ def _check_dir_file_names(path_file_dw, filename, dirname): assert filename == names_dw[1] def _check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): - path_file_dw = remote.fetch(url, filename, dirname, file_checksum) + if not exists(dirname): + makedirs(dirname) + path_file_dw = remote._fetch_remote(url, filename, dirname, file_checksum) _check_dir_file_names(path_file_dw, filename, dirname) def _get_bunny_license_print(accept_license = False): capturedOutput = io.StringIO() # Redirect stdout sys.stdout = capturedOutput - remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/bunny.off", "bunny.off", "remote_datasets/bunny", - '11852d5e73e2d4bd7b86a2c5cc8a5884d0fbb72539493e8cec100ea922b19f5b', accept_license) + + if not exists("remote_datasets/bunny"): + makedirs("remote_datasets/bunny") + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "remote_datasets/bunny", + '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) # Reset redirect sys.stdout = sys.__stdout__ return capturedOutput @@ -60,20 +66,21 @@ def test_fetch_remote_datasets(): _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") # Test spiral_2d.csv wrapping function - path_file_dw = remote.fetch_spiral_2d() - _check_dir_file_names(path_file_dw, 'spiral_2d.csv', 'remote_datasets') + spiral_2d_arr = remote.fetch_spiral_2d() + assert spiral_2d_arr.shape == (114562, 2) - # Test printing existing LICENSE file when fetching bunny.off with accept_license = 
False (default) + # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) # Fetch LICENSE file - remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/LICENSE", "LICENSE", "remote_datasets/bunny", + if not exists("remote_datasets/bunny"): + makedirs("remote_datasets/bunny") + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "remote_datasets/bunny", 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956') with open("remote_datasets/bunny/LICENSE") as f: assert f.read() == _get_bunny_license_print().getvalue().rstrip("\n") - # Test not printing bunny.off LICENSE when accept_license = True + # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() # Test fetch_bunny wrapping function - path_file_dw = remote.fetch_bunny() - _check_dir_file_names(path_file_dw[0], 'LICENSE', 'remote_datasets/bunny') - _check_dir_file_names(path_file_dw[1], 'bunny.off', 'remote_datasets/bunny') + bunny_arr = remote.fetch_bunny() + assert bunny_arr.shape == (35947, 3) -- cgit v1.2.3 From ad7a50fb87ed4237b9a02165eac39ae355dd5440 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 1 Feb 2022 10:32:03 +0100 Subject: Fetch spiral_2d.npy file instead of csv Add some modifications related to those done on files in gudhi-data --- src/python/gudhi/datasets/remote.py | 20 ++++++++++---------- src/python/test/test_remote_datasets.py | 14 +++++++------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index ef797417..3498a645 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -85,24 +85,24 @@ def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = No return file_path -def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): +def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = "remote_datasets/spiral_2d"): """ - Fetch "spiral_2d.csv" remotely. + Fetch "spiral_2d.npy" remotely. Parameters ---------- filename : string - The name to give to downloaded file. Default is "spiral_2d.csv". + The name to give to downloaded file. Default is "spiral_2d.npy". dirname : string - The directory to save the file to. Default is "remote_datasets". + The directory to save the file to. Default is "remote_datasets/spiral_2d". Returns ------- points: array - Array of points stored in "spiral_2d.csv". + Array of points stored in "spiral_2d.npy". 
""" - file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv" - file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38' + file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy" + file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf' archive_path = join(dirname, filename) @@ -113,9 +113,9 @@ def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) - return np.loadtxt(file_path_pkl) + return np.load(file_path_pkl, mmap_mode='r') else: - return np.loadtxt(archive_path) + return np.load(archive_path, mmap_mode='r') def fetch_bunny(filename = "bunny.npy", dirname = "remote_datasets/bunny", accept_license = False): """ @@ -140,7 +140,7 @@ def fetch_bunny(filename = "bunny.npy", dirname = "remote_datasets/bunny", accep file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy" file_checksum = '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b' license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" - license_checksum = 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956' + license_checksum = 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a' archive_path = join(dirname, filename) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 56a273b4..2057c63b 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -51,21 +51,21 @@ def _get_bunny_license_print(accept_license = False): def test_fetch_remote_datasets(): # Test fetch with a wrong checksum with pytest.raises(OSError): - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = 'XXXXXXXXXX') + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy", file_checksum = 'XXXXXXXXXX') # Test files download from given urls with checksums provided - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", - file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy", + file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf') _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') # Test files download from given urls without checksums - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy") _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") - # Test spiral_2d.csv wrapping function + # Test fetch_spiral_2d wrapping function spiral_2d_arr = remote.fetch_spiral_2d() assert spiral_2d_arr.shape == (114562, 2) @@ -74,9 +74,9 @@ def test_fetch_remote_datasets(): if not exists("remote_datasets/bunny"): 
makedirs("remote_datasets/bunny") remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "remote_datasets/bunny", - 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956') + 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') with open("remote_datasets/bunny/LICENSE") as f: - assert f.read() == _get_bunny_license_print().getvalue().rstrip("\n") + assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() -- cgit v1.2.3 From 741f4f182479d1e5e78e9eb9180adce0a72e99b6 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Feb 2022 10:38:15 +0100 Subject: Modify remote fetching test to increase its coverage --- src/python/test/test_remote_datasets.py | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 2057c63b..dac9ee80 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -40,8 +40,6 @@ def _get_bunny_license_print(accept_license = False): # Redirect stdout sys.stdout = capturedOutput - if not exists("remote_datasets/bunny"): - makedirs("remote_datasets/bunny") remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "remote_datasets/bunny", '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) # Reset redirect @@ -65,22 +63,17 @@ def test_fetch_remote_datasets(): _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") - # Test fetch_spiral_2d wrapping function - spiral_2d_arr = remote.fetch_spiral_2d() - assert spiral_2d_arr.shape == (114562, 2) + # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) + for i in range(2): + spiral_2d_arr = remote.fetch_spiral_2d() + assert spiral_2d_arr.shape == (114562, 2) + + bunny_arr = remote.fetch_bunny() + assert bunny_arr.shape == (35947, 3) # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) - # Fetch LICENSE file - if not exists("remote_datasets/bunny"): - makedirs("remote_datasets/bunny") - remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "remote_datasets/bunny", - 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') with open("remote_datasets/bunny/LICENSE") as f: assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() - - # Test fetch_bunny wrapping function - bunny_arr = remote.fetch_bunny() - assert bunny_arr.shape == (35947, 3) -- cgit v1.2.3 From 19689c712a1f5945e664f9c74c14b6994e7afaaf Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Feb 2022 16:14:17 +0100 Subject: Try to fix failing test in windows --- src/python/test/test_remote_datasets.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index dac9ee80..643485f9 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -40,6 +40,9 @@ def 
_get_bunny_license_print(accept_license = False): # Redirect stdout sys.stdout = capturedOutput + if not exists("remote_datasets/bunny"): + makedirs("remote_datasets/bunny") + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "remote_datasets/bunny", '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) # Reset redirect @@ -72,6 +75,11 @@ def test_fetch_remote_datasets(): assert bunny_arr.shape == (35947, 3) # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) + # Fetch LICENSE file + if not exists("remote_datasets/bunny"): + makedirs("remote_datasets/bunny") + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "remote_datasets/bunny", + 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') with open("remote_datasets/bunny/LICENSE") as f: assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") -- cgit v1.2.3 From a2d55f9bbf0f45e3ae4c147f734ce04f5bc87ab8 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Feb 2022 21:32:55 +0100 Subject: Another attempt to fix windows failing test: move fetch_bunny to the end --- src/python/test/test_remote_datasets.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 643485f9..5e607d73 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -66,14 +66,6 @@ def test_fetch_remote_datasets(): _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") - # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) - for i in range(2): - spiral_2d_arr = remote.fetch_spiral_2d() - assert spiral_2d_arr.shape == (114562, 2) - - bunny_arr = remote.fetch_bunny() - assert bunny_arr.shape == (35947, 3) - # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) # Fetch LICENSE file if not exists("remote_datasets/bunny"): @@ -85,3 +77,11 @@ def test_fetch_remote_datasets(): # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() + + # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) + for i in range(2): + spiral_2d_arr = remote.fetch_spiral_2d() + assert spiral_2d_arr.shape == (114562, 2) + + bunny_arr = remote.fetch_bunny() + assert bunny_arr.shape == (35947, 3) -- cgit v1.2.3 From 5c00d2dfcf4b0e2835441533f12f195d83652e99 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Wed, 2 Feb 2022 22:11:04 +0100 Subject: fixed bugs from the new API --- src/python/CMakeLists.txt | 3 ++- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 +- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 2 +- src/python/doc/rips_complex_tflow_itf_ref.rst | 2 +- src/python/gudhi/tensorflow/cubical_layer.py | 8 ++++---- src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py | 3 +-- src/python/gudhi/tensorflow/rips_layer.py | 4 ++-- src/python/test/test_diff.py | 4 ++-- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index bed2b541..e4ac1b48 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -556,6 
+556,7 @@ if(PYTHONINTERP_FOUND) # Differentiation if(TENSORFLOW_FOUND) add_gudhi_py_test(test_diff) + endif() # Betti curves if(SKLEARN_FOUND AND SCIPY_FOUND) @@ -596,4 +597,4 @@ if(PYTHONINTERP_FOUND) else(PYTHONINTERP_FOUND) message("++ Python module will not be compiled because no Python interpreter was found") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") -endif(PYTHONINTERP_FOUND) \ No newline at end of file +endif(PYTHONINTERP_FOUND) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 692191ba..18b97adf 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -19,7 +19,7 @@ Example of gradient computed from cubical persistence cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X) + dgm = cl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index 3200b8e5..56bb4492 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -32,7 +32,7 @@ Example of gradient computed from lower-star filtration of a simplex tree sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0]) with tf.GradientTape() as tape: - dgm = sl.call(F) + dgm = sl.call(F)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [F]) diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index fc42e5c9..104b0971 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -19,7 +19,7 @@ Example of gradient computed from Vietoris-Rips persistence rl = RipsLayer(maximum_edge_length=2., dimensions=[0]) with tf.GradientTape() as tape: - dgm = rl.call(X) + dgm = rl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index b16c512f..99d02d66 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -47,7 +47,7 @@ class CubicalLayer(tf.keras.layers.Layer): """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions - self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + self.min_persistence = min_persistence if min_persistence != None else [0.] 
* len(self.dimensions) assert len(self.min_persistence) == len(self.dimensions) def call(self, X): @@ -64,13 +64,13 @@ class CubicalLayer(tf.keras.layers.Layer): # Don't compute gradient for this operation Xflat = tf.reshape(X, [-1]) Xdim = X.shape - indices = _Cubical(Xflat.numpy(), Xdim, self.dimensions) + indices_list = _Cubical(Xflat.numpy(), Xdim, self.dimensions) # Get persistence diagram by simply picking the corresponding entries in the image - self.dgms = [tf.reshape(tf.gather(Xflat, indice), [-1,2]) for indice in indices] + self.dgms = [tf.reshape(tf.gather(Xflat, indices), [-1,2]) for indices in indices_list] for idx_dim in range(len(self.min_persistence)): min_pers = self.min_persistence[idx_dim] if min_pers >= 0: finite_dgm = self.dgms[idx_dim] - persistent_indices = np.argwhere(np.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers).ravel() + persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers) self.dgms[idx_dim] = tf.gather(finite_dgm, indices=persistent_indices) return self.dgms diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index e1627944..8da1f7fe 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -12,8 +12,7 @@ def _LowerStarSimplexTree(simplextree, filtration, dimensions): # filtration (function values on the vertices of st), # dimensions (homology dimensions), - for s,_ in simplextree.get_filtration(): - simplextree.assign_filtration(s, -1e10) + simplextree.reset_filtration(-np.inf, 0) # Assign new filtration values for i in range(simplextree.num_vertices()): diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index a5f212e3..88d501c1 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -40,7 +40,7 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, dimensions, maximum_edge_length=12, min_persistence=None, **kwargs): + def __init__(self, dimensions, maximum_edge_length=np.inf, min_persistence=None, **kwargs): """ Constructor for the RipsLayer class @@ -66,7 +66,7 @@ class RipsLayer(tf.keras.layers.Layer): dgms (list of tuple of TensorFlow variables): list of Rips persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Compute distance matrix - DX = tf.math.sqrt(tf.reduce_sum((tf.expand_dims(X, 1)-tf.expand_dims(X, 0))**2, 2)) + DX = tf.norm(tf.expand_dims(X, 1)-tf.expand_dims(X, 0), axis=2) # Compute vertices associated to positive and negative simplices # Don't compute gradient for this operation indices = _Rips(DX.numpy(), self.max_edge, self.dimensions) diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index e0c99d07..bab0d10c 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -22,7 +22,7 @@ def test_cubical_diff(): cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0] + dgm = cl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 @@ -34,7 +34,7 @@ def 
test_nonsquare_cubical_diff(): cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0] + dgm = cl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[0.,0.5,-0.5],[0.,0.,0.]])).sum() <= 1e-6 -- cgit v1.2.3 From 6109fd920ba477f89e83fea3df9803232c169463 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 3 Feb 2022 10:24:38 +0100 Subject: Remove archive folder before testing wrapping functions --- src/python/test/test_remote_datasets.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 5e607d73..93a8a982 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -78,6 +78,9 @@ def test_fetch_remote_datasets(): # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() + # Remove "remote_datasets" directory and all its content + import shutil + shutil.rmtree("remote_datasets") # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) for i in range(2): spiral_2d_arr = remote.fetch_spiral_2d() -- cgit v1.2.3 From a13282e4da9910a5d2bdadf97040095ae5b7880a Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 4 Feb 2022 15:39:51 +0100 Subject: Store fetched datasets in user directory by default --- src/python/gudhi/datasets/remote.py | 68 ++++++++++++++++++++++++++------- src/python/test/test_remote_datasets.py | 31 +++++++++++---- 2 files changed, 79 insertions(+), 20 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 3498a645..3d6c01b0 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -7,14 +7,52 @@ # Modification(s): # - YYYY/MM Author: Description of the modification -from os.path import join, exists +from os.path import join, exists, expanduser from os import makedirs from urllib.request import urlretrieve import hashlib +import shutil import numpy as np +def get_data_home(data_home = None): + """ + Return the path of the remote datasets directory. + This folder is used to store remotely fetched datasets. + By default the datasets directory is set to a folder named 'remote_datasets' in the user home folder. + Alternatively, it can be set by giving an explicit folder path. The '~' symbol is expanded to the user home folder. + If the folder does not already exist, it is automatically created. + + Parameters + ---------- + data_home : string + The path to remote datasets directory. Default is `None`, meaning that the data home directory will be set to "~/remote_datasets". + + Returns + ------- + data_home: string + The path to remote datasets directory. + """ + if data_home is None: + data_home = join("~", "remote_datasets") + data_home = expanduser(data_home) + makedirs(data_home, exist_ok=True) + return data_home + + +def clear_data_home(data_home = None): + """ + Delete all the content of the data home cache. + + Parameters + ---------- + data_home : string, default is None. + The path to remote datasets directory. If `None`, the default directory to be removed is set to "~/remote_datasets". + """ + data_home = get_data_home(data_home) + shutil.rmtree(data_home) + def _checksum_sha256(file_path): """ Compute the file checksum using sha256. 
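For reference, the two cache-management helpers added above are meant to be used as follows; this is a minimal sketch assuming the gudhi.datasets.remote module from this patch is installed, and the folder name "~/my_gudhi_data" is purely illustrative:

    from gudhi.datasets import remote

    # Default data home: "~/remote_datasets" ('~' is expanded, folder created if missing)
    data_home = remote.get_data_home()

    # An explicit folder can be passed instead
    custom_home = remote.get_data_home(data_home="~/my_gudhi_data")

    # Remove the cache folder and all of its content
    remote.clear_data_home(data_home="~/my_gudhi_data")
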
@@ -85,7 +123,7 @@ def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = No return file_path -def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = "remote_datasets/spiral_2d"): +def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): """ Fetch "spiral_2d.npy" remotely. @@ -94,7 +132,7 @@ def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = "remote_datasets/spira filename : string The name to give to downloaded file. Default is "spiral_2d.npy". dirname : string - The directory to save the file to. Default is "remote_datasets/spiral_2d". + The directory to save the file to. Default is None, meaning that the data home will be set to "~/remote_datasets/spiral_2d". Returns ------- @@ -104,20 +142,22 @@ def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = "remote_datasets/spira file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy" file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf' + if dirname is None: + dirname = join(get_data_home(dirname), "spiral_2d") + makedirs(dirname, exist_ok=True) + else: + dirname = get_data_home(dirname) + archive_path = join(dirname, filename) if not exists(archive_path): - # Create directory if not existing - if not exists(dirname): - makedirs(dirname) - file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) return np.load(file_path_pkl, mmap_mode='r') else: return np.load(archive_path, mmap_mode='r') -def fetch_bunny(filename = "bunny.npy", dirname = "remote_datasets/bunny", accept_license = False): +def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): """ Fetch "bunny.npy" remotely and its LICENSE file. @@ -126,7 +166,7 @@ def fetch_bunny(filename = "bunny.npy", dirname = "remote_datasets/bunny", accep filename : string The name to give to downloaded file. Default is "bunny.npy". dirname : string - The directory to save the file to. Default is "remote_datasets/bunny". + The directory to save the file to. Default is None, meaning that the data home will be set to "~/remote_datasets/bunny". accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. Default is False. 
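With this change, fetch_spiral_2d caches the file under the data home and returns the points directly; a hedged usage sketch (the array shapes are the ones asserted in the tests below, and "~/another_fetch_folder" is illustrative):

    from gudhi.datasets import remote

    points = remote.fetch_spiral_2d()   # downloads on first call, reuses the cached file afterwards
    print(points.shape)                 # (114562, 2)

    bunny = remote.fetch_bunny(dirname="~/another_fetch_folder", accept_license=True)
    print(bunny.shape)                  # (35947, 3)
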
@@ -142,13 +182,15 @@ def fetch_bunny(filename = "bunny.npy", dirname = "remote_datasets/bunny", accep license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" license_checksum = 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a' + if dirname is None: + dirname = join(get_data_home(dirname), "bunny") + makedirs(dirname, exist_ok=True) + else: + dirname = get_data_home(dirname) + archive_path = join(dirname, filename) if not exists(archive_path): - # Create directory if not existing - if not exists(dirname): - makedirs(dirname) - license_path = _fetch_remote(license_url, "LICENSE", dirname, license_checksum) file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum, accept_license) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 93a8a982..27eb51b0 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -10,7 +10,7 @@ from gudhi.datasets import remote import re -from os.path import isfile, exists +from os.path import isfile, isdir, expanduser from os import makedirs import io import sys @@ -30,8 +30,7 @@ def _check_dir_file_names(path_file_dw, filename, dirname): assert filename == names_dw[1] def _check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): - if not exists(dirname): - makedirs(dirname) + makedirs(dirname, exist_ok=True) path_file_dw = remote._fetch_remote(url, filename, dirname, file_checksum) _check_dir_file_names(path_file_dw, filename, dirname) @@ -40,8 +39,7 @@ def _get_bunny_license_print(accept_license = False): # Redirect stdout sys.stdout = capturedOutput - if not exists("remote_datasets/bunny"): - makedirs("remote_datasets/bunny") + makedirs("remote_datasets/bunny", exist_ok=True) remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "remote_datasets/bunny", '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) @@ -68,8 +66,7 @@ def test_fetch_remote_datasets(): # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) # Fetch LICENSE file - if not exists("remote_datasets/bunny"): - makedirs("remote_datasets/bunny") + makedirs("remote_datasets/bunny", exist_ok=True) remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "remote_datasets/bunny", 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') with open("remote_datasets/bunny/LICENSE") as f: @@ -88,3 +85,23 @@ def test_fetch_remote_datasets(): bunny_arr = remote.fetch_bunny() assert bunny_arr.shape == (35947, 3) + + # Check that default dir was created + assert isdir(expanduser("~/remote_datasets")) == True + + # Test clear_data_home + clear_data_home() + assert isdir(expanduser("~/remote_datasets")) == False + + # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default + spiral_2d_arr = remote.fetch_spiral_2d(dirname = "~/test") + assert spiral_2d_arr.shape == (114562, 2) + + bunny_arr = remote.fetch_bunny(dirname = "~/test") + assert bunny_arr.shape == (35947, 3) + + assert isdir(expanduser("~/test")) == True + + # Test clear_data_home with data directory different from default + clear_data_home("~/test") + assert isdir(expanduser("~/test")) == False -- cgit v1.2.3 From b0071de9ee7b6b4feb2eb9f19ceb759de21c997f Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 4 Feb 2022 16:09:54 
+0100 Subject: Add forgotten module name before func call --- src/python/test/test_remote_datasets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 27eb51b0..9532b4ec 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -90,7 +90,7 @@ def test_fetch_remote_datasets(): assert isdir(expanduser("~/remote_datasets")) == True # Test clear_data_home - clear_data_home() + remote.clear_data_home() assert isdir(expanduser("~/remote_datasets")) == False # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default @@ -103,5 +103,5 @@ def test_fetch_remote_datasets(): assert isdir(expanduser("~/test")) == True # Test clear_data_home with data directory different from default - clear_data_home("~/test") + remote.clear_data_home("~/test") assert isdir(expanduser("~/test")) == False -- cgit v1.2.3 From b5d7d6c2857d305ba2828065310c11edefb37c4e Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 7 Feb 2022 13:03:45 +0100 Subject: Test get_data_home and clear_data_home on a separate folder --- src/python/test/test_remote_datasets.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 9532b4ec..c160f270 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -87,21 +87,20 @@ def test_fetch_remote_datasets(): assert bunny_arr.shape == (35947, 3) # Check that default dir was created - assert isdir(expanduser("~/remote_datasets")) == True - - # Test clear_data_home - remote.clear_data_home() - assert isdir(expanduser("~/remote_datasets")) == False + assert isdir(expanduser("~/remote_datasets")) # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default - spiral_2d_arr = remote.fetch_spiral_2d(dirname = "~/test") + spiral_2d_arr = remote.fetch_spiral_2d(dirname = "~/another_fetch_folder") assert spiral_2d_arr.shape == (114562, 2) - bunny_arr = remote.fetch_bunny(dirname = "~/test") + bunny_arr = remote.fetch_bunny(dirname = "~/another_fetch_folder") assert bunny_arr.shape == (35947, 3) - assert isdir(expanduser("~/test")) == True + assert isdir(expanduser("~/another_fetch_folder")) + + # Test get_data_home and clear_data_home on new empty folder + empty_data_home = remote.get_data_home(data_home="empty_folder") + assert isdir(empty_data_home) - # Test clear_data_home with data directory different from default - remote.clear_data_home("~/test") - assert isdir(expanduser("~/test")) == False + remote.clear_data_home(data_home=empty_data_home) + assert not isdir(empty_data_home) -- cgit v1.2.3 From e9b020adf11d48ce7a88932a5fe12cef011e72c9 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 7 Feb 2022 13:18:57 +0100 Subject: Separate tests into different functions and remove all test folders at the end --- src/python/test/test_remote_datasets.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index c160f270..2e595423 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -7,15 +7,17 @@ # Modification(s): # - YYYY/MM Author: Description of the modification - from gudhi.datasets import remote + import re -from os.path import isfile, isdir, expanduser 
-from os import makedirs +import shutil import io import sys import pytest +from os.path import isfile, isdir, expanduser +from os import makedirs + def _check_dir_file_names(path_file_dw, filename, dirname): assert isfile(path_file_dw) @@ -76,8 +78,9 @@ def test_fetch_remote_datasets(): assert "" == _get_bunny_license_print(accept_license = True).getvalue() # Remove "remote_datasets" directory and all its content - import shutil shutil.rmtree("remote_datasets") + +def test_fetch_remote_datasets_wrapped(): # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) for i in range(2): spiral_2d_arr = remote.fetch_spiral_2d() @@ -98,6 +101,14 @@ def test_fetch_remote_datasets(): assert isdir(expanduser("~/another_fetch_folder")) + # Remove test folders + shutil.rmtree(expanduser("~/remote_datasets")) + shutil.rmtree(expanduser("~/another_fetch_folder")) + + assert not isdir(expanduser("~/remote_datasets")) + assert not isdir(expanduser("~/another_fetch_folder")) + +def test_data_home(): # Test get_data_home and clear_data_home on new empty folder empty_data_home = remote.get_data_home(data_home="empty_folder") assert isdir(empty_data_home) -- cgit v1.2.3 From e964ec32247ce02fb12939cfcddaeabc04639869 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 7 Feb 2022 16:52:55 +0100 Subject: Del used variables before removing folders --- src/python/test/test_remote_datasets.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 2e595423..cb53cb85 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -102,6 +102,8 @@ def test_fetch_remote_datasets_wrapped(): assert isdir(expanduser("~/another_fetch_folder")) # Remove test folders + del spiral_2d_arr + del bunny_arr shutil.rmtree(expanduser("~/remote_datasets")) shutil.rmtree(expanduser("~/another_fetch_folder")) -- cgit v1.2.3 From 2d1fb6b63f0ca0c7e027cc298fc16198a6283df1 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 11 Feb 2022 15:00:27 +0100 Subject: Do some code clean up/renaming --- .../benchmark/cech_complex_benchmark.cpp | 13 ++- .../example/cech_complex_example_from_points.cpp | 2 +- .../example/cech_complex_step_by_step.cpp | 6 +- src/Cech_complex/include/gudhi/Cech_complex.h | 40 +++----- .../include/gudhi/Cech_complex/Cech_kernel.h | 107 --------------------- .../include/gudhi/Cech_complex_blocker.h | 22 ++--- .../include/gudhi/sphere_circumradius.h | 62 ++++++++++++ src/Cech_complex/test/test_cech_complex.cpp | 10 +- src/Cech_complex/utilities/cech_persistence.cpp | 5 +- 9 files changed, 106 insertions(+), 161 deletions(-) delete mode 100644 src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h create mode 100644 src/Cech_complex/include/gudhi/sphere_circumradius.h diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index 94c5fa4f..b283e1a8 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -34,9 +34,8 @@ using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; using Kernel = CGAL::Epick_d>; using Point_cgal = typename Kernel::Point_d; -using Point_cloud_cgal = std::vector; using Points_off_reader_cgal = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; class 
Minimal_enclosing_ball_radius { public: @@ -83,11 +82,11 @@ int main(int argc, char* argv[]) { off_reader.get_point_cloud(), threshold, Minimal_enclosing_ball_radius()); std::clog << miniball_clock << std::endl; - Gudhi::Clock cgal_miniball_clock("Gudhi::Minimal_enclosing_ball_radius_cgal()"); + Gudhi::Clock cgal_circumsphere_clock("Gudhi::cech_complex::Sphere_circumradius_cgal()"); // Compute the proximity graph of the points - Proximity_graph cgal_miniball_prox_graph = Gudhi::compute_proximity_graph( - off_reader_cgal.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius()); - std::clog << cgal_miniball_clock << std::endl; + Proximity_graph cgal_circumsphere_prox_graph = Gudhi::compute_proximity_graph( + off_reader_cgal.get_point_cloud(), threshold, Gudhi::cech_complex::Sphere_circumradius()); + std::clog << cgal_circumsphere_clock << std::endl; boost::filesystem::path full_path(boost::filesystem::current_path()); std::clog << "Current path is : " << full_path << std::endl; @@ -109,7 +108,7 @@ int main(int argc, char* argv[]) { std::clog << radius << ";"; Gudhi::Clock rips_clock("Rips computation"); Rips_complex rips_complex_from_points(off_reader_cgal.get_point_cloud(), radius, - Gudhi::Minimal_enclosing_ball_radius()); + Gudhi::cech_complex::Sphere_circumradius()); Simplex_tree rips_stree; rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); // ------------------------------------------ diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp index 78861951..38021e4a 100644 --- a/src/Cech_complex/example/cech_complex_example_from_points.cpp +++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp @@ -16,7 +16,7 @@ int main() { using FT = typename Kernel::FT; using Point = typename Kernel::Point_d; using Point_cloud = std::vector; - using Cech_complex = Gudhi::cech_complex::Cech_complex; + using Cech_complex = Gudhi::cech_complex::Cech_complex; Point_cloud points; diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp index c8dd1585..4401f6af 100644 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp @@ -9,7 +9,7 @@ */ #include -#include +#include #include #include @@ -52,7 +52,7 @@ class Cech_blocker { std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } - Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points); + Filtration_value radius = Gudhi::cech_complex::Sphere_circumradius()(points); #ifdef DEBUG_TRACES std::clog << "radius = " << radius << " - " << (radius > max_radius_) << std::endl; #endif // DEBUG_TRACES @@ -83,7 +83,7 @@ int main(int argc, char* argv[]) { // Compute the proximity graph of the points Proximity_graph prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), max_radius, - Gudhi::Minimal_enclosing_ball_radius()); + Gudhi::cech_complex::Sphere_circumradius()); // Construct the Cech complex in a Simplex Tree Simplex_tree st; diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 0031d861..375be1d2 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -11,7 +11,7 @@ #ifndef CECH_COMPLEX_H_ #define CECH_COMPLEX_H_ -#include // for Gudhi::Minimal_enclosing_ball_radius +#include // for Gudhi::cech_complex::Sphere_circumradius #include // for 
Gudhi::Proximity_graph #include // for GUDHI_CHECK #include // for Gudhi::cech_complex::Cech_blocker @@ -31,23 +31,21 @@ namespace cech_complex { * * \details * The data structure is a proximity graph, containing edges when the edge length is less or equal - * to a given max_radius. Edge length is computed from `Gudhi::Minimal_enclosing_ball_radius` distance function. + * to a given max_radius. Edge length is computed from `Gudhi::cech_complex::Sphere_circumradius` distance function. * - * \tparam SimplicialComplexForProximityGraph furnishes `Vertex_handle` and `Filtration_value` type definition required - * by `Gudhi::Proximity_graph`. + * \tparam Kernel CGAL kernel. + * + * \tparam SimplicialComplexForCechComplex furnishes `Vertex_handle` and `Filtration_value` type definition required + * by `Gudhi::Proximity_graph` and Cech blocker. * - * \tparam ForwardPointRange must be a range for which `std::begin()` and `std::end()` methods return input - * iterators on a point. `std::begin()` and `std::end()` methods are also required for a point. */ -template +template class Cech_complex { private: // Required by compute_proximity_graph - using Vertex_handle = typename SimplicialComplexForProximityGraph::Vertex_handle; - using Filtration_value = typename SimplicialComplexForProximityGraph::Filtration_value; - using Proximity_graph = Gudhi::Proximity_graph; - - public: + using Vertex_handle = typename SimplicialComplexForCechComplex::Vertex_handle; + using Filtration_value = typename SimplicialComplexForCechComplex::Filtration_value; + using Proximity_graph = Gudhi::Proximity_graph; using cech_blocker = Cech_blocker; @@ -57,27 +55,21 @@ class Cech_complex { // Numeric type of coordinates in the kernel using FT = typename cech_blocker::FT; // Sphere is a pair of point and squared radius. - using Sphere = typename std::pair; + using Sphere = typename cech_blocker::Sphere; - public: + public: /** \brief Cech_complex constructor from a list of points. * - * @param[in] points Range of points. + * @param[in] points Vector of points where each point is defined as `kernel::Point_d`. * @param[in] max_radius Maximal radius value. * - * \tparam ForwardPointRange must be a range of Point. Point must be a range of copyable Cartesian coordinates. - * */ - Cech_complex(const ForwardPointRange& points, Filtration_value max_radius) : max_radius_(max_radius) { - // Point cloud deep copy - -// point_cloud_.reserve(boost::size(points)); -// for (auto&& point : points) point_cloud_.emplace_back(point.cartesian_begin(), point.cartesian_end()); + Cech_complex(const Point_cloud & points, Filtration_value max_radius) : max_radius_(max_radius) { point_cloud_.assign(points.begin(), points.end()); - cech_skeleton_graph_ = Gudhi::compute_proximity_graph( - point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius()); + cech_skeleton_graph_ = Gudhi::compute_proximity_graph( + point_cloud_, max_radius_, Sphere_circumradius()); } /** \brief Initializes the simplicial complex from the proximity graph and expands it until a given maximal diff --git a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h b/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h deleted file mode 100644 index 89012206..00000000 --- a/src/Cech_complex/include/gudhi/Cech_complex/Cech_kernel.h +++ /dev/null @@ -1,107 +0,0 @@ -/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
- * Author(s): Hind Montassif - * - * Copyright (C) 2021 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef CECH_KERNEL_H_ -#define CECH_KERNEL_H_ - -#include // for #include - -#include // for std::sqrt -#include - -namespace Gudhi { - -// namespace cech_complex { - -/** @brief Compute the radius of the minimal enclosing ball between Points given by a range of coordinates. - * The points are assumed to have the same dimension. */ -template -class Minimal_enclosing_ball_radius { - private: - Kernel kernel_; - public: - using Point = typename Kernel::Point_d; - using Point_cloud = typename std::vector; - - /** \brief Enclosing ball radius from two points using CGAL. - * - * @param[in] point_1 - * @param[in] point_2 - * @return Enclosing ball radius for the two points. - * \tparam Point must be a Kernel::Point_d from CGAL. - * - */ - double operator()(const Point& point_1, const Point& point_2) const { - return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; - } - - - /** \brief Enclosing ball radius from a point cloud using CGAL. - * - * @param[in] point_cloud The points. - * @return Enclosing ball radius for the points. - * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. - * - */ - double operator()(const Point_cloud& point_cloud) const { - return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); - } - -}; - -/** - * \class Cech_kernel - * \brief Cech complex kernel container. - * - * \details - * The Cech complex kernel container stores CGAL Kernel and dispatch basic computations. - */ - -// template < typename Kernel > -// class Cech_kernel { -// private: -// // Kernel for functions access. -// Kernel kernel_; -// public: -// using Point_d = typename Kernel::Point_d; -// // Numeric type of coordinates in the kernel -// using FT = typename Kernel::FT; -// // Sphere is a pair of point and squared radius. -// using Sphere = typename std::pair; -// -// int get_dimension(const Point_d& p0) const { -// return kernel_.point_dimension_d_object()(p0); -// } -// -// template -// Sphere get_sphere(PointIterator begin, PointIterator end) const { -// Point_d c = kernel_.construct_circumcenter_d_object()(begin, end); -// FT r = kernel_.squared_distance_d_object()(c, *begin); -// return std::make_pair(std::move(c), std::move(r)); -// } -// -// template -// FT get_squared_radius(PointIterator begin, PointIterator end) const { -// return kernel_.compute_squared_radius_d_object()(begin, end); -// } -// -// FT get_squared_radius(const Sphere& sph) const { -// return sph.second; -// } -// }; - - -//} // namespace cech_complex - -// namespace cechcomplex = cech_complex; - -} // namespace Gudhi - -#endif // CECH_KERNEL_H_ diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index f7f86534..1a696422 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -31,17 +31,15 @@ namespace cech_complex { * \details * Čech blocker is an oracle constructed from a Cech_complex and a simplicial complex. 
* - * \tparam SimplicialComplexForProximityGraph furnishes `Simplex_handle` and `Filtration_value` type definition, + * \tparam SimplicialComplexForCech furnishes `Simplex_handle` and `Filtration_value` type definition, * `simplex_vertex_range(Simplex_handle sh)`and `assign_filtration(Simplex_handle sh, Filtration_value filt)` methods. * - * \tparam Chech_complex is required by the blocker. + * \tparam Cech_complex is required by the blocker. + * + * \tparam Kernel CGAL kernel. */ template class Cech_blocker { - private: - - using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle; - using Filtration_value = typename SimplicialComplexForCech::Filtration_value; public: @@ -51,10 +49,10 @@ class Cech_blocker { // Sphere is a pair of point and squared radius. using Sphere = typename std::pair; - template - FT get_squared_radius(PointIterator begin, PointIterator end) const { - return kernel_.compute_squared_radius_d_object()(begin, end); - } + private: + + using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle; + using Filtration_value = typename SimplicialComplexForCech::Filtration_value; template Sphere get_sphere(PointIterator begin, PointIterator end) const { @@ -63,6 +61,7 @@ class Cech_blocker { return std::make_pair(std::move(c), std::move(r)); } + public: /** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex * radius and returns if the simplex expansion must be blocked. @@ -108,10 +107,11 @@ class Cech_blocker { if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { radius = std::sqrt(cast_to_double(sph.second)); #ifdef DEBUG_TRACES - std::clog << "circumcenter: " << sph.first << ", radius: " << radius << std::endl; + std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES if (cast_to_double(sph.second) < cast_to_double(min_enclos_ball.second)) min_enclos_ball = sph; + break; } } // Get the minimal radius of all faces enclosing balls if exists diff --git a/src/Cech_complex/include/gudhi/sphere_circumradius.h b/src/Cech_complex/include/gudhi/sphere_circumradius.h new file mode 100644 index 00000000..a6dec3dc --- /dev/null +++ b/src/Cech_complex/include/gudhi/sphere_circumradius.h @@ -0,0 +1,62 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. + * Author(s): Hind Montassif + * + * Copyright (C) 2021 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef SPHERE_CIRCUMRADIUS_H_ +#define SPHERE_CIRCUMRADIUS_H_ + +#include // for #include + +#include // for std::sqrt +#include + +namespace Gudhi { + +namespace cech_complex { + +/** @brief Compute the circumradius of the sphere passing through points given by a range of coordinates. + * The points are assumed to have the same dimension. */ +template +class Sphere_circumradius { + private: + Kernel kernel_; + public: + using Point = typename Kernel::Point_d; + using Point_cloud = typename std::vector; + + /** \brief Circumradius of sphere passing through two points using CGAL. + * + * @param[in] point_1 + * @param[in] point_2 + * @return Sphere circumradius passing through two points. + * \tparam Point must be a Kernel::Point_d from CGAL. 
+ * + */ + double operator()(const Point& point_1, const Point& point_2) const { + return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; + } + + /** \brief Circumradius of sphere passing through point cloud using CGAL. + * + * @param[in] point_cloud The points. + * @return Sphere circumradius passing through the points. + * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. + * + */ + double operator()(const Point_cloud& point_cloud) const { + return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); + } + +}; + +} // namespace cech_complex + +} // namespace Gudhi + +#endif // SPHERE_CIRCUMRADIUS_H_ diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index ca7a9778..4cf8b68f 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -22,7 +22,7 @@ // to construct Cech_complex from a OFF file of points #include #include -#include +#include #include #include // For EXACT or SAFE version @@ -36,7 +36,7 @@ using Point = typename Kernel::Point_d; using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { // ---------------------------------------------------------------------------- @@ -108,11 +108,11 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { std::clog << vertex << ","; vp.push_back(points.at(vertex)); } - std::clog << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1)) + std::clog << ") - distance =" << Gudhi::cech_complex::Sphere_circumradius()(vp.at(0), vp.at(1)) << " - filtration =" << st.filtration(f_simplex) << std::endl; BOOST_CHECK(vp.size() == 2); GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), - Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))); + Gudhi::cech_complex::Sphere_circumradius()(vp.at(0), vp.at(1))); } } @@ -153,7 +153,7 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10})); std::clog << "f1410= " << f1410 << std::endl; - // In this case, the computed sphere using CGAL kernel does not match the minimal enclosing ball; the filtration value check is therefore done against a hardcoded value + // In this case, the computed circumsphere using CGAL kernel does not match the minimal enclosing ball; the filtration value check is therefore done against a hardcoded value GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, 1.); Point_cloud points469; diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index ccf63e3e..82992f2d 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -9,7 +9,7 @@ */ #include -#include +#include #include #include #include @@ -28,9 +28,8 @@ using Filtration_value = Simplex_tree::Filtration_value; using Kernel = CGAL::Epeck_d; using Point = typename Kernel::Point_d; -using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; +using Cech_complex = Gudhi::cech_complex::Cech_complex; using Field_Zp = Gudhi::persistent_cohomology::Field_Zp; using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology; 
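Sphere_circumradius delegates the actual computation to CGAL's squared_distance_d and compute_squared_radius_d objects. As a sanity check of the underlying formula only (an illustrative NumPy sketch, not GUDHI code), the circumradius of the sphere through affinely independent points can be obtained by solving a linear system for the circumcenter:

    import numpy as np

    def circumradius(points):
        # The center c of the sphere through p_0, ..., p_k satisfies, for every i,
        # 2 (p_i - p_0) . c = |p_i|^2 - |p_0|^2; lstsq picks the center in the
        # affine hull of the points when the system is underdetermined.
        p = np.asarray(points, dtype=float)
        A = 2.0 * (p[1:] - p[0])
        b = np.sum(p[1:] ** 2, axis=1) - np.sum(p[0] ** 2)
        center, *_ = np.linalg.lstsq(A, b, rcond=None)
        return np.linalg.norm(center - p[0])

    print(circumradius([[1.0, 0.0], [0.0, 1.0], [-1.0, 0.0]]))  # unit circle -> 1.0
    print(circumradius([[0.0, 0.0], [2.0, 0.0]]))               # two points -> half the distance, 1.0
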
-- cgit v1.2.3 From fcc9fd9f01d3f9680afccac0c3aff894e6ea4ef3 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 11 Feb 2022 15:01:56 +0100 Subject: Update cech doc intro regarding code modification --- src/Cech_complex/doc/Intro_cech_complex.h | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h index 698f9749..644fd6cc 100644 --- a/src/Cech_complex/doc/Intro_cech_complex.h +++ b/src/Cech_complex/doc/Intro_cech_complex.h @@ -28,7 +28,7 @@ namespace cech_complex { * simplicial complex constructed * from a proximity graph. The set of all simplices is filtered by the radius of their minimal enclosing ball. * - * The input shall be a point cloud in an Euclidean space. + * The input shall be a range of points where a point is defined as CGAL kernel Point_d. * * \remark For people only interested in the topology of the \ref cech_complex (for instance persistence), * \ref alpha_complex is equivalent to the \ref cech_complex and much smaller if you do not bound the radii. @@ -37,8 +37,7 @@ namespace cech_complex { * \subsection cechalgorithm Algorithm * * Cech_complex first builds a proximity graph from a point cloud. - * The filtration value of each edge of the `Gudhi::Proximity_graph` is computed from - * `Gudhi::Minimal_enclosing_ball_radius` function. + * The filtration value of each edge of the `Gudhi::Proximity_graph` is computed using CGAL kernel functions. * * All edges that have a filtration value strictly greater than a user given maximal radius value, \f$max\_radius\f$, * are not inserted into the complex. @@ -60,13 +59,6 @@ namespace cech_complex { * * \image html "cech_complex_representation.png" "Čech complex expansion" * - * The minimal ball radius computation is insured by - * - * the miniball software (V3.0) - Smallest Enclosing Balls of Points - and distributed with GUDHI. - * Please refer to - * - * the miniball software design description for more information about this computation. - * * This radius computation is the reason why the Cech_complex is taking much more time to be computed than the * \ref rips_complex but it offers more topological guarantees. 
* -- cgit v1.2.3 From f55f93ba971768441de005fde59802229a1e008f Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 11 Feb 2022 16:11:16 +0100 Subject: Set back original simple cech example from points --- src/Cech_complex/example/cech_complex_example_from_points.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp index 38021e4a..034077eb 100644 --- a/src/Cech_complex/example/cech_complex_example_from_points.cpp +++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp @@ -46,11 +46,11 @@ int main() { // ---------------------------------------------------------------------------- // Init of a Cech complex from points // ---------------------------------------------------------------------------- - Filtration_value max_radius = 100.; //100.; + Filtration_value max_radius = 1.; Cech_complex cech_complex_from_points(points, max_radius); Simplex_tree stree; - cech_complex_from_points.create_complex(stree, 6); //6 + cech_complex_from_points.create_complex(stree, 2); // ---------------------------------------------------------------------------- // Display information about the one skeleton Cech complex // ---------------------------------------------------------------------------- -- cgit v1.2.3 From 3b7bd4fd8b21fc8ce0d7b2848b5ac9bdd397c080 Mon Sep 17 00:00:00 2001 From: Manu Date: Mon, 14 Feb 2022 13:58:01 +0100 Subject: A bug in the Entropy function has been solved --- src/python/gudhi/representations/vector_methods.py | 13 ++++++++----- src/python/test/test_representations.py | 11 ++++++++++- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py index f8078d03..57ca5999 100644 --- a/src/python/gudhi/representations/vector_methods.py +++ b/src/python/gudhi/representations/vector_methods.py @@ -510,16 +510,19 @@ class Entropy(BaseEstimator, TransformerMixin): for i in range(num_diag): orig_diagram, diagram, num_pts_in_diag = X[i], new_X[i], X[i].shape[0] try: - new_diagram = DiagramScaler(use=True, scalers=[([1], MaxAbsScaler())]).fit_transform([diagram])[0] + #new_diagram = DiagramScaler(use=True, scalers=[([1], MaxAbsScaler())]).fit_transform([diagram])[0] + new_diagram = DiagramScaler().fit_transform([diagram])[0] except ValueError: # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507 assert len(diagram) == 0 new_diagram = np.empty(shape = [0, 2]) - + + p = new_diagram[:,1] + L = sum(p) + p = p/L if self.mode == "scalar": - ent = - np.sum( np.multiply(new_diagram[:,1], np.log(new_diagram[:,1])) ) + ent = -np.dot(p, np.log(p)) Xfit.append(np.array([[ent]])) - else: ent = np.zeros(self.resolution) for j in range(num_pts_in_diag): @@ -527,7 +530,7 @@ class Entropy(BaseEstimator, TransformerMixin): min_idx = np.clip(np.ceil((px - self.sample_range[0]) / step_x).astype(int), 0, self.resolution) max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.resolution) for k in range(min_idx, max_idx): - ent[k] += (-1) * new_diagram[j,1] * np.log(new_diagram[j,1]) + ent[k] += (-1) * p[j] * np.log(p[j]) if self.normalized: ent = ent / np.linalg.norm(ent, ord=1) Xfit.append(np.reshape(ent,[1,-1])) diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py index d219ce7a..6a3dddc4 100755 --- a/src/python/test/test_representations.py +++ 
b/src/python/test/test_representations.py @@ -152,7 +152,16 @@ def test_vectorization_empty_diagrams(): scv = Entropy(mode="vector", normalized=False, resolution=random_resolution)(empty_diag) assert not np.any(scv) assert scv.shape[0] == random_resolution - + +def test_entropy_miscalculation(): + diag_ex = np.array([[0.0,1.0], [0.0,1.0], [0.0,2.0]]) + def pe(pd): + l = pd[:,1] - pd[:,0] + l = l/sum(l) + return -np.dot(l, np.log(l)) + sce = Entropy(mode="scalar") + assert [[pe_max(diag_ex)]] == sce.fit_transform([diag_ex]) + def test_kernel_empty_diagrams(): empty_diag = np.empty(shape = [0, 2]) assert SlicedWassersteinDistance(num_directions=100)(empty_diag, empty_diag) == 0. -- cgit v1.2.3 From 758111506dfb99cdc59981395386926e178d447c Mon Sep 17 00:00:00 2001 From: Manu Date: Wed, 23 Feb 2022 19:20:06 +0100 Subject: a test for gudhi.representations.Entropy has been added --- src/python/gudhi/representations/vector_methods.py | 2 +- src/python/test/test_representations.py | 12 +++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py index 57ca5999..ef1329d0 100644 --- a/src/python/gudhi/representations/vector_methods.py +++ b/src/python/gudhi/representations/vector_methods.py @@ -532,7 +532,7 @@ class Entropy(BaseEstimator, TransformerMixin): for k in range(min_idx, max_idx): ent[k] += (-1) * p[j] * np.log(p[j]) if self.normalized: - ent = ent / np.linalg.norm(ent, ord=1) + ent = ent / (np.linalg.norm(ent, ord=1)) Xfit.append(np.reshape(ent,[1,-1])) Xfit = np.concatenate(Xfit, axis=0) diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py index 6a3dddc4..553ceba0 100755 --- a/src/python/test/test_representations.py +++ b/src/python/test/test_representations.py @@ -160,7 +160,17 @@ def test_entropy_miscalculation(): l = l/sum(l) return -np.dot(l, np.log(l)) sce = Entropy(mode="scalar") - assert [[pe_max(diag_ex)]] == sce.fit_transform([diag_ex]) + assert [[pe(diag_ex)]] == sce.fit_transform([diag_ex]) + sce = Entropy(mode="vector", resolution=4, normalized=False) + pef = [-1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2), + -1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2), + -1/2*np.log(1/2), + 0.0] + assert all(([pef] == sce.fit_transform([diag_ex]))[0]) + sce = Entropy(mode="vector", resolution=4, normalized=True) + pefN = (sce.fit_transform([diag_ex]))[0] + area = np.linalg.norm(pefN, ord=1) + assert area==1 def test_kernel_empty_diagrams(): empty_diag = np.empty(shape = [0, 2]) -- cgit v1.2.3 From a1e8821384c58f7d843a3271f909c31c26649032 Mon Sep 17 00:00:00 2001 From: Manu Date: Wed, 23 Feb 2022 19:27:36 +0100 Subject: Revert "a test for gudhi.representations.Entropy has been added" This reverts commit 758111506dfb99cdc59981395386926e178d447c. 
--- src/python/gudhi/representations/vector_methods.py | 2 +- src/python/test/test_representations.py | 12 +----------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py index ef1329d0..57ca5999 100644 --- a/src/python/gudhi/representations/vector_methods.py +++ b/src/python/gudhi/representations/vector_methods.py @@ -532,7 +532,7 @@ class Entropy(BaseEstimator, TransformerMixin): for k in range(min_idx, max_idx): ent[k] += (-1) * p[j] * np.log(p[j]) if self.normalized: - ent = ent / (np.linalg.norm(ent, ord=1)) + ent = ent / np.linalg.norm(ent, ord=1) Xfit.append(np.reshape(ent,[1,-1])) Xfit = np.concatenate(Xfit, axis=0) diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py index 553ceba0..6a3dddc4 100755 --- a/src/python/test/test_representations.py +++ b/src/python/test/test_representations.py @@ -160,17 +160,7 @@ def test_entropy_miscalculation(): l = l/sum(l) return -np.dot(l, np.log(l)) sce = Entropy(mode="scalar") - assert [[pe(diag_ex)]] == sce.fit_transform([diag_ex]) - sce = Entropy(mode="vector", resolution=4, normalized=False) - pef = [-1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2), - -1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2), - -1/2*np.log(1/2), - 0.0] - assert all(([pef] == sce.fit_transform([diag_ex]))[0]) - sce = Entropy(mode="vector", resolution=4, normalized=True) - pefN = (sce.fit_transform([diag_ex]))[0] - area = np.linalg.norm(pefN, ord=1) - assert area==1 + assert [[pe_max(diag_ex)]] == sce.fit_transform([diag_ex]) def test_kernel_empty_diagrams(): empty_diag = np.empty(shape = [0, 2]) -- cgit v1.2.3 From 9c0b0d348b448d68d28fbab661626ab2263e77d0 Mon Sep 17 00:00:00 2001 From: Manu Date: Wed, 23 Feb 2022 19:31:48 +0100 Subject: A test of gudhi.representations.Entropy has been added --- src/python/test/test_representations.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py index 6a3dddc4..4a455bb6 100755 --- a/src/python/test/test_representations.py +++ b/src/python/test/test_representations.py @@ -160,8 +160,18 @@ def test_entropy_miscalculation(): l = l/sum(l) return -np.dot(l, np.log(l)) sce = Entropy(mode="scalar") - assert [[pe_max(diag_ex)]] == sce.fit_transform([diag_ex]) - + assert [[pe(diag_ex)]] == sce.fit_transform([diag_ex]) + sce = Entropy(mode="vector", resolution=4, normalized=False) + pef = [-1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2), + -1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2), + -1/2*np.log(1/2), + 0.0] + assert all(([pef] == sce.fit_transform([diag_ex]))[0]) + sce = Entropy(mode="vector", resolution=4, normalized=True) + pefN = (sce.fit_transform([diag_ex]))[0] + area = np.linalg.norm(pefN, ord=1) + assert area==1 + def test_kernel_empty_diagrams(): empty_diag = np.empty(shape = [0, 2]) assert SlicedWassersteinDistance(num_directions=100)(empty_diag, empty_diag) == 0. 
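As a standalone sanity check of the expected values in this test (not part of the patch, and assuming the sample range is inferred from the diagram as [0, 2], hence a step of 2/3 at resolution 4): the normalized lifetimes of [[0,1],[0,1],[0,2]] are p = [1/4, 1/4, 1/2], the scalar entropy is -sum(p*log(p)) = 1.5*log(2), and in vector mode the point [0,1] spreads its -p*log(p) mass over bins {0,1} while [0,2] covers bins {0,1,2}, which reproduces the expected vector pef:

import numpy as np

diag = np.array([[0., 1.], [0., 1.], [0., 2.]])
lifetimes = diag[:, 1] - diag[:, 0]
p = lifetimes / np.sum(lifetimes)            # [0.25, 0.25, 0.5]
print(-np.dot(p, np.log(p)))                 # scalar mode: 1.5*log(2) ~= 1.0397

a, b = -p[0] * np.log(p[0]), -p[2] * np.log(p[2])
print([2*a + b, 2*a + b, b, 0.0])            # vector mode: matches pef above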
-- cgit v1.2.3 From cbba4bf2005ce129691d358a2d7475c5132e39e0 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 28 Feb 2022 08:41:38 +0100 Subject: changed doc + added tensorflow indexing --- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 2 +- src/python/gudhi/tensorflow/cubical_layer.py | 2 +- src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py | 6 +++--- src/python/gudhi/tensorflow/rips_layer.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index 56bb4492..b8518cdb 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -42,7 +42,7 @@ Example of gradient computed from lower-star filtration of a simplex tree .. testoutput:: [2 4] - [-1. 1.] + [-1. 1.] Documentation for LowerStarSimplexTreeLayer ------------------------------------------- diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 99d02d66..369b0e54 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -35,7 +35,7 @@ def _Cubical(Xflat, Xdim, dimensions): class CubicalLayer(tf.keras.layers.Layer): """ - TensorFlow layer for computing cubical persistence out of a cubical complex + TensorFlow layer for computing the persistent homology of a cubical complex """ def __init__(self, dimensions, min_persistence=None, **kwargs): """ diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index 8da1f7fe..cf7df6de 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -47,7 +47,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): Constructor for the LowerStarSimplexTreeLayer class Parameters: - simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices + simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices. Note that its filtration values are modified in each call of the class. dimensions (List[int]): homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. 
is used for all dimensions) """ @@ -76,8 +76,8 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): essential_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][1]), [-1,1]) min_pers = self.min_persistence[idx_dim] if min_pers >= 0: - persistent_indices = np.argwhere(np.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers).ravel() - self.dgms.append((tf.gather(finite_dgm, indices=persistent_indices), essential_dgm)) + persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers) + self.dgms.append((tf.reshape(tf.gather(finite_dgm, indices=persistent_indices),[-1,2]), essential_dgm)) else: self.dgms.append((finite_dgm, essential_dgm)) return self.dgms diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 88d501c1..7b5edfa3 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -83,8 +83,8 @@ class RipsLayer(tf.keras.layers.Layer): essential_dgm = tf.zeros([cur_idx[1].shape[0],1]) min_pers = self.min_persistence[idx_dim] if min_pers >= 0: - persistent_indices = np.argwhere(np.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers).ravel() - self.dgms.append((tf.gather(finite_dgm, indices=persistent_indices), essential_dgm)) + persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers) + self.dgms.append((tf.reshape(tf.gather(finite_dgm, indices=persistent_indices),[-1,2]), essential_dgm)) else: self.dgms.append((finite_dgm, essential_dgm)) return self.dgms -- cgit v1.2.3 From 2eabdd9afcd35a345a95bfde76b7a0f1ef545788 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 28 Feb 2022 11:04:50 +0100 Subject: fix rips test code --- src/python/doc/rips_complex_tflow_itf_ref.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 104b0971..83421b2a 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -10,6 +10,11 @@ TensorFlow layer for Vietoris-Rips persistence Example of gradient computed from Vietoris-Rips persistence ----------------------------------------------------------- +.. testsetup:: + :hide: + import numpy + numpy.set_printoptions(precision=4) + .. testcode:: from gudhi.tensorflow import RipsLayer -- cgit v1.2.3 From 5c0c731fdd2bc41c2a4833be1612dca5a082c337 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Mar 2022 10:26:52 +0100 Subject: Modifications following PR review --- src/python/gudhi/datasets/remote.py | 60 ++++++++++++++++++--------------- src/python/test/test_remote_datasets.py | 38 ++++++++++----------- 2 files changed, 51 insertions(+), 47 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 3d6c01b0..618fa80e 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -20,14 +20,14 @@ def get_data_home(data_home = None): """ Return the path of the remote datasets directory. This folder is used to store remotely fetched datasets. - By default the datasets directory is set to a folder named 'remote_datasets' in the user home folder. + By default the datasets directory is set to a folder named 'gudhi_data' in the user home folder. Alternatively, it can be set by giving an explicit folder path. The '~' symbol is expanded to the user home folder. If the folder does not already exist, it is automatically created. 
Parameters ---------- data_home : string - The path to remote datasets directory. Default is `None`, meaning that the data home directory will be set to "~/remote_datasets". + The path to remote datasets directory. Default is `None`, meaning that the data home directory will be set to "~/gudhi_data". Returns ------- @@ -35,7 +35,7 @@ def get_data_home(data_home = None): The path to remote datasets directory. """ if data_home is None: - data_home = join("~", "remote_datasets") + data_home = join("~", "gudhi_data") data_home = expanduser(data_home) makedirs(data_home, exist_ok=True) return data_home @@ -43,12 +43,12 @@ def get_data_home(data_home = None): def clear_data_home(data_home = None): """ - Delete all the content of the data home cache. + Delete the data home cache directory and all its content. Parameters ---------- data_home : string, default is None. - The path to remote datasets directory. If `None`, the default directory to be removed is set to "~/remote_datasets". + The path to remote datasets directory. If `None`, the default directory to be removed is set to "~/gudhi_data". """ data_home = get_data_home(data_home) shutil.rmtree(data_home) @@ -77,7 +77,7 @@ def _checksum_sha256(file_path): sha256_hash.update(buffer) return sha256_hash.hexdigest() -def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = None, accept_license = False): +def _fetch_remote(url, filename, dirname = "gudhi_data", file_checksum = None, accept_license = False): """ Fetch the wanted dataset from the given url and save it in file_path. @@ -88,10 +88,10 @@ def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = No filename : string The name to give to downloaded file. dirname : string - The directory to save the file to. Default is "remote_datasets". + The directory to save the file to. Default is "gudhi_data". file_checksum : string The file checksum using sha256 to check against the one computed on the downloaded file. - Default is 'None'. + Default is 'None', which means the checksum is not checked. accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. Default is False. @@ -100,6 +100,11 @@ def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = No ------- file_path: string Full path of the created file. + + Raises + ------ + IOError + If the computed SHA256 checksum of file does not match the one given by the user. """ file_path = join(dirname, filename) @@ -123,32 +128,37 @@ def _fetch_remote(url, filename, dirname = "remote_datasets", file_checksum = No return file_path +def _get_archive_and_dir(dirname, filename, label): + if dirname is None: + dirname = join(get_data_home(dirname), label) + makedirs(dirname, exist_ok=True) + else: + dirname = get_data_home(dirname) + + archive_path = join(dirname, filename) + + return archive_path, dirname + def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): """ - Fetch "spiral_2d.npy" remotely. + Fetch spiral_2d dataset remotely. Parameters ---------- filename : string The name to give to downloaded file. Default is "spiral_2d.npy". dirname : string - The directory to save the file to. Default is None, meaning that the data home will be set to "~/remote_datasets/spiral_2d". + The directory to save the file to. Default is None, meaning that the data home will be set to "~/gudhi_data/spiral_2d". Returns ------- points: array - Array of points stored in "spiral_2d.npy". + Array of points. 
""" file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy" file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf' - if dirname is None: - dirname = join(get_data_home(dirname), "spiral_2d") - makedirs(dirname, exist_ok=True) - else: - dirname = get_data_home(dirname) - - archive_path = join(dirname, filename) + archive_path, dirname = _get_archive_and_dir(dirname, filename, "spiral_2d") if not exists(archive_path): file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) @@ -159,14 +169,14 @@ def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): """ - Fetch "bunny.npy" remotely and its LICENSE file. + Fetch Stanford bunny dataset remotely and its LICENSE file. Parameters ---------- filename : string The name to give to downloaded file. Default is "bunny.npy". dirname : string - The directory to save the file to. Default is None, meaning that the data home will be set to "~/remote_datasets/bunny". + The directory to save the file to. Default is None, meaning that the data home will be set to "~/gudhi_data/bunny". accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. Default is False. @@ -174,7 +184,7 @@ def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): Returns ------- points: array - Array of points stored in "bunny.npy". + Array of points. """ file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy" @@ -182,13 +192,7 @@ def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" license_checksum = 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a' - if dirname is None: - dirname = join(get_data_home(dirname), "bunny") - makedirs(dirname, exist_ok=True) - else: - dirname = get_data_home(dirname) - - archive_path = join(dirname, filename) + archive_path, dirname = _get_archive_and_dir(dirname, filename, "bunny") if not exists(archive_path): license_path = _fetch_remote(license_url, "LICENSE", dirname, license_checksum) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index cb53cb85..c44ac22b 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -22,7 +22,7 @@ def _check_dir_file_names(path_file_dw, filename, dirname): assert isfile(path_file_dw) names_dw = re.split(r' |/|\\', path_file_dw) - # Case where inner directories are created in "remote_datasets/"; e.g: "remote_datasets/bunny" + # Case where inner directories are created in "test_gudhi_data/"; e.g: "test_gudhi_data/bunny" if len(names_dw) >= 3: for i in range(len(names_dw)-1): assert re.split(r' |/|\\', dirname)[i] == names_dw[i] @@ -31,7 +31,7 @@ def _check_dir_file_names(path_file_dw, filename, dirname): assert dirname == names_dw[0] assert filename == names_dw[1] -def _check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): +def _check_fetch_output(url, filename, dirname = "test_gudhi_data", file_checksum = None): makedirs(dirname, exist_ok=True) path_file_dw = remote._fetch_remote(url, filename, dirname, file_checksum) _check_dir_file_names(path_file_dw, filename, dirname) @@ -41,9 +41,9 @@ def _get_bunny_license_print(accept_license = False): # Redirect 
stdout sys.stdout = capturedOutput - makedirs("remote_datasets/bunny", exist_ok=True) + makedirs("test_gudhi_data/bunny", exist_ok=True) - remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "remote_datasets/bunny", + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "test_gudhi_data/bunny", '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) # Reset redirect sys.stdout = sys.__stdout__ @@ -68,19 +68,21 @@ def test_fetch_remote_datasets(): # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) # Fetch LICENSE file - makedirs("remote_datasets/bunny", exist_ok=True) - remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "remote_datasets/bunny", + makedirs("test_gudhi_data/bunny", exist_ok=True) + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "test_gudhi_data/bunny", 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') - with open("remote_datasets/bunny/LICENSE") as f: + with open("test_gudhi_data/bunny/LICENSE") as f: assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() - # Remove "remote_datasets" directory and all its content - shutil.rmtree("remote_datasets") + # Remove "test_gudhi_data" directory and all its content + shutil.rmtree("test_gudhi_data") def test_fetch_remote_datasets_wrapped(): + # Check if gudhi_data default dir exists already + to_be_removed = not isdir(expanduser("~/gudhi_data")) # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) for i in range(2): spiral_2d_arr = remote.fetch_spiral_2d() @@ -90,29 +92,27 @@ def test_fetch_remote_datasets_wrapped(): assert bunny_arr.shape == (35947, 3) # Check that default dir was created - assert isdir(expanduser("~/remote_datasets")) + assert isdir(expanduser("~/gudhi_data")) # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default - spiral_2d_arr = remote.fetch_spiral_2d(dirname = "~/another_fetch_folder") + spiral_2d_arr = remote.fetch_spiral_2d(dirname = "./another_fetch_folder_for_test") assert spiral_2d_arr.shape == (114562, 2) - bunny_arr = remote.fetch_bunny(dirname = "~/another_fetch_folder") + bunny_arr = remote.fetch_bunny(dirname = "./another_fetch_folder_for_test") assert bunny_arr.shape == (35947, 3) - assert isdir(expanduser("~/another_fetch_folder")) + assert isdir(expanduser("./another_fetch_folder_for_test")) # Remove test folders del spiral_2d_arr del bunny_arr - shutil.rmtree(expanduser("~/remote_datasets")) - shutil.rmtree(expanduser("~/another_fetch_folder")) - - assert not isdir(expanduser("~/remote_datasets")) - assert not isdir(expanduser("~/another_fetch_folder")) + if to_be_removed: + shutil.rmtree(expanduser("~/gudhi_data")) + shutil.rmtree(expanduser("./another_fetch_folder_for_test")) def test_data_home(): # Test get_data_home and clear_data_home on new empty folder - empty_data_home = remote.get_data_home(data_home="empty_folder") + empty_data_home = remote.get_data_home(data_home="empty_folder_for_test") assert isdir(empty_data_home) remote.clear_data_home(data_home=empty_data_home) -- cgit v1.2.3 From 
af1bdcad5e8638ba9aa8b381aaabd5fc9cc5ae4e Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Wed, 2 Mar 2022 15:31:36 +0100 Subject: fix sphinx test for rips --- src/python/doc/rips_complex_tflow_itf_ref.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 83421b2a..6974f92d 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -11,7 +11,6 @@ Example of gradient computed from Vietoris-Rips persistence ----------------------------------------------------------- .. testsetup:: - :hide: import numpy numpy.set_printoptions(precision=4) @@ -30,6 +29,9 @@ Example of gradient computed from Vietoris-Rips persistence grads = tape.gradient(loss, [X]) print(grads[0].numpy()) +.. testcleanup:: + numpy.set_printoptions(precision=8) + .. testoutput:: [[-0.5 -0.5] -- cgit v1.2.3 From 159f869e76ef609858c6208a2b9d4cf069d9a163 Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 2 Mar 2022 16:30:31 +0100 Subject: Missing empty lines in sphinx doc --- src/python/doc/rips_complex_tflow_itf_ref.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 6974f92d..6c65c562 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -11,6 +11,7 @@ Example of gradient computed from Vietoris-Rips persistence ----------------------------------------------------------- .. testsetup:: + import numpy numpy.set_printoptions(precision=4) @@ -30,6 +31,7 @@ Example of gradient computed from Vietoris-Rips persistence print(grads[0].numpy()) .. testcleanup:: + numpy.set_printoptions(precision=8) .. 
testoutput:: -- cgit v1.2.3 From 4afd99a7ec5e1836ccb2903428e67bd2941790fb Mon Sep 17 00:00:00 2001 From: Marc Glisse Date: Wed, 2 Mar 2022 16:32:34 +0100 Subject: Restore submodule --- ext/gudhi-deploy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/gudhi-deploy b/ext/gudhi-deploy index 975d1bff..290ade10 160000 --- a/ext/gudhi-deploy +++ b/ext/gudhi-deploy @@ -1 +1 @@ -Subproject commit 975d1bffb317f3b84bf1a3d576cdfdbf7b45861c +Subproject commit 290ade1086bedbc96a35df886cadecabbf4072e6 -- cgit v1.2.3 From 58e2f677081b4e9f21c47d6286b329218aa825d6 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 2 Mar 2022 17:58:39 +0100 Subject: Remove file when given checksum does not match Add more details to doc Remove default dirname value in _fetch_remote Add points/ subfolder in fetching functions --- src/python/gudhi/datasets/remote.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 618fa80e..8b3baef4 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -8,7 +8,7 @@ # - YYYY/MM Author: Description of the modification from os.path import join, exists, expanduser -from os import makedirs +from os import makedirs, remove from urllib.request import urlretrieve import hashlib @@ -77,7 +77,7 @@ def _checksum_sha256(file_path): sha256_hash.update(buffer) return sha256_hash.hexdigest() -def _fetch_remote(url, filename, dirname = "gudhi_data", file_checksum = None, accept_license = False): +def _fetch_remote(url, filename, dirname, file_checksum = None, accept_license = False): """ Fetch the wanted dataset from the given url and save it in file_path. @@ -88,7 +88,7 @@ def _fetch_remote(url, filename, dirname = "gudhi_data", file_checksum = None, a filename : string The name to give to downloaded file. dirname : string - The directory to save the file to. Default is "gudhi_data". + The directory to save the file to. file_checksum : string The file checksum using sha256 to check against the one computed on the downloaded file. Default is 'None', which means the checksum is not checked. @@ -115,6 +115,8 @@ def _fetch_remote(url, filename, dirname = "gudhi_data", file_checksum = None, a if file_checksum is not None: checksum = _checksum_sha256(file_path) if file_checksum != checksum: + # Remove file and raise error + remove(file_path) raise IOError("{} has a SHA256 checksum : {}, " "different from expected : {}." "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, file_checksum)) @@ -148,17 +150,17 @@ def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): filename : string The name to give to downloaded file. Default is "spiral_2d.npy". dirname : string - The directory to save the file to. Default is None, meaning that the data home will be set to "~/gudhi_data/spiral_2d". + The directory to save the file to. Default is None, meaning that the downloaded file will be put in "~/gudhi_data/points/spiral_2d". Returns ------- - points: array - Array of points. + points: numpy array + Array of shape (114562, 2). 
""" file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy" file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf' - archive_path, dirname = _get_archive_and_dir(dirname, filename, "spiral_2d") + archive_path, dirname = _get_archive_and_dir(dirname, filename, "points/spiral_2d") if not exists(archive_path): file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) @@ -170,21 +172,22 @@ def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): """ Fetch Stanford bunny dataset remotely and its LICENSE file. + This dataset contains 35947 vertices. Parameters ---------- filename : string The name to give to downloaded file. Default is "bunny.npy". dirname : string - The directory to save the file to. Default is None, meaning that the data home will be set to "~/gudhi_data/bunny". + The directory to save the file to. Default is None, meaning that the downloaded files will be put in "~/gudhi_data/points/bunny". accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. Default is False. Returns ------- - points: array - Array of points. + points: numpy array + Array of shape (35947, 3). """ file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy" @@ -192,7 +195,7 @@ def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" license_checksum = 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a' - archive_path, dirname = _get_archive_and_dir(dirname, filename, "bunny") + archive_path, dirname = _get_archive_and_dir(dirname, filename, "points/bunny") if not exists(archive_path): license_path = _fetch_remote(license_url, "LICENSE", dirname, license_checksum) -- cgit v1.2.3 From af74316148165cb01c9f28b8b05e1b9764e4579a Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 4 Mar 2022 14:05:15 +0100 Subject: Use Euclidean_distance instead of CGAL dependant Sphere_circumradius for Rips in Cech benchmark --- src/Cech_complex/benchmark/cech_complex_benchmark.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index b283e1a8..bf013a81 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -107,8 +107,7 @@ int main(int argc, char* argv[]) { std::clog << itr->path().stem() << ";"; std::clog << radius << ";"; Gudhi::Clock rips_clock("Rips computation"); - Rips_complex rips_complex_from_points(off_reader_cgal.get_point_cloud(), radius, - Gudhi::cech_complex::Sphere_circumradius()); + Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius, Gudhi::Euclidean_distance()); Simplex_tree rips_stree; rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); // ------------------------------------------ -- cgit v1.2.3 From 77cd751b729d6b68a49ae99e86cff481220ec367 Mon Sep 17 00:00:00 2001 From: Manu Date: Mon, 7 Mar 2022 09:55:17 +0100 Subject: minor changes in entropy --- src/python/gudhi/representations/vector_methods.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/python/gudhi/representations/vector_methods.py 
b/src/python/gudhi/representations/vector_methods.py index 57ca5999..7d6a7f27 100644 --- a/src/python/gudhi/representations/vector_methods.py +++ b/src/python/gudhi/representations/vector_methods.py @@ -518,8 +518,7 @@ class Entropy(BaseEstimator, TransformerMixin): new_diagram = np.empty(shape = [0, 2]) p = new_diagram[:,1] - L = sum(p) - p = p/L + p = p/np.sum(p) if self.mode == "scalar": ent = -np.dot(p, np.log(p)) Xfit.append(np.array([[ent]])) @@ -529,8 +528,7 @@ class Entropy(BaseEstimator, TransformerMixin): [px,py] = orig_diagram[j,:2] min_idx = np.clip(np.ceil((px - self.sample_range[0]) / step_x).astype(int), 0, self.resolution) max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.resolution) - for k in range(min_idx, max_idx): - ent[k] += (-1) * p[j] * np.log(p[j]) + ent[min_idx:max_idx]-=p[j]*np.log(p[j]) if self.normalized: ent = ent / np.linalg.norm(ent, ord=1) Xfit.append(np.reshape(ent,[1,-1])) -- cgit v1.2.3 From d5e1760353e7d6ba66975a90f9a2768a48f0abf8 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 8 Mar 2022 15:27:19 +0100 Subject: Modify cech benchmark to include both Epick and Epeck cases --- .../benchmark/cech_complex_benchmark.cpp | 177 +++++++++++++-------- 1 file changed, 109 insertions(+), 68 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index bf013a81..9cf24542 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -18,6 +18,7 @@ #include #include +#include #include "boost/filesystem.hpp" // includes all needed Boost.Filesystem declarations @@ -32,10 +33,6 @@ using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; -using Kernel = CGAL::Epick_d>; -using Point_cgal = typename Kernel::Point_d; -using Points_off_reader_cgal = Gudhi::Points_off_reader; -using Cech_complex = Gudhi::cech_complex::Cech_complex; class Minimal_enclosing_ball_radius { public: @@ -61,79 +58,123 @@ class Minimal_enclosing_ball_radius { } }; -int main(int argc, char* argv[]) { - std::string off_file_points = "tore3D_1307.off"; - Filtration_value threshold = 1e20; +enum distance_type { Euclidean_dist, Minimal_enclosing_ball_dist, CGAL_dist }; - // Extract the points from the file filepoints - Points_off_reader off_reader(off_file_points); - Points_off_reader_cgal off_reader_cgal(off_file_points); +template> +void benchmark_prox_graph(const std::string& off_file_points, const Filtration_value& threshold, const std::string& msg, distance_type dist = CGAL_dist) { + if (dist != CGAL_dist) { + std::cerr << "Error: when CGAL is used, the distance should be CGAL_dist" << std::endl; + exit(-1); + } + if (!use_cgal) { + std::cerr << "Warning: if kernel is given, CGAL will be used" << std::endl; + } + using Point_cgal = typename Kernel::Point_d; + using Points_off_reader_cgal = Gudhi::Points_off_reader; - Gudhi::Clock euclidean_clock("Gudhi::Euclidean_distance"); - // Compute the proximity graph of the points - Proximity_graph euclidean_prox_graph = Gudhi::compute_proximity_graph( - off_reader.get_point_cloud(), threshold, Gudhi::Euclidean_distance()); + // Extract the points from the file filepoints + Points_off_reader_cgal off_reader_cgal(off_file_points); - std::clog << euclidean_clock << std::endl; + Gudhi::Clock 
cgal_circumsphere_clock("Gudhi::cech_complex::Sphere_circumradius_cgal()"); + // Compute the proximity graph of the points + Proximity_graph cgal_circumsphere_prox_graph = Gudhi::compute_proximity_graph(off_reader_cgal.get_point_cloud(), threshold, + Gudhi::cech_complex::Sphere_circumradius()); + std::clog << msg << " - " << cgal_circumsphere_clock << std::endl; +} - Gudhi::Clock miniball_clock("Minimal_enclosing_ball_radius"); - // Compute the proximity graph of the points - Proximity_graph miniball_prox_graph = Gudhi::compute_proximity_graph( - off_reader.get_point_cloud(), threshold, Minimal_enclosing_ball_radius()); - std::clog << miniball_clock << std::endl; +template +void benchmark_prox_graph(const std::string& off_file_points, const Filtration_value& threshold, const std::string& msg, distance_type dist) { + // Extract the points from the file filepoints + Points_off_reader off_reader(off_file_points); + + if (dist == Euclidean_dist) { + Gudhi::Clock euclidean_clock("Gudhi::Euclidean_distance"); + // Compute the proximity graph of the points + Proximity_graph euclidean_prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), threshold, + Gudhi::Euclidean_distance()); + std::clog << msg << " - " << euclidean_clock << std::endl; + } + else if (dist == Minimal_enclosing_ball_dist) { + Gudhi::Clock miniball_clock("Minimal_enclosing_ball_radius"); + // Compute the proximity graph of the points + Proximity_graph miniball_prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), threshold, + Minimal_enclosing_ball_radius()); + std::clog << msg << " - " << miniball_clock << std::endl; + } + else { + std::cerr << "Error: when CGAL is not used, the distance should be either Euclidean_dist or Minimal_enclosing_ball_dist" << std::endl; + exit(-1); + } +} + +template +void benchmark_cech(const std::string& off_file_points, const Filtration_value& radius, const int& dim_max) { + using Point_cgal = typename Kernel::Point_d; + using Points_off_reader_cgal = Gudhi::Points_off_reader; + using Cech_complex = Gudhi::cech_complex::Cech_complex; + + // Extract the points from the file filepoints + Points_off_reader_cgal off_reader_cgal(off_file_points); + + Gudhi::Clock cech_clock("Cech computation"); + Cech_complex cech_complex_from_points(off_reader_cgal.get_point_cloud(), radius); + Simplex_tree cech_stree; + cech_complex_from_points.create_complex(cech_stree, dim_max); + + // ------------------------------------------ + // Display information about the Cech complex + // ------------------------------------------ + double cech_sec = cech_clock.num_seconds(); + std::clog << cech_sec << " ; "; + std::clog << cech_stree.num_simplices() << " ; "; +} - Gudhi::Clock cgal_circumsphere_clock("Gudhi::cech_complex::Sphere_circumradius_cgal()"); - // Compute the proximity graph of the points - Proximity_graph cgal_circumsphere_prox_graph = Gudhi::compute_proximity_graph( - off_reader_cgal.get_point_cloud(), threshold, Gudhi::cech_complex::Sphere_circumradius()); - std::clog << cgal_circumsphere_clock << std::endl; +int main(int argc, char* argv[]) { + std::string off_file_points = "tore3D_1307.off"; + Filtration_value threshold = 1e20; + + benchmark_prox_graph(off_file_points, threshold, "Euclidean distance", Euclidean_dist); + benchmark_prox_graph(off_file_points, threshold, "Minimal_enclosing_ball", Minimal_enclosing_ball_dist); + benchmark_prox_graph>>(off_file_points, threshold, "Epick"); + benchmark_prox_graph>>(off_file_points, threshold, "Epeck"); - 
boost::filesystem::path full_path(boost::filesystem::current_path()); - std::clog << "Current path is : " << full_path << std::endl; + boost::filesystem::path full_path(boost::filesystem::current_path()); + std::clog << "Current path is : " << full_path << std::endl; - std::clog << "File name; Radius; Rips time; Cech time; Ratio Rips/Cech time; Rips nb simplices; Cech nb simplices;" + std::clog << "File name ; Radius ; Rips time ; Epick Cech time ; Epick Cech nb simplices ; Epeck Cech time ; Epeck Cech nb simplices ; Rips nb simplices;" << std::endl; - boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end - for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) { - if (!boost::filesystem::is_directory(itr->status())) { - if (itr->path().extension() == ".off") // see below - { - Points_off_reader off_reader(itr->path().string()); - Points_off_reader_cgal off_reader_cgal(itr->path().string()); - - Point p0 = off_reader.get_point_cloud()[0]; - - for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) { - std::clog << itr->path().stem() << ";"; - std::clog << radius << ";"; - Gudhi::Clock rips_clock("Rips computation"); - Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius, Gudhi::Euclidean_distance()); - Simplex_tree rips_stree; - rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); - // ------------------------------------------ - // Display information about the Rips complex - // ------------------------------------------ - double rips_sec = rips_clock.num_seconds(); - std::clog << rips_sec << ";"; - - Gudhi::Clock cech_clock("Cech computation"); - Cech_complex cech_complex_from_points(off_reader_cgal.get_point_cloud(), radius); - Simplex_tree cech_stree; - cech_complex_from_points.create_complex(cech_stree, p0.size() - 1); - // ------------------------------------------ - // Display information about the Cech complex - // ------------------------------------------ - double cech_sec = cech_clock.num_seconds(); - std::clog << cech_sec << ";"; - std::clog << cech_sec / rips_sec << ";"; - - assert(rips_stree.num_simplices() >= cech_stree.num_simplices()); - std::clog << rips_stree.num_simplices() << ";"; - std::clog << cech_stree.num_simplices() << ";" << std::endl; + boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end + for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) { + if (!boost::filesystem::is_directory(itr->status())) { + if (itr->path().extension() == ".off") { + Points_off_reader off_reader(itr->path().string()); + Point p0 = off_reader.get_point_cloud()[0]; + + for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) { + std::clog << itr->path().stem() << " ; "; + std::clog << radius << " ; "; + + Gudhi::Clock rips_clock("Rips computation"); + Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius, Gudhi::Euclidean_distance()); + Simplex_tree rips_stree; + rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); + // ------------------------------------------ + // Display information about the Rips complex + // ------------------------------------------ + double rips_sec = rips_clock.num_seconds(); + std::clog << rips_sec << " ; "; + + // -------------- + // Cech complex + // -------------- + benchmark_cech>>(itr->path().string(), radius, p0.size() - 1); + benchmark_cech>>(itr->path().string(), radius, p0.size() - 1); 
+ + std::clog << rips_stree.num_simplices() << ";" << std::endl; + } + } } - } } - } - return 0; + return 0; } -- cgit v1.2.3 From 0047eaacaffef2b3da6207123da3ef3d919c0b27 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 9 Mar 2022 15:56:23 +0100 Subject: Add bunny image to the datasets doc --- src/python/doc/datasets.rst | 6 ++++++ src/python/doc/img/bunny.png | Bin 0 -> 48040 bytes 2 files changed, 6 insertions(+) create mode 100644 src/python/doc/img/bunny.png diff --git a/src/python/doc/datasets.rst b/src/python/doc/datasets.rst index 4fa8a628..62b7dca0 100644 --- a/src/python/doc/datasets.rst +++ b/src/python/doc/datasets.rst @@ -112,6 +112,12 @@ Fetching datasets We provide some ready-to-use datasets that are not available by default when getting GUDHI, and need to be fetched explicitly. +.. figure:: ./img/bunny.png + :figclass: align-center + + 3D Stanford bunny with 35947 vertices. + + .. automodule:: gudhi.datasets.remote :members: :special-members: diff --git a/src/python/doc/img/bunny.png b/src/python/doc/img/bunny.png new file mode 100644 index 00000000..769aa530 Binary files /dev/null and b/src/python/doc/img/bunny.png differ -- cgit v1.2.3 From 17dc48527dcc8ee7e5eab95f9fdde3e236f4ad47 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 21 Mar 2022 17:47:59 +0100 Subject: extended_persistence uses directly get_persistent_pairs --- src/python/gudhi/simplex_tree.pxd | 4 +-- src/python/gudhi/simplex_tree.pyx | 3 +- .../include/Persistent_cohomology_interface.h | 40 ++++++++++++++++++++++ src/python/include/Simplex_tree_interface.h | 30 ---------------- src/python/test/test_simplex_tree.py | 4 +++ 5 files changed, 47 insertions(+), 34 deletions(-) diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd index 5c98fb4a..a8ed6d50 100644 --- a/src/python/gudhi/simplex_tree.pxd +++ b/src/python/gudhi/simplex_tree.pxd @@ -63,7 +63,6 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": bool prune_above_filtration(double filtration) nogil bool make_filtration_non_decreasing() nogil void compute_extended_filtration() nogil - vector[vector[pair[int, pair[double, double]]]] compute_extended_persistence_subdiagrams(vector[pair[int, pair[double, double]]] dgm, double min_persistence) nogil Simplex_tree_interface_full_featured* collapse_edges(int nb_collapse_iteration) nogil except + void reset_filtration(double filtration, int dimension) nogil bint operator==(Simplex_tree_interface_full_featured) nogil @@ -78,7 +77,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": pair[Simplex_tree_boundary_iterator, Simplex_tree_boundary_iterator] get_boundary_iterators(vector[int] simplex) nogil except + cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": - cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": + cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface>": Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max) nogil void compute_persistence(int homology_coeff_field, double min_persistence) nogil except + vector[pair[int, pair[double, double]]] get_persistence() nogil @@ -89,3 +88,4 @@ cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi": vector[pair[vector[int], vector[int]]] persistence_pairs() nogil pair[vector[vector[int]], vector[vector[int]]] lower_star_generators() nogil pair[vector[vector[int]], vector[vector[int]]] flag_generators() nogil + vector[vector[pair[int, 
pair[double, double]]]] compute_extended_persistence_subdiagrams(double min_persistence) nogil diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx index b8fabf78..1bbf1539 100644 --- a/src/python/gudhi/simplex_tree.pyx +++ b/src/python/gudhi/simplex_tree.pyx @@ -471,8 +471,7 @@ cdef class SimplexTree: del self.pcohptr self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), False) self.pcohptr.compute_persistence(homology_coeff_field, -1.) - persistence_result = self.pcohptr.get_persistence() - return self.get_ptr().compute_extended_persistence_subdiagrams(persistence_result, min_persistence) + return self.pcohptr.compute_extended_persistence_subdiagrams(min_persistence) def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False): diff --git a/src/python/include/Persistent_cohomology_interface.h b/src/python/include/Persistent_cohomology_interface.h index e5a3dfba..945378a0 100644 --- a/src/python/include/Persistent_cohomology_interface.h +++ b/src/python/include/Persistent_cohomology_interface.h @@ -12,6 +12,8 @@ #define INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_ #include +#include // for Extended_simplex_type + #include #include @@ -223,6 +225,44 @@ persistent_cohomology::Persistent_cohomology; + using Persistence_subdiagrams = std::vector>>; + + Persistence_subdiagrams compute_extended_persistence_subdiagrams(Filtration_value min_persistence){ + Persistence_subdiagrams pers_subs(4); + auto const& persistent_pairs = Base::get_persistent_pairs(); + for (auto pair : persistent_pairs) { + std::pair px = stptr_->decode_extended_filtration(stptr_->filtration(get<0>(pair)), + stptr_->efd); + std::pair py = stptr_->decode_extended_filtration(stptr_->filtration(get<1>(pair)), + stptr_->efd); + std::pair pd_point = std::make_pair(stptr_->dimension(get<0>(pair)), + std::make_pair(px.first, py.first)); + if(std::abs(px.first - py.first) > min_persistence){ + //Ordinary + if (px.second == Extended_simplex_type::UP && py.second == Extended_simplex_type::UP){ + pers_subs[0].push_back(pd_point); + } + // Relative + else if (px.second == Extended_simplex_type::DOWN && py.second == Extended_simplex_type::DOWN){ + pers_subs[1].push_back(pd_point); + } + else{ + // Extended+ + if (px.first < py.first){ + pers_subs[2].push_back(pd_point); + } + //Extended- + else{ + pers_subs[3].push_back(pd_point); + } + } + } + } + return pers_subs; + } + private: // A copy FilteredComplex* stptr_; diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h index 629f6083..dc9d01d7 100644 --- a/src/python/include/Simplex_tree_interface.h +++ b/src/python/include/Simplex_tree_interface.h @@ -133,36 +133,6 @@ class Simplex_tree_interface : public Simplex_tree { return; } - std::vector>>> compute_extended_persistence_subdiagrams(const std::vector>>& dgm, Filtration_value min_persistence){ - std::vector>>> new_dgm(4); - for (unsigned int i = 0; i < dgm.size(); i++){ - std::pair px = this->decode_extended_filtration(dgm[i].second.first, this->efd); - std::pair py = this->decode_extended_filtration(dgm[i].second.second, this->efd); - std::pair> pd_point = std::make_pair(dgm[i].first, std::make_pair(px.first, py.first)); - if(std::abs(px.first - py.first) > min_persistence){ - //Ordinary - if (px.second == Extended_simplex_type::UP && py.second == Extended_simplex_type::UP){ - new_dgm[0].push_back(pd_point); - } - // Relative - else if (px.second == Extended_simplex_type::DOWN && py.second == 
Extended_simplex_type::DOWN){ - new_dgm[1].push_back(pd_point); - } - else{ - // Extended+ - if (px.first < py.first){ - new_dgm[2].push_back(pd_point); - } - //Extended- - else{ - new_dgm[3].push_back(pd_point); - } - } - } - } - return new_dgm; - } - Simplex_tree_interface* collapse_edges(int nb_collapse_iteration) { #ifdef GUDHI_USE_EIGEN3 using Filtered_edge = std::tuple; diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py index a8180ce8..23458eb2 100755 --- a/src/python/test/test_simplex_tree.py +++ b/src/python/test/test_simplex_tree.py @@ -320,6 +320,10 @@ def test_extend_filtration(): ] dgms = st.extended_persistence(min_persistence=-1.) + assert len(dgms) == 4 + # Sort by (death-birth) descending - we are only interested in those with the longest life span + for idx in range(4): + dgms[idx] = sorted(dgms[idx], key=lambda x:(-abs(x[1][0]-x[1][1]))) assert dgms[0][0][1][0] == pytest.approx(2.) assert dgms[0][0][1][1] == pytest.approx(3.) -- cgit v1.2.3 From 27f8df308e3ed935e4ef9f62d23717efebdf36ae Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 12 Apr 2022 15:21:02 +0200 Subject: fix doc + reshape in cubical --- ext/gudhi-deploy | 2 +- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 +- src/python/gudhi/tensorflow/cubical_layer.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ext/gudhi-deploy b/ext/gudhi-deploy index 290ade10..975d1bff 160000 --- a/ext/gudhi-deploy +++ b/ext/gudhi-deploy @@ -1 +1 @@ -Subproject commit 290ade1086bedbc96a35df886cadecabbf4072e6 +Subproject commit 975d1bffb317f3b84bf1a3d576cdfdbf7b45861c diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 18b97adf..881a2950 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -19,7 +19,7 @@ Example of gradient computed from cubical persistence cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0][0] + dgm = cl.call(X)[0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 369b0e54..31c44205 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -72,5 +72,5 @@ class CubicalLayer(tf.keras.layers.Layer): if min_pers >= 0: finite_dgm = self.dgms[idx_dim] persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers) - self.dgms[idx_dim] = tf.gather(finite_dgm, indices=persistent_indices) + self.dgms[idx_dim] = tf.reshape(tf.gather(finite_dgm, indices=persistent_indices), [-1,2]) return self.dgms -- cgit v1.2.3 From cc723a7a3735a44491bd1085b6bb6c47272b73ed Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Sat, 16 Apr 2022 11:21:09 +0200 Subject: fix test --- src/python/test/test_diff.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index bab0d10c..e0c99d07 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -22,7 +22,7 @@ def test_cubical_diff(): cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0][0] + dgm = cl.call(X)[0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 @@ 
-34,7 +34,7 @@ def test_nonsquare_cubical_diff(): cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0][0] + dgm = cl.call(X)[0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert np.abs(grads[0].numpy()-np.array([[0.,0.5,-0.5],[0.,0.,0.]])).sum() <= 1e-6 -- cgit v1.2.3 From dbb65c3f3eb82d080e47b40b52deb03814d8da31 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 25 Apr 2022 13:18:11 +0200 Subject: Remove proximity_graph computation benchmark Add Dynamic_dimension_tag case --- .../benchmark/cech_complex_benchmark.cpp | 97 ++-------------------- 1 file changed, 8 insertions(+), 89 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index 9cf24542..d2a71879 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -10,12 +10,10 @@ #include #include -#include #include #include #include #include -#include #include #include @@ -29,86 +27,11 @@ using Simplex_tree = Gudhi::Simplex_tree<>; using Filtration_value = Simplex_tree::Filtration_value; using Point = std::vector; -using Point_cloud = std::vector; using Points_off_reader = Gudhi::Points_off_reader; -using Proximity_graph = Gudhi::Proximity_graph; using Rips_complex = Gudhi::rips_complex::Rips_complex; -class Minimal_enclosing_ball_radius { - public: - // boost::range_value is not SFINAE-friendly so we cannot use it in the return type - template - typename std::iterator_traits::type>::value_type operator()( - const Point& p1, const Point& p2) const { - // Type def - using Point_cloud = std::vector; - using Point_iterator = typename Point_cloud::const_iterator; - using Coordinate_iterator = typename Point::const_iterator; - using Min_sphere = - typename Gudhi::Miniball::Miniball>; - - Point_cloud point_cloud; - point_cloud.push_back(p1); - point_cloud.push_back(p2); - - GUDHI_CHECK((p1.end() - p1.begin()) == (p2.end() - p2.begin()), "inconsistent point dimensions"); - Min_sphere min_sphere(p1.end() - p1.begin(), point_cloud.begin(), point_cloud.end()); - - return std::sqrt(min_sphere.squared_radius()); - } -}; - -enum distance_type { Euclidean_dist, Minimal_enclosing_ball_dist, CGAL_dist }; - -template> -void benchmark_prox_graph(const std::string& off_file_points, const Filtration_value& threshold, const std::string& msg, distance_type dist = CGAL_dist) { - if (dist != CGAL_dist) { - std::cerr << "Error: when CGAL is used, the distance should be CGAL_dist" << std::endl; - exit(-1); - } - if (!use_cgal) { - std::cerr << "Warning: if kernel is given, CGAL will be used" << std::endl; - } - using Point_cgal = typename Kernel::Point_d; - using Points_off_reader_cgal = Gudhi::Points_off_reader; - - // Extract the points from the file filepoints - Points_off_reader_cgal off_reader_cgal(off_file_points); - - Gudhi::Clock cgal_circumsphere_clock("Gudhi::cech_complex::Sphere_circumradius_cgal()"); - // Compute the proximity graph of the points - Proximity_graph cgal_circumsphere_prox_graph = Gudhi::compute_proximity_graph(off_reader_cgal.get_point_cloud(), threshold, - Gudhi::cech_complex::Sphere_circumradius()); - std::clog << msg << " - " << cgal_circumsphere_clock << std::endl; -} - -template -void benchmark_prox_graph(const std::string& off_file_points, const Filtration_value& threshold, const std::string& msg, distance_type dist) { - // Extract the points from the file filepoints - Points_off_reader 
off_reader(off_file_points); - - if (dist == Euclidean_dist) { - Gudhi::Clock euclidean_clock("Gudhi::Euclidean_distance"); - // Compute the proximity graph of the points - Proximity_graph euclidean_prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), threshold, - Gudhi::Euclidean_distance()); - std::clog << msg << " - " << euclidean_clock << std::endl; - } - else if (dist == Minimal_enclosing_ball_dist) { - Gudhi::Clock miniball_clock("Minimal_enclosing_ball_radius"); - // Compute the proximity graph of the points - Proximity_graph miniball_prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), threshold, - Minimal_enclosing_ball_radius()); - std::clog << msg << " - " << miniball_clock << std::endl; - } - else { - std::cerr << "Error: when CGAL is not used, the distance should be either Euclidean_dist or Minimal_enclosing_ball_dist" << std::endl; - exit(-1); - } -} - template -void benchmark_cech(const std::string& off_file_points, const Filtration_value& radius, const int& dim_max) { +Simplex_tree benchmark_cech(const std::string& off_file_points, const Filtration_value& radius, const int& dim_max) { using Point_cgal = typename Kernel::Point_d; using Points_off_reader_cgal = Gudhi::Points_off_reader; using Cech_complex = Gudhi::cech_complex::Cech_complex; @@ -126,23 +49,16 @@ void benchmark_cech(const std::string& off_file_points, const Filtration_value& // ------------------------------------------ double cech_sec = cech_clock.num_seconds(); std::clog << cech_sec << " ; "; - std::clog << cech_stree.num_simplices() << " ; "; + return cech_stree; } int main(int argc, char* argv[]) { - std::string off_file_points = "tore3D_1307.off"; - Filtration_value threshold = 1e20; - - benchmark_prox_graph(off_file_points, threshold, "Euclidean distance", Euclidean_dist); - benchmark_prox_graph(off_file_points, threshold, "Minimal_enclosing_ball", Minimal_enclosing_ball_dist); - benchmark_prox_graph>>(off_file_points, threshold, "Epick"); - benchmark_prox_graph>>(off_file_points, threshold, "Epeck"); - boost::filesystem::path full_path(boost::filesystem::current_path()); std::clog << "Current path is : " << full_path << std::endl; - std::clog << "File name ; Radius ; Rips time ; Epick Cech time ; Epick Cech nb simplices ; Epeck Cech time ; Epeck Cech nb simplices ; Rips nb simplices;" - << std::endl; + std::clog << "File name ; Radius ; Rips time ; Dim-3 Epick Cech time ; Dynamic_dim Epick Cech time ; " + "Dim-3 Epeck Cech time ; Dynamic_dim Epeck Cech time ; Cech nb simplices ; Rips nb simplices;" + << std::endl; boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) { if (!boost::filesystem::is_directory(itr->status())) { @@ -168,8 +84,11 @@ int main(int argc, char* argv[]) { // Cech complex // -------------- benchmark_cech>>(itr->path().string(), radius, p0.size() - 1); + benchmark_cech>(itr->path().string(), radius, p0.size() - 1); benchmark_cech>>(itr->path().string(), radius, p0.size() - 1); + auto cech_stree = benchmark_cech>(itr->path().string(), radius, p0.size() - 1); + std::clog << cech_stree.num_simplices() << " ; "; std::clog << rips_stree.num_simplices() << ";" << std::endl; } } -- cgit v1.2.3 From 8730db2e8d1a8663358168ff6a20881c97773002 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 25 Apr 2022 14:57:26 +0200 Subject: Remove cech_complex_step_by_step example and Miniball --- 
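Note: after this commit, the Cech_complex wrapper (see cech_complex_example_from_points.cpp) is the only documented way to build a Cech complex, and the enclosing-ball radius comes from CGAL's compute_squared_radius_d_object() instead of the bundled Miniball. A minimal sketch of the wrapper-based construction, assuming the templated point-range constructor shown later in this series and the wrapper's create_complex entry point (not visible in these hunks); the points and radius are illustrative:

#include <gudhi/Cech_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epick_d.h>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Kernel = CGAL::Epick_d<CGAL::Dimension_tag<2>>;
  using Point = Kernel::Point_d;
  using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;

  // Three points in the plane; any range of Kernel::Point_d works.
  std::vector<Point> points;
  points.emplace_back(1., 0.);
  points.emplace_back(0., 1.);
  points.emplace_back(2., 1.);

  Cech_complex cech(points, 1.);  // the constructor builds the proximity graph internally
  Simplex_tree stree;
  cech.create_complex(stree, 2);  // expansion blocked by the minimal enclosing ball radius
  return 0;
}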
src/Cech_complex/doc/Intro_cech_complex.h | 4 - src/Cech_complex/example/CMakeLists.txt | 10 - .../example/cech_complex_step_by_step.cpp | 150 ------ src/Cech_complex/include/gudhi/Miniball.COPYRIGHT | 4 - src/Cech_complex/include/gudhi/Miniball.README | 26 - src/Cech_complex/include/gudhi/Miniball.hpp | 523 --------------------- src/common/doc/examples.h | 1 - src/common/doc/main_page.md | 2 +- 8 files changed, 1 insertion(+), 719 deletions(-) delete mode 100644 src/Cech_complex/example/cech_complex_step_by_step.cpp delete mode 100644 src/Cech_complex/include/gudhi/Miniball.COPYRIGHT delete mode 100644 src/Cech_complex/include/gudhi/Miniball.README delete mode 100644 src/Cech_complex/include/gudhi/Miniball.hpp diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h index 644fd6cc..595fb64b 100644 --- a/src/Cech_complex/doc/Intro_cech_complex.h +++ b/src/Cech_complex/doc/Intro_cech_complex.h @@ -62,10 +62,6 @@ namespace cech_complex { * This radius computation is the reason why the Cech_complex is taking much more time to be computed than the * \ref rips_complex but it offers more topological guarantees. * - * If the Cech_complex interfaces are not detailed enough for your need, please refer to - * - * cech_complex_step_by_step.cpp example, where the graph construction over the Simplex_tree is more detailed. - * * \subsection cechpointscloudexample Example from a point cloud * * This example builds the proximity graph from the given points, and maximal radius values. diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt index 4d11ace2..7d52ed5e 100644 --- a/src/Cech_complex/example/CMakeLists.txt +++ b/src/Cech_complex/example/CMakeLists.txt @@ -1,16 +1,6 @@ project(Cech_complex_examples) if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1) - if (TARGET Boost::program_options) - add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp ) - target_link_libraries(Cech_complex_example_step_by_step Boost::program_options) - if (TBB_FOUND) - target_link_libraries(Cech_complex_example_step_by_step ${TBB_LIBRARIES}) - endif() - add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-d" "3") - endif() - add_executable ( Cech_complex_example_from_points cech_complex_example_from_points.cpp) if (TBB_FOUND) target_link_libraries(Cech_complex_example_from_points ${TBB_LIBRARIES}) diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp deleted file mode 100644 index 4401f6af..00000000 --- a/src/Cech_complex/example/cech_complex_step_by_step.cpp +++ /dev/null @@ -1,150 +0,0 @@ -/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2018 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#include -#include -#include -#include - -#include - -#include - -#include -#include -#include // infinity -#include // for pair -#include - -// ---------------------------------------------------------------------------- -// cech_complex_step_by_step is an example of each step that is required to -// build a Cech over a Simplex_tree. 
Please refer to cech_complex_example_from_points to see -// how to do the same thing with the Cech complex wrapper for less detailed -// steps. -// ---------------------------------------------------------------------------- - -// Types definition -using Simplex_tree = Gudhi::Simplex_tree<>; -using Simplex_handle = Simplex_tree::Simplex_handle; -using Filtration_value = Simplex_tree::Filtration_value; -using Kernel = CGAL::Epeck_d; -using Point = typename Kernel::Point_d; -using Points_off_reader = Gudhi::Points_off_reader; -using Proximity_graph = Gudhi::Proximity_graph; - -class Cech_blocker { - private: - using Point_cloud = std::vector; - - public: - bool operator()(Simplex_handle sh) { - std::vector points; - for (auto vertex : simplex_tree_.simplex_vertex_range(sh)) { - points.push_back(point_cloud_[vertex]); -#ifdef DEBUG_TRACES - std::clog << "#(" << vertex << ")#"; -#endif // DEBUG_TRACES - } - Filtration_value radius = Gudhi::cech_complex::Sphere_circumradius()(points); -#ifdef DEBUG_TRACES - std::clog << "radius = " << radius << " - " << (radius > max_radius_) << std::endl; -#endif // DEBUG_TRACES - simplex_tree_.assign_filtration(sh, radius); - return (radius > max_radius_); - } - Cech_blocker(Simplex_tree& simplex_tree, Filtration_value max_radius, const std::vector& point_cloud) - : simplex_tree_(simplex_tree), max_radius_(max_radius), point_cloud_(point_cloud) { - } - - private: - Simplex_tree simplex_tree_; - Filtration_value max_radius_; - std::vector point_cloud_; -}; - -void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max); - -int main(int argc, char* argv[]) { - std::string off_file_points; - Filtration_value max_radius; - int dim_max; - - program_options(argc, argv, off_file_points, max_radius, dim_max); - - // Extract the points from the file filepoints - Points_off_reader off_reader(off_file_points); - - // Compute the proximity graph of the points - Proximity_graph prox_graph = Gudhi::compute_proximity_graph(off_reader.get_point_cloud(), max_radius, - Gudhi::cech_complex::Sphere_circumradius()); - - // Construct the Cech complex in a Simplex Tree - Simplex_tree st; - // insert the proximity graph in the simplex tree - st.insert_graph(prox_graph); - // expand the graph until dimension dim_max - st.expansion_with_blockers(dim_max, Cech_blocker(st, max_radius, off_reader.get_point_cloud())); - - std::clog << "The complex contains " << st.num_simplices() << " simplices \n"; - std::clog << " and has dimension " << st.dimension() << " \n"; - - // Sort the simplices in the order of the filtration - st.initialize_filtration(); - -#if DEBUG_TRACES - std::clog << "********************************************************************\n"; - std::clog << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n"; - std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n"; - for (auto f_simplex : st.filtration_simplex_range()) { - std::clog << " " - << "[" << st.filtration(f_simplex) << "] "; - for (auto vertex : st.simplex_vertex_range(f_simplex)) { - std::clog << static_cast(vertex) << " "; - } - std::clog << std::endl; - } -#endif // DEBUG_TRACES - - return 0; -} - -void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max) { - namespace po = boost::program_options; - po::options_description hidden("Hidden options"); - hidden.add_options()("input-file", po::value(&off_file_points), - 
"Name of an OFF file containing a point set.\n"); - - po::options_description visible("Allowed options", 100); - visible.add_options()("help,h", "produce help message")( - "max-radius,r", - po::value(&max_radius)->default_value(std::numeric_limits::infinity()), - "Maximal length of an edge for the Cech complex construction.")( - "cpx-dimension,d", po::value(&dim_max)->default_value(1), - "Maximal dimension of the Cech complex we want to compute."); - - po::positional_options_description pos; - pos.add("input-file", 1); - - po::options_description all; - all.add(visible).add(hidden); - - po::variables_map vm; - po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm); - po::notify(vm); - - if (vm.count("help") || !vm.count("input-file")) { - std::clog << std::endl; - std::clog << "Construct a Cech complex defined on a set of input points.\n \n"; - - std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl; - std::clog << visible << std::endl; - exit(-1); - } -} diff --git a/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT b/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT deleted file mode 100644 index dbe4c553..00000000 --- a/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT +++ /dev/null @@ -1,4 +0,0 @@ -The miniball software is available under the GNU General Public License (GPLv3 - https://www.gnu.org/copyleft/gpl.html). -If your intended use is not compliant with this license, please buy a commercial license (EUR 500 - https://people.inf.ethz.ch/gaertner/subdir/software/miniball/license.html). -You need a license if the software that you develop using Miniball V3.0 is not open source. - diff --git a/src/Cech_complex/include/gudhi/Miniball.README b/src/Cech_complex/include/gudhi/Miniball.README deleted file mode 100644 index 033d8953..00000000 --- a/src/Cech_complex/include/gudhi/Miniball.README +++ /dev/null @@ -1,26 +0,0 @@ -https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html - -Smallest Enclosing Balls of Points - Fast and Robust in C++. -(high-quality software for smallest enclosing balls of balls is available in the computational geometry algorithms library CGAL) - - -This is the miniball software (V3.0) for computing smallest enclosing balls of points in arbitrary dimensions. It consists of a C++ header file Miniball.hpp (around 500 lines of code) and two example programs miniball_example.cpp and miniball_example_containers.cpp that demonstrate the usage. The first example stores the coordinates of the input points in a two-dimensional array, the second example uses a list of vectors to show how generic containers can be used. - -Credits: Aditya Gupta and Alexandros Konstantinakis-Karmis have significantly contributed to this version of the software. - -Changes - https://people.inf.ethz.ch/gaertner/subdir/software/miniball/changes.txt - from previous versions. - -The theory - https://people.inf.ethz.ch/gaertner/subdir/texts/own_work/esa99_final.pdf - behind the miniball software (Proc. 7th Annual European Symposium on Algorithms (ESA), Lecture Notes in Computer Science 1643, Springer-Verlag, pp.325-338, 1999). - -Main Features: - - Very fast in low dimensions. 1 million points in 5-space are processed within 0.05 seconds on any recent machine. - - High numerical stability. Almost all input degeneracies (cospherical points, multiple points, points very close together) are routinely handled. - - Easily integrates into your code. 
You can freely choose the coordinate type of your points and the container to store the points. If you still need to adapt the code, the header is small and readable and contains documentation for all major methods. - - -Changes done for the GUDHI version of MiniBall: - - Add include guard - - Move Miniball namespace inside a new Gudhi namespace diff --git a/src/Cech_complex/include/gudhi/Miniball.hpp b/src/Cech_complex/include/gudhi/Miniball.hpp deleted file mode 100644 index ce6cbb5b..00000000 --- a/src/Cech_complex/include/gudhi/Miniball.hpp +++ /dev/null @@ -1,523 +0,0 @@ -// Copright (C) 1999-2013, Bernd Gaertner -// $Rev: 3581 $ -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . -// -// Contact: -// -------- -// Bernd Gaertner -// Institute of Theoretical Computer Science -// ETH Zuerich -// CAB G31.1 -// CH-8092 Zuerich, Switzerland -// http://www.inf.ethz.ch/personal/gaertner - -#ifndef MINIBALL_HPP_ -#define MINIBALL_HPP_ - -#include -#include -#include -#include -#include - -namespace Gudhi { - -namespace Miniball { - - // Global Functions - // ================ - template - inline NT mb_sqr (NT r) {return r*r;} - - // Functors - // ======== - - // functor to map a point iterator to the corresponding coordinate iterator; - // generic version for points whose coordinate containers have begin() - template < typename Pit_, typename Cit_ > - struct CoordAccessor { - typedef Pit_ Pit; - typedef Cit_ Cit; - inline Cit operator() (Pit it) const { return (*it).begin(); } - }; - - // partial specialization for points whose coordinate containers are arrays - template < typename Pit_, typename Cit_ > - struct CoordAccessor { - typedef Pit_ Pit; - typedef Cit_* Cit; - inline Cit operator() (Pit it) const { return *it; } - }; - - // Class Declaration - // ================= - - template - class Miniball { - private: - // types - // The iterator type to go through the input points - typedef typename CoordAccessor::Pit Pit; - // The iterator type to go through the coordinates of a single point. - typedef typename CoordAccessor::Cit Cit; - // The coordinate type - typedef typename std::iterator_traits::value_type NT; - // The iterator to go through the support points - typedef typename std::list::iterator Sit; - - // data members... 
- const int d; // dimension - Pit points_begin; - Pit points_end; - CoordAccessor coord_accessor; - double time; - const NT nt0; // NT(0) - - //...for the algorithms - std::list L; - Sit support_end; - int fsize; // number of forced points - int ssize; // number of support points - - // ...for the ball updates - NT* current_c; - NT current_sqr_r; - NT** c; - NT* sqr_r; - - // helper arrays - NT* q0; - NT* z; - NT* f; - NT** v; - NT** a; - - public: - // The iterator type to go through the support points - typedef typename std::list::const_iterator SupportPointIterator; - - // PRE: [begin, end) is a nonempty range - // POST: computes the smallest enclosing ball of the points in the range - // [begin, end); the functor a maps a point iterator to an iterator - // through the d coordinates of the point - Miniball (int d_, Pit begin, Pit end, CoordAccessor ca = CoordAccessor()); - - // POST: returns a pointer to the first element of an array that holds - // the d coordinates of the center of the computed ball - const NT* center () const; - - // POST: returns the squared radius of the computed ball - NT squared_radius () const; - - // POST: returns the number of support points of the computed ball; - // the support points form a minimal set with the same smallest - // enclosing ball as the input set; in particular, the support - // points are on the boundary of the computed ball, and their - // number is at most d+1 - int nr_support_points () const; - - // POST: returns an iterator to the first support point - SupportPointIterator support_points_begin () const; - - // POST: returns a past-the-end iterator for the range of support points - SupportPointIterator support_points_end () const; - - // POST: returns the maximum excess of any input point w.r.t. the computed - // ball, divided by the squared radius of the computed ball. The - // excess of a point is the difference between its squared distance - // from the center and the squared radius; Ideally, the return value - // is 0. subopt is set to the absolute value of the most negative - // coefficient in the affine combination of the support points that - // yields the center. Ideally, this is a convex combination, and there - // is no negative coefficient in which case subopt is set to 0. 
- NT relative_error (NT& subopt) const; - - // POST: return true if the relative error is at most tol, and the - // suboptimality is 0; the default tolerance is 10 times the - // coordinate type's machine epsilon - bool is_valid (NT tol = NT(10) * std::numeric_limits::epsilon()) const; - - // POST: returns the time in seconds taken by the constructor call for - // computing the smallest enclosing ball - double get_time() const; - - // POST: deletes dynamically allocated arrays - ~Miniball(); - - private: - void mtf_mb (Sit n); - void mtf_move_to_front (Sit j); - void pivot_mb (Pit n); - void pivot_move_to_front (Pit j); - NT excess (Pit pit) const; - void pop (); - bool push (Pit pit); - NT suboptimality () const; - void create_arrays(); - void delete_arrays(); - }; - - // Class Definition - // ================ - template - Miniball::Miniball (int d_, Pit begin, Pit end, - CoordAccessor ca) - : d (d_), - points_begin (begin), - points_end (end), - coord_accessor (ca), - time (clock()), - nt0 (NT(0)), - L(), - support_end (L.begin()), - fsize(0), - ssize(0), - current_c (NULL), - current_sqr_r (NT(-1)), - c (NULL), - sqr_r (NULL), - q0 (NULL), - z (NULL), - f (NULL), - v (NULL), - a (NULL) - { - assert (points_begin != points_end); - create_arrays(); - - // set initial center - for (int j=0; j - Miniball::~Miniball() - { - delete_arrays(); - } - - template - void Miniball::create_arrays() - { - c = new NT*[d+1]; - v = new NT*[d+1]; - a = new NT*[d+1]; - for (int i=0; i - void Miniball::delete_arrays() - { - delete[] f; - delete[] z; - delete[] q0; - delete[] sqr_r; - for (int i=0; i - const typename Miniball::NT* - Miniball::center () const - { - return current_c; - } - - template - typename Miniball::NT - Miniball::squared_radius () const - { - return current_sqr_r; - } - - template - int Miniball::nr_support_points () const - { - assert (ssize < d+2); - return ssize; - } - - template - typename Miniball::SupportPointIterator - Miniball::support_points_begin () const - { - return L.begin(); - } - - template - typename Miniball::SupportPointIterator - Miniball::support_points_end () const - { - return support_end; - } - - template - typename Miniball::NT - Miniball::relative_error (NT& subopt) const - { - NT e, max_e = nt0; - // compute maximum absolute excess of support points - for (SupportPointIterator it = support_points_begin(); - it != support_points_end(); ++it) { - e = excess (*it); - if (e < nt0) e = -e; - if (e > max_e) { - max_e = e; - } - } - // compute maximum excess of any point - for (Pit i = points_begin; i != points_end; ++i) - if ((e = excess (i)) > max_e) - max_e = e; - - subopt = suboptimality(); - assert (current_sqr_r > nt0 || max_e == nt0); - return (current_sqr_r == nt0 ? nt0 : max_e / current_sqr_r); - } - - template - bool Miniball::is_valid (NT tol) const - { - NT suboptimality; - return ( (relative_error (suboptimality) <= tol) && (suboptimality == 0) ); - } - - template - double Miniball::get_time() const - { - return time; - } - - template - void Miniball::mtf_mb (Sit n) - { - // Algorithm 1: mtf_mb (L_{n-1}, B), where L_{n-1} = [L.begin, n) - // B: the set of forced points, defining the current ball - // S: the superset of support points computed by the algorithm - // -------------------------------------------------------------- - // from B. 
Gaertner, Fast and Robust Smallest Enclosing Balls, ESA 1999, - // http://www.inf.ethz.ch/personal/gaertner/texts/own_work/esa99_final.pdf - - // PRE: B = S - assert (fsize == ssize); - - support_end = L.begin(); - if ((fsize) == d+1) return; - - // incremental construction - for (Sit i = L.begin(); i != n;) - { - // INV: (support_end - L.begin() == |S|-|B|) - assert (std::distance (L.begin(), support_end) == ssize - fsize); - - Sit j = i++; - if (excess(*j) > nt0) - if (push(*j)) { // B := B + p_i - mtf_mb (j); // mtf_mb (L_{i-1}, B + p_i) - pop(); // B := B - p_i - mtf_move_to_front(j); - } - } - // POST: the range [L.begin(), support_end) stores the set S\B - } - - template - void Miniball::mtf_move_to_front (Sit j) - { - if (support_end == j) - support_end++; - L.splice (L.begin(), L, j); - } - - template - void Miniball::pivot_mb (Pit n) - { - // Algorithm 2: pivot_mb (L_{n-1}), where L_{n-1} = [L.begin, n) - // -------------------------------------------------------------- - // from B. Gaertner, Fast and Robust Smallest Enclosing Balls, ESA 1999, - // http://www.inf.ethz.ch/personal/gaertner/texts/own_work/esa99_final.pdf - NT old_sqr_r; - const NT* c; - Pit pivot, k; - NT e, max_e, sqr_r; - Cit p; - do { - old_sqr_r = current_sqr_r; - sqr_r = current_sqr_r; - - pivot = points_begin; - max_e = nt0; - for (k = points_begin; k != n; ++k) { - p = coord_accessor(k); - e = -sqr_r; - c = current_c; - for (int j=0; j(*p++-*c++); - if (e > max_e) { - max_e = e; - pivot = k; - } - } - - if (max_e > nt0) { - // check if the pivot is already contained in the support set - if (std::find(L.begin(), support_end, pivot) == support_end) { - assert (fsize == 0); - if (push (pivot)) { - mtf_mb(support_end); - pop(); - pivot_move_to_front(pivot); - } - } - } - } while (old_sqr_r < current_sqr_r); - } - - template - void Miniball::pivot_move_to_front (Pit j) - { - L.push_front(j); - if (std::distance(L.begin(), support_end) == d+2) - support_end--; - } - - template - inline typename Miniball::NT - Miniball::excess (Pit pit) const - { - Cit p = coord_accessor(pit); - NT e = -current_sqr_r; - NT* c = current_c; - for (int k=0; k(*p++-*c++); - } - return e; - } - - template - void Miniball::pop () - { - --fsize; - } - - template - bool Miniball::push (Pit pit) - { - int i, j; - NT eps = mb_sqr(std::numeric_limits::epsilon()); - - Cit cit = coord_accessor(pit); - Cit p = cit; - - if (fsize==0) { - for (i=0; i(v[fsize][j]); - z[fsize]*=2; - - // reject push if z_fsize too small - if (z[fsize](*p++-c[fsize-1][i]); - f[fsize]=e/z[fsize]; - - for (i=0; i - typename Miniball::NT - Miniball::suboptimality () const - { - NT* l = new NT[d+1]; - NT min_l = nt0; - l[0] = NT(1); - for (int i=ssize-1; i>0; --i) { - l[i] = f[i]; - for (int k=ssize-1; k>i; --k) - l[i]-=a[k][i]*l[k]; - if (l[i] < min_l) min_l = l[i]; - l[0] -= l[i]; - } - if (l[0] < min_l) min_l = l[0]; - delete[] l; - if (min_l < nt0) - return -min_l; - return nt0; - } -} // namespace Miniball - -} // namespace Gudhi - -#endif // MINIBALL_HPP_ diff --git a/src/common/doc/examples.h b/src/common/doc/examples.h index 879fb96a..0c4320f6 100644 --- a/src/common/doc/examples.h +++ b/src/common/doc/examples.h @@ -40,7 +40,6 @@ * @example edge_collapse_basic_example.cpp * \section Cech_complex_example_section Cech_complex * @example cech_persistence.cpp - * @example cech_complex_step_by_step.cpp * @example cech_complex_example_from_points.cpp * \section Bitmap_cubical_complex_example_section Bitmap_cubical_complex * @example 
periodic_cubical_complex_persistence.cpp diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index 17354179..6f995fee 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -181,7 +181,7 @@ Author: Vincent Rouvreau
Introduced in: GUDHI 2.2.0<br>
Copyright: MIT [(GPL v3)](../../licensing/)<br>
- Includes: [Miniball](https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html)<br>
+ Requires: \ref cgal -- cgit v1.2.3 From a3d8a052e260c501d2feee2e63d3699b71baf549 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Mon, 25 Apr 2022 16:10:39 +0200 Subject: Use Dimension_tag<2> instead of Dynamic_dimension_tag in cech_complex_example_from_points.cpp --- .../example/cech_complex_example_from_points.cpp | 38 +++++++--------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp index 034077eb..ef9071ec 100644 --- a/src/Cech_complex/example/cech_complex_example_from_points.cpp +++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp @@ -6,42 +6,28 @@ #include #include #include -#include int main() { // Type definitions using Simplex_tree = Gudhi::Simplex_tree; using Filtration_value = Simplex_tree::Filtration_value; - using Kernel = CGAL::Epeck_d; - using FT = typename Kernel::FT; + using Kernel = CGAL::Epeck_d>; using Point = typename Kernel::Point_d; using Point_cloud = std::vector; using Cech_complex = Gudhi::cech_complex::Cech_complex; Point_cloud points; - - std::vector point0({1., 0.}); - points.emplace_back(point0.begin(), point0.end()); - std::vector point1({0., 1.}); - points.emplace_back(point1.begin(), point1.end()); - std::vector point2({2., 1.}); - points.emplace_back(point2.begin(), point2.end()); - std::vector point3({3., 2.}); - points.emplace_back(point3.begin(), point3.end()); - std::vector point4({0., 3.}); - points.emplace_back(point4.begin(), point4.end()); - std::vector point5({3. + std::sqrt(3.), 3.}); - points.emplace_back(point5.begin(), point5.end()); - std::vector point6({1., 4.}); - points.emplace_back(point6.begin(), point6.end()); - std::vector point7({3., 4.}); - points.emplace_back(point7.begin(), point7.end()); - std::vector point8({2., 4. + std::sqrt(3.)}); - points.emplace_back(point8.begin(), point8.end()); - std::vector point9({0., 4.}); - points.emplace_back(point9.begin(), point9.end()); - std::vector point10({-0.5, 2.}); - points.emplace_back(point10.begin(), point10.end()); + points.emplace_back(1., 0.); // 0 + points.emplace_back(0., 1.); // 1 + points.emplace_back(2., 1.); // 2 + points.emplace_back(3., 2.); // 3 + points.emplace_back(0., 3.); // 4 + points.emplace_back(3. + std::sqrt(3.), 3.); // 5 + points.emplace_back(1., 4.); // 6 + points.emplace_back(3., 4.); // 7 + points.emplace_back(2., 4. 
+ std::sqrt(3.)); // 8 + points.emplace_back(0., 4.); // 9 + points.emplace_back(-0.5, 2.); // 10 // ---------------------------------------------------------------------------- // Init of a Cech complex from points -- cgit v1.2.3 From aec7ab1737a5284f4b7c2d1f7fe3eb7977df7537 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 26 Apr 2022 16:52:26 +0200 Subject: Modify cech doc Use Filtration_value instead of double for casting Use a templated range of points instead of vector in cech constructor Capitalize sphere_circumradius.h file name and make it private in doc --- src/Cech_complex/include/gudhi/Cech_complex.h | 19 +++---- .../include/gudhi/Cech_complex_blocker.h | 18 +++---- .../include/gudhi/Sphere_circumradius.h | 62 ++++++++++++++++++++++ .../include/gudhi/sphere_circumradius.h | 62 ---------------------- src/Cech_complex/test/test_cech_complex.cpp | 15 +++--- src/Cech_complex/utilities/cech_persistence.cpp | 1 - 6 files changed, 88 insertions(+), 89 deletions(-) create mode 100644 src/Cech_complex/include/gudhi/Sphere_circumradius.h delete mode 100644 src/Cech_complex/include/gudhi/sphere_circumradius.h diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 375be1d2..fc39f75b 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -11,7 +11,7 @@ #ifndef CECH_COMPLEX_H_ #define CECH_COMPLEX_H_ -#include // for Gudhi::cech_complex::Sphere_circumradius +#include // for Gudhi::cech_complex::Sphere_circumradius #include // for Gudhi::Proximity_graph #include // for GUDHI_CHECK #include // for Gudhi::cech_complex::Cech_blocker @@ -25,15 +25,15 @@ namespace cech_complex { /** * \class Cech_complex - * \brief Cech complex data structure. + * \brief Cech complex class. * * \ingroup cech_complex * * \details - * The data structure is a proximity graph, containing edges when the edge length is less or equal - * to a given max_radius. Edge length is computed from `Gudhi::cech_complex::Sphere_circumradius` distance function. + * Cech complex is a simplicial complex constructed from a proximity graph, where the set of all simplices is filtered + * by the radius of their minimal enclosing ball and bounded by the given max_radius. * - * \tparam Kernel CGAL kernel. + * \tparam Kernel CGAL kernel: either Epick_d or Epeck_d. * * \tparam SimplicialComplexForCechComplex furnishes `Vertex_handle` and `Filtration_value` type definition required * by `Gudhi::Proximity_graph` and Cech blocker. @@ -58,15 +58,16 @@ class Cech_complex { using Sphere = typename cech_blocker::Sphere; public: - /** \brief Cech_complex constructor from a list of points. + /** \brief Cech_complex constructor from a range of points. * - * @param[in] points Vector of points where each point is defined as `kernel::Point_d`. + * @param[in] points Range of points where each point is defined as `kernel::Point_d`. * @param[in] max_radius Maximal radius value. 
* */ - Cech_complex(const Point_cloud & points, Filtration_value max_radius) : max_radius_(max_radius) { + template + Cech_complex(const InputPointRange & points, Filtration_value max_radius) : max_radius_(max_radius) { - point_cloud_.assign(points.begin(), points.end()); + point_cloud_.assign(std::begin(points), std::end(points)); cech_skeleton_graph_ = Gudhi::compute_proximity_graph( point_cloud_, max_radius_, Sphere_circumradius()); diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 1a696422..1a09f7e1 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -11,7 +11,7 @@ #ifndef CECH_COMPLEX_BLOCKER_H_ #define CECH_COMPLEX_BLOCKER_H_ -#include // for casting from FT to double +#include // for casting from FT to Filtration_value and double to FT #include #include @@ -36,7 +36,7 @@ namespace cech_complex { * * \tparam Cech_complex is required by the blocker. * - * \tparam Kernel CGAL kernel. + * \tparam Kernel CGAL kernel: either Epick_d or Epeck_d. */ template class Cech_blocker { @@ -69,8 +69,8 @@ class Cech_blocker { * \return true if the simplex radius is greater than the Cech_complex max_radius*/ bool operator()(Simplex_handle sh) { using Point_cloud = std::vector; - CGAL::NT_converter cast_to_double; - Filtration_value radius = 0.; + CGAL::NT_converter cast_to_fv; + Filtration_value radius = 0; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices Sphere min_enclos_ball; @@ -105,18 +105,18 @@ class Cech_blocker { face_points.clear(); if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { - radius = std::sqrt(cast_to_double(sph.second)); + radius = std::sqrt(cast_to_fv(sph.second)); #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES - if (cast_to_double(sph.second) < cast_to_double(min_enclos_ball.second)) + if (sph.second < min_enclos_ball.second) min_enclos_ball = sph; break; } } // Get the minimal radius of all faces enclosing balls if exists - if(cast_to_double(min_enclos_ball.second) != std::numeric_limits::max()) { - radius = std::sqrt(cast_to_double(min_enclos_ball.second)); + if(min_enclos_ball.second != std::numeric_limits::max()) { + radius = std::sqrt(cast_to_fv(min_enclos_ball.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); cc_ptr_->get_cache().push_back(min_enclos_ball); @@ -128,7 +128,7 @@ class Cech_blocker { points.push_back(cc_ptr_->get_point(vertex)); } Sphere sph = get_sphere(points.cbegin(), points.cend()); - radius = std::sqrt(cast_to_double(sph.second)); + radius = std::sqrt(cast_to_fv(sph.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); cc_ptr_->get_cache().push_back(sph); diff --git a/src/Cech_complex/include/gudhi/Sphere_circumradius.h b/src/Cech_complex/include/gudhi/Sphere_circumradius.h new file mode 100644 index 00000000..b0d9f7cc --- /dev/null +++ b/src/Cech_complex/include/gudhi/Sphere_circumradius.h @@ -0,0 +1,62 @@ +/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. + * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+ * Author(s): Hind Montassif + * + * Copyright (C) 2021 Inria + * + * Modification(s): + * - YYYY/MM Author: Description of the modification + */ + +#ifndef SPHERE_CIRCUMRADIUS_H_ +#define SPHERE_CIRCUMRADIUS_H_ + +#include // for #include + +#include // for std::sqrt +#include + +namespace Gudhi { + +namespace cech_complex { + +/** \private @brief Compute the circumradius of the sphere passing through points given by a range of coordinates. + * The points are assumed to have the same dimension. */ +template +class Sphere_circumradius { + private: + Kernel kernel_; + public: + using Point = typename Kernel::Point_d; + using Point_cloud = typename std::vector; + + /** \brief Circumradius of sphere passing through two points using CGAL. + * + * @param[in] point_1 + * @param[in] point_2 + * @return Sphere circumradius passing through two points. + * \tparam Point must be a Kernel::Point_d from CGAL. + * + */ + double operator()(const Point& point_1, const Point& point_2) const { + return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; + } + + /** \brief Circumradius of sphere passing through point cloud using CGAL. + * + * @param[in] point_cloud The points. + * @return Sphere circumradius passing through the points. + * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. + * + */ + double operator()(const Point_cloud& point_cloud) const { + return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); + } + +}; + +} // namespace cech_complex + +} // namespace Gudhi + +#endif // SPHERE_CIRCUMRADIUS_H_ diff --git a/src/Cech_complex/include/gudhi/sphere_circumradius.h b/src/Cech_complex/include/gudhi/sphere_circumradius.h deleted file mode 100644 index a6dec3dc..00000000 --- a/src/Cech_complex/include/gudhi/sphere_circumradius.h +++ /dev/null @@ -1,62 +0,0 @@ -/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. - * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - * Author(s): Hind Montassif - * - * Copyright (C) 2021 Inria - * - * Modification(s): - * - YYYY/MM Author: Description of the modification - */ - -#ifndef SPHERE_CIRCUMRADIUS_H_ -#define SPHERE_CIRCUMRADIUS_H_ - -#include // for #include - -#include // for std::sqrt -#include - -namespace Gudhi { - -namespace cech_complex { - -/** @brief Compute the circumradius of the sphere passing through points given by a range of coordinates. - * The points are assumed to have the same dimension. */ -template -class Sphere_circumradius { - private: - Kernel kernel_; - public: - using Point = typename Kernel::Point_d; - using Point_cloud = typename std::vector; - - /** \brief Circumradius of sphere passing through two points using CGAL. - * - * @param[in] point_1 - * @param[in] point_2 - * @return Sphere circumradius passing through two points. - * \tparam Point must be a Kernel::Point_d from CGAL. - * - */ - double operator()(const Point& point_1, const Point& point_2) const { - return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; - } - - /** \brief Circumradius of sphere passing through point cloud using CGAL. - * - * @param[in] point_cloud The points. - * @return Sphere circumradius passing through the points. - * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. 
- * - */ - double operator()(const Point_cloud& point_cloud) const { - return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); - } - -}; - -} // namespace cech_complex - -} // namespace Gudhi - -#endif // SPHERE_CIRCUMRADIUS_H_ diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index 4cf8b68f..ea32f596 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -22,7 +22,6 @@ // to construct Cech_complex from a OFF file of points #include #include -#include #include #include // For EXACT or SAFE version @@ -139,18 +138,18 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { } Kernel kern; - Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2})); + Filtration_value f012 = st2.filtration(st2.find({0, 1, 2})); std::clog << "f012= " << f012 << std::endl; - CGAL::NT_converter cast_to_double; - GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points012.begin(), points012.end())))); + CGAL::NT_converter cast_to_fv; + GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(cast_to_fv(kern.compute_squared_radius_d_object()(points012.begin(), points012.end())))); Point_cloud points1410; points1410.push_back(cech_complex_for_doc.get_point(1)); points1410.push_back(cech_complex_for_doc.get_point(4)); points1410.push_back(cech_complex_for_doc.get_point(10)); - Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10})); + Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10})); std::clog << "f1410= " << f1410 << std::endl; // In this case, the computed circumsphere using CGAL kernel does not match the minimal enclosing ball; the filtration value check is therefore done against a hardcoded value @@ -161,10 +160,10 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { points469.push_back(cech_complex_for_doc.get_point(6)); points469.push_back(cech_complex_for_doc.get_point(9)); - Simplex_tree::Filtration_value f469 = st2.filtration(st2.find({4, 6, 9})); + Filtration_value f469 = st2.filtration(st2.find({4, 6, 9})); std::clog << "f469= " << f469 << std::endl; - GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(cast_to_double(kern.compute_squared_radius_d_object()(points469.begin(), points469.end())))); + GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(cast_to_fv(kern.compute_squared_radius_d_object()(points469.begin(), points469.end())))); BOOST_CHECK((st2.find({6, 7, 8}) == st2.null_simplex())); BOOST_CHECK((st2.find({3, 5, 7}) == st2.null_simplex())); @@ -246,7 +245,7 @@ BOOST_AUTO_TEST_CASE(Cech_create_complex_throw) { // // ---------------------------------------------------------------------------- std::string off_file_name("alphacomplexdoc.off"); - double max_radius = 12.0; + Filtration_value max_radius = 12.0; std::clog << "========== OFF FILE NAME = " << off_file_name << " - Cech max_radius=" << max_radius << "==========" << std::endl; diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp index 82992f2d..75d10c0f 100644 --- a/src/Cech_complex/utilities/cech_persistence.cpp +++ b/src/Cech_complex/utilities/cech_persistence.cpp @@ -9,7 +9,6 @@ */ #include -#include #include #include #include -- cgit v1.2.3 From b9119a92c5316a36e0ae8ff041f0625b51973321 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Wed, 27 Apr 2022 11:58:39 +0200 Subject: update doc + remove numpy/tensorflow mixup --- 
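Note: the docstring rewrites below spell out the return convention of the layers (one diagram, or one finite/essential pair, per entry of the input list dimensions), and the tests now compare tensors directly in TensorFlow instead of round-tripping through NumPy. A minimal usage sketch, assuming the import path gudhi.tensorflow and the CubicalLayer call/GradientTape pattern exercised in test_diff.py; the pixel values are illustrative:

import numpy as np
import tensorflow as tf
from gudhi.tensorflow import CubicalLayer

# A small 3x3 image whose pixel values we differentiate through.
X = tf.Variable(initial_value=np.array([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]]),
                trainable=True, dtype=tf.float32)
cl = CubicalLayer(dimensions=[0])

with tf.GradientTape() as tape:
    dgm = cl.call(X)[0]  # finite 0-dimensional diagram, shape [num_finite_points, 2]
    loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))
grads = tape.gradient(loss, [X])  # gradients w.r.t. the pixel values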
src/python/gudhi/tensorflow/cubical_layer.py | 2 +- src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py | 2 +- src/python/gudhi/tensorflow/rips_layer.py | 2 +- src/python/test/test_diff.py | 10 +++++----- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 31c44205..8db46a8e 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -58,7 +58,7 @@ class CubicalLayer(tf.keras.layers.Layer): X (TensorFlow variable): pixel values of the cubical complex Returns: - dgms (list of TensorFlow variables): list of cubical persistence diagrams of length self.dimensions, where each element contains a finite persistence diagram of shape [num_finite_points, 2] + dgms (list of TensorFlow variables): list of cubical persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, each element of this list is an array containing the finite part of the corresponding persistence diagram, of shape [num_finite_points, 2]. Note that there is no essential part since this part is always empty in cubical persistence diagrams, except in homology dimension zero, where the essential part always contains a single point, with abscissa equal to the smallest value in the complex, and infinite ordinate. """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index cf7df6de..a2e48d8a 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -65,7 +65,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): F (TensorFlow variable): filter function values over the vertices of the simplex tree. The ith entry of F corresponds to vertex i in self.simplextree Returns: - dgms (list of tuple of TensorFlow variables): list of lower-star persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively + dgms (list of tuple of TensorFlow variables): list of lower-star persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. 
Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Don't try to compute gradients for the vertex pairs indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions) diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 7b5edfa3..b5b58ab4 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -63,7 +63,7 @@ class RipsLayer(tf.keras.layers.Layer): X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] Returns: - dgms (list of tuple of TensorFlow variables): list of Rips persistence diagrams of length self.dimensions, where each element of the list is a tuple that contains the finite and essential persistence diagrams of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively + dgms (list of tuple of TensorFlow variables): list of Rips persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Compute distance matrix DX = tf.norm(tf.expand_dims(X, 1)-tf.expand_dims(X, 0), axis=2) diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index e0c99d07..2529cf22 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -13,7 +13,7 @@ def test_rips_diff(): dgm = rl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) - assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6 + assert tf.norm(grads[0]-tf.constant([[-.5,-.5],[.5,.5]]),1) <= 1e-6 def test_cubical_diff(): @@ -25,7 +25,7 @@ def test_cubical_diff(): dgm = cl.call(X)[0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) - assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6 + assert tf.norm(grads[0]-tf.constant([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]]),1) <= 1e-6 def test_nonsquare_cubical_diff(): @@ -37,7 +37,7 @@ def test_nonsquare_cubical_diff(): dgm = cl.call(X)[0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) - assert np.abs(grads[0].numpy()-np.array([[0.,0.5,-0.5],[0.,0.,0.]])).sum() <= 1e-6 + assert tf.norm(grads[0]-tf.constant([[0.,0.5,-0.5],[0.,0.,0.]]),1) <= 1e-6 def test_st_diff(): @@ -73,6 +73,6 @@ def test_st_diff(): loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [F]) - assert np.array_equal(np.array(grads[0].indices), np.array([2,4])) - assert np.array_equal(np.array(grads[0].values), np.array([-1,1])) + assert tf.math.reduce_all(tf.math.equal(grads[0].indices, tf.constant([2,4]))) + assert tf.math.reduce_all(tf.math.equal(grads[0].values, tf.constant([-1.,1.]))) -- cgit v1.2.3 From 70c20c20f89e2037544e7906c5743a30a7e3beb7 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 27 Apr 2022 16:41:22 
+0200 Subject: Remove unnecessary code from cech blocker --- .../include/gudhi/Cech_complex_blocker.h | 41 +++++++++------------- 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 1a09f7e1..72876512 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -12,6 +12,7 @@ #define CECH_COMPLEX_BLOCKER_H_ #include // for casting from FT to Filtration_value and double to FT +#include #include #include @@ -73,10 +74,7 @@ class Cech_blocker { Filtration_value radius = 0; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices - Sphere min_enclos_ball; - CGAL::NT_converter cast_to_FT; - min_enclos_ball.second = cast_to_FT(std::numeric_limits::max()); - Point_cloud face_points; + boost::optional min_enclos_ball; for (auto face : sc_ptr_->boundary_simplex_range(sh)) { // Find which vertex of sh is missing in face. We rely on the fact that simplex_vertex_range is sorted. auto longlist = sc_ptr_->simplex_vertex_range(sh); @@ -88,41 +86,36 @@ class Cech_blocker { while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; } auto extra = *longiter; // Vertex_handle - for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { - face_points.push_back(cc_ptr_->get_point(vertex)); - #ifdef DEBUG_TRACES - std::clog << "#(" << vertex << ")#"; - #endif // DEBUG_TRACES - } Sphere sph; auto k = sc_ptr_->key(face); if(k != sc_ptr_->null_key()) { sph = cc_ptr_->get_cache().at(k); } else { + Point_cloud face_points; + for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { + face_points.push_back(cc_ptr_->get_point(vertex)); + #ifdef DEBUG_TRACES + std::clog << "#(" << vertex << ")#"; + #endif // DEBUG_TRACES + } sph = get_sphere(face_points.cbegin(), face_points.cend()); + face_points.clear(); } - face_points.clear(); - + // Check if the minimal enclosing ball of current face contains the extra point if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { - radius = std::sqrt(cast_to_fv(sph.second)); #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES - if (sph.second < min_enclos_ball.second) - min_enclos_ball = sph; + radius = std::sqrt(cast_to_fv(sph.second)); + sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); + cc_ptr_->get_cache().push_back(sph); + min_enclos_ball.emplace(cc_ptr_->get_cache().back()); break; } } - // Get the minimal radius of all faces enclosing balls if exists - if(min_enclos_ball.second != std::numeric_limits::max()) { - radius = std::sqrt(cast_to_fv(min_enclos_ball.second)); - - sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); - cc_ptr_->get_cache().push_back(min_enclos_ball); - } - - if (radius == 0.) 
{ // Spheres of each face don't contain the whole simplex + // Spheres of each face don't contain the whole simplex + if(!min_enclos_ball) { Point_cloud points; for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { points.push_back(cc_ptr_->get_point(vertex)); -- cgit v1.2.3 From a14d9becec7361a2559fa239c1ca8f2c1b5c5768 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 27 Apr 2022 17:08:14 +0200 Subject: Ultimately, we don't really need to store the min enclosing ball in case it contains the extra point, a bool is enough --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 72876512..fb12946a 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -11,8 +11,7 @@ #ifndef CECH_COMPLEX_BLOCKER_H_ #define CECH_COMPLEX_BLOCKER_H_ -#include // for casting from FT to Filtration_value and double to FT -#include +#include // for casting from FT to Filtration_value #include #include @@ -72,9 +71,9 @@ class Cech_blocker { using Point_cloud = std::vector; CGAL::NT_converter cast_to_fv; Filtration_value radius = 0; + bool is_min_enclos_ball = false; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices - boost::optional min_enclos_ball; for (auto face : sc_ptr_->boundary_simplex_range(sh)) { // Find which vertex of sh is missing in face. We rely on the fact that simplex_vertex_range is sorted. auto longlist = sc_ptr_->simplex_vertex_range(sh); @@ -107,15 +106,15 @@ class Cech_blocker { #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES + is_min_enclos_ball = true; radius = std::sqrt(cast_to_fv(sph.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); cc_ptr_->get_cache().push_back(sph); - min_enclos_ball.emplace(cc_ptr_->get_cache().back()); break; } } // Spheres of each face don't contain the whole simplex - if(!min_enclos_ball) { + if(!is_min_enclos_ball) { Point_cloud points; for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { points.push_back(cc_ptr_->get_point(vertex)); -- cgit v1.2.3 From a6a68c11455a554619d8a5b5d2f92c1ddbf45e99 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 28 Apr 2022 16:38:34 +0200 Subject: Put edge sphere in cache --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index fb12946a..3141d27a 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -99,6 +99,10 @@ class Cech_blocker { #endif // DEBUG_TRACES } sph = get_sphere(face_points.cbegin(), face_points.cend()); + // Put edge sphere in cache + sc_ptr_->assign_key(face, cc_ptr_->get_cache().size()); + cc_ptr_->get_cache().push_back(sph); + // Clear face_points face_points.clear(); } // Check if the minimal enclosing ball of current face contains the extra point -- cgit v1.2.3 From ef8284cce27a8f11947e7f076034aa2fd8b5a395 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 4 May 2022 15:27:34 +0200 Subject: Ask for file_path as parameter of remote fetching functions instead of both dirname and filename Modify remote fetching 
test --- src/python/gudhi/datasets/remote.py | 106 +++++++++++++++----------------- src/python/test/test_remote_datasets.py | 94 ++++++++++------------------ 2 files changed, 83 insertions(+), 117 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 8b3baef4..5b535911 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -7,7 +7,7 @@ # Modification(s): # - YYYY/MM Author: Description of the modification -from os.path import join, exists, expanduser +from os.path import join, split, exists, expanduser from os import makedirs, remove from urllib.request import urlretrieve @@ -60,7 +60,7 @@ def _checksum_sha256(file_path): Parameters ---------- file_path: string - Full path of the created file. + Full path of the created file including filename. Returns ------- @@ -77,7 +77,7 @@ def _checksum_sha256(file_path): sha256_hash.update(buffer) return sha256_hash.hexdigest() -def _fetch_remote(url, filename, dirname, file_checksum = None, accept_license = False): +def _fetch_remote(url, file_path, file_checksum = None): """ Fetch the wanted dataset from the given url and save it in file_path. @@ -85,21 +85,11 @@ def _fetch_remote(url, filename, dirname, file_checksum = None, accept_license = ---------- url : string The url to fetch the dataset from. - filename : string - The name to give to downloaded file. - dirname : string - The directory to save the file to. + file_path : string + Full path of the downloaded file including filename. file_checksum : string The file checksum using sha256 to check against the one computed on the downloaded file. Default is 'None', which means the checksum is not checked. - accept_license : boolean - Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. - Default is False. - - Returns - ------- - file_path: string - Full path of the created file. Raises ------ @@ -107,8 +97,6 @@ def _fetch_remote(url, filename, dirname, file_checksum = None, accept_license = If the computed SHA256 checksum of file does not match the one given by the user. """ - file_path = join(dirname, filename) - # Get the file urlretrieve(url, file_path) @@ -121,36 +109,41 @@ def _fetch_remote(url, filename, dirname, file_checksum = None, accept_license = "different from expected : {}." "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, file_checksum)) - # Print license terms unless accept_license is set to True - if not accept_license: - license_file = join(dirname, "LICENSE") - if exists(license_file) and (file_path != license_file): - with open(license_file, 'r') as f: - print(f.read()) +def _get_archive_path(file_path, label): + """ + Get archive path based on file_path given by user and label. - return file_path + Parameters + ---------- + file_path: string + Full path of the file to get including filename, or None. + label: string + Label used along with 'data_home' to get archive path, in case 'file_path' is None. -def _get_archive_and_dir(dirname, filename, label): - if dirname is None: - dirname = join(get_data_home(dirname), label) + Returns + ------- + Full path of archive including filename. 
+ """ + if file_path is None: + archive_path = join(get_data_home(), label) + dirname = split(archive_path)[0] makedirs(dirname, exist_ok=True) else: - dirname = get_data_home(dirname) - - archive_path = join(dirname, filename) + archive_path = file_path + dirname = split(archive_path)[0] + makedirs(dirname, exist_ok=True) - return archive_path, dirname + return archive_path -def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): +def fetch_spiral_2d(file_path = None): """ Fetch spiral_2d dataset remotely. Parameters ---------- - filename : string - The name to give to downloaded file. Default is "spiral_2d.npy". - dirname : string - The directory to save the file to. Default is None, meaning that the downloaded file will be put in "~/gudhi_data/points/spiral_2d". + file_path : string + Full path of the downloaded file including filename. + Default is None, meaning that it's set to "data_home/points/spiral_2d/spiral_2d.npy". Returns ------- @@ -158,28 +151,25 @@ def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): Array of shape (114562, 2). """ file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy" - file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf' + file_checksum = '2226024da76c073dd2f24b884baefbfd14928b52296df41ad2d9b9dc170f2401' - archive_path, dirname = _get_archive_and_dir(dirname, filename, "points/spiral_2d") + archive_path = _get_archive_path(file_path, "points/spiral_2d/spiral_2d.npy") if not exists(archive_path): - file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) + _fetch_remote(file_url, archive_path, file_checksum) - return np.load(file_path_pkl, mmap_mode='r') - else: - return np.load(archive_path, mmap_mode='r') + return np.load(archive_path, mmap_mode='r') -def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): +def fetch_bunny(file_path = None, accept_license = False): """ Fetch Stanford bunny dataset remotely and its LICENSE file. This dataset contains 35947 vertices. Parameters ---------- - filename : string - The name to give to downloaded file. Default is "bunny.npy". - dirname : string - The directory to save the file to. Default is None, meaning that the downloaded files will be put in "~/gudhi_data/points/bunny". + file_path : string + Full path of the downloaded file including filename. + Default is None, meaning that it's set to "data_home/points/bunny/bunny.npy". accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. Default is False. 
@@ -191,16 +181,20 @@ def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): """ file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy" - file_checksum = '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b' - license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" + file_checksum = 'f382482fd89df8d6444152dc8fd454444fe597581b193fd139725a85af4a6c6e' + license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.LICENSE" license_checksum = 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a' - archive_path, dirname = _get_archive_and_dir(dirname, filename, "points/bunny") + archive_path = _get_archive_path(file_path, "points/bunny/bunny.npy") if not exists(archive_path): - license_path = _fetch_remote(license_url, "LICENSE", dirname, license_checksum) - file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum, accept_license) - - return np.load(file_path_pkl, mmap_mode='r') - else: - return np.load(archive_path, mmap_mode='r') + _fetch_remote(file_url, archive_path, file_checksum) + license_path = join(split(archive_path)[0], "bunny.LICENSE") + _fetch_remote(license_url, license_path, license_checksum) + # Print license terms unless accept_license is set to True + if not accept_license: + if exists(license_path): + with open(license_path, 'r') as f: + print(f.read()) + + return np.load(archive_path, mmap_mode='r') diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index c44ac22b..5d0d397d 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -9,76 +9,48 @@ from gudhi.datasets import remote -import re import shutil import io import sys import pytest -from os.path import isfile, isdir, expanduser -from os import makedirs +from os.path import isdir, expanduser, exists +from os import remove -def _check_dir_file_names(path_file_dw, filename, dirname): - assert isfile(path_file_dw) +def test_data_home(): + # Test get_data_home and clear_data_home on new empty folder + empty_data_home = remote.get_data_home(data_home="empty_folder_for_test") + assert isdir(empty_data_home) - names_dw = re.split(r' |/|\\', path_file_dw) - # Case where inner directories are created in "test_gudhi_data/"; e.g: "test_gudhi_data/bunny" - if len(names_dw) >= 3: - for i in range(len(names_dw)-1): - assert re.split(r' |/|\\', dirname)[i] == names_dw[i] - assert filename == names_dw[i+1] - else: - assert dirname == names_dw[0] - assert filename == names_dw[1] + remote.clear_data_home(data_home=empty_data_home) + assert not isdir(empty_data_home) -def _check_fetch_output(url, filename, dirname = "test_gudhi_data", file_checksum = None): - makedirs(dirname, exist_ok=True) - path_file_dw = remote._fetch_remote(url, filename, dirname, file_checksum) - _check_dir_file_names(path_file_dw, filename, dirname) +def test_fetch_remote(): + # Test fetch with a wrong checksum + with pytest.raises(OSError): + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "tmp_spiral_2d.npy", file_checksum = 'XXXXXXXXXX') + assert not exists("tmp_spiral_2d.npy") def _get_bunny_license_print(accept_license = False): capturedOutput = io.StringIO() # Redirect stdout sys.stdout = capturedOutput - makedirs("test_gudhi_data/bunny", exist_ok=True) + bunny_arr = remote.fetch_bunny("./tmp_for_test/bunny.npy", accept_license) + assert 
bunny_arr.shape == (35947, 3) + remove("./tmp_for_test/bunny.npy") - remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "test_gudhi_data/bunny", - '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) # Reset redirect sys.stdout = sys.__stdout__ return capturedOutput -def test_fetch_remote_datasets(): - # Test fetch with a wrong checksum - with pytest.raises(OSError): - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy", file_checksum = 'XXXXXXXXXX') - - # Test files download from given urls with checksums provided - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy", - file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf') - - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", - file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') - - # Test files download from given urls without checksums - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy") - - _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") - - # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) - # Fetch LICENSE file - makedirs("test_gudhi_data/bunny", exist_ok=True) - remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "test_gudhi_data/bunny", - 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') - with open("test_gudhi_data/bunny/LICENSE") as f: - assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") - +def test_print_bunny_license(): # Test not printing bunny.npy LICENSE when accept_license = True assert "" == _get_bunny_license_print(accept_license = True).getvalue() - - # Remove "test_gudhi_data" directory and all its content - shutil.rmtree("test_gudhi_data") + # Test printing bunny.LICENSE file when fetching bunny.npy with accept_license = False (default) + with open("./tmp_for_test/bunny.LICENSE") as f: + assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") + shutil.rmtree("./tmp_for_test") def test_fetch_remote_datasets_wrapped(): # Check if gudhi_data default dir exists already @@ -93,27 +65,27 @@ def test_fetch_remote_datasets_wrapped(): # Check that default dir was created assert isdir(expanduser("~/gudhi_data")) + # Check downloaded files + assert exists(expanduser("~/gudhi_data/points/spiral_2d/spiral_2d.npy")) + assert exists(expanduser("~/gudhi_data/points/bunny/bunny.npy")) + assert exists(expanduser("~/gudhi_data/points/bunny/bunny.LICENSE")) # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default - spiral_2d_arr = remote.fetch_spiral_2d(dirname = "./another_fetch_folder_for_test") + spiral_2d_arr = remote.fetch_spiral_2d("./another_fetch_folder_for_test/spiral_2d.npy") assert spiral_2d_arr.shape == (114562, 2) - bunny_arr = remote.fetch_bunny(dirname = "./another_fetch_folder_for_test") + bunny_arr = remote.fetch_bunny("./another_fetch_folder_for_test/bunny.npy") assert bunny_arr.shape == (35947, 3) - assert 
isdir(expanduser("./another_fetch_folder_for_test")) + assert isdir("./another_fetch_folder_for_test") + # Check downloaded files + assert exists("./another_fetch_folder_for_test/spiral_2d.npy") + assert exists("./another_fetch_folder_for_test/bunny.npy") + assert exists("./another_fetch_folder_for_test/bunny.LICENSE") # Remove test folders del spiral_2d_arr del bunny_arr if to_be_removed: shutil.rmtree(expanduser("~/gudhi_data")) - shutil.rmtree(expanduser("./another_fetch_folder_for_test")) - -def test_data_home(): - # Test get_data_home and clear_data_home on new empty folder - empty_data_home = remote.get_data_home(data_home="empty_folder_for_test") - assert isdir(empty_data_home) - - remote.clear_data_home(data_home=empty_data_home) - assert not isdir(empty_data_home) + shutil.rmtree("./another_fetch_folder_for_test") -- cgit v1.2.3 From 52d5b524403a43bfdc0b27a7feeec04e9c9c34c2 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 5 May 2022 17:43:12 +0200 Subject: Add GUDHI_DATA environment variable option --- src/python/gudhi/datasets/remote.py | 16 +++++++++++----- src/python/test/test_remote_datasets.py | 13 ++++++++++++- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 5b535911..eac8caf3 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -8,7 +8,7 @@ # - YYYY/MM Author: Description of the modification from os.path import join, split, exists, expanduser -from os import makedirs, remove +from os import makedirs, remove, environ from urllib.request import urlretrieve import hashlib @@ -21,13 +21,16 @@ def get_data_home(data_home = None): Return the path of the remote datasets directory. This folder is used to store remotely fetched datasets. By default the datasets directory is set to a folder named 'gudhi_data' in the user home folder. - Alternatively, it can be set by giving an explicit folder path. The '~' symbol is expanded to the user home folder. + Alternatively, it can be set by the 'GUDHI_DATA' environment variable. + The '~' symbol is expanded to the user home folder. If the folder does not already exist, it is automatically created. Parameters ---------- data_home : string - The path to remote datasets directory. Default is `None`, meaning that the data home directory will be set to "~/gudhi_data". + The path to remote datasets directory. + Default is `None`, meaning that the data home directory will be set to "~/gudhi_data", + if the 'GUDHI_DATA' environment variable does not exist. Returns ------- @@ -35,7 +38,7 @@ def get_data_home(data_home = None): The path to remote datasets directory. """ if data_home is None: - data_home = join("~", "gudhi_data") + data_home = environ.get("GUDHI_DATA", join("~", "gudhi_data")) data_home = expanduser(data_home) makedirs(data_home, exist_ok=True) return data_home @@ -48,7 +51,9 @@ def clear_data_home(data_home = None): Parameters ---------- data_home : string, default is None. - The path to remote datasets directory. If `None`, the default directory to be removed is set to "~/gudhi_data". + The path to remote datasets directory. + If `None` and the 'GUDHI_DATA' environment variable does not exist, + the default directory to be removed is set to "~/gudhi_data". """ data_home = get_data_home(data_home) shutil.rmtree(data_home) @@ -170,6 +175,7 @@ def fetch_bunny(file_path = None, accept_license = False): file_path : string Full path of the downloaded file including filename. 
Default is None, meaning that it's set to "data_home/points/bunny/bunny.npy". + In this case, the LICENSE file would be downloaded as "data_home/points/bunny/bunny.LICENSE". accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. Default is False. diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index 5d0d397d..af26d77c 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -15,7 +15,7 @@ import sys import pytest from os.path import isdir, expanduser, exists -from os import remove +from os import remove, environ def test_data_home(): # Test get_data_home and clear_data_home on new empty folder @@ -89,3 +89,14 @@ def test_fetch_remote_datasets_wrapped(): if to_be_removed: shutil.rmtree(expanduser("~/gudhi_data")) shutil.rmtree("./another_fetch_folder_for_test") + +def test_gudhi_data_env(): + # Set environment variable "GUDHI_DATA" + environ["GUDHI_DATA"] = "./test_folder_from_env_var" + bunny_arr = remote.fetch_bunny() + assert bunny_arr.shape == (35947, 3) + assert exists("./test_folder_from_env_var/points/bunny/bunny.npy") + assert exists("./test_folder_from_env_var/points/bunny/bunny.LICENSE") + # Remove test folder + del bunny_arr + shutil.rmtree("./test_folder_from_env_var") -- cgit v1.2.3 From f344700ebee65de9ccc8799f2ec4e1c633ab864e Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 5 May 2022 18:07:52 +0200 Subject: Remove default data home test (because of 'GUDHI_DATA' environment variable option) --- src/python/test/test_remote_datasets.py | 26 +++++--------------------- 1 file changed, 5 insertions(+), 21 deletions(-) diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index af26d77c..6f569fd2 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -53,30 +53,16 @@ def test_print_bunny_license(): shutil.rmtree("./tmp_for_test") def test_fetch_remote_datasets_wrapped(): - # Check if gudhi_data default dir exists already - to_be_removed = not isdir(expanduser("~/gudhi_data")) - # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) + # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default (twice, to test case of already fetched files) + # Default case is not tested because it would fail in case the user sets the 'GUDHI_DATA' environment variable locally for i in range(2): - spiral_2d_arr = remote.fetch_spiral_2d() + spiral_2d_arr = remote.fetch_spiral_2d("./another_fetch_folder_for_test/spiral_2d.npy") assert spiral_2d_arr.shape == (114562, 2) - bunny_arr = remote.fetch_bunny() + bunny_arr = remote.fetch_bunny("./another_fetch_folder_for_test/bunny.npy") assert bunny_arr.shape == (35947, 3) - # Check that default dir was created - assert isdir(expanduser("~/gudhi_data")) - # Check downloaded files - assert exists(expanduser("~/gudhi_data/points/spiral_2d/spiral_2d.npy")) - assert exists(expanduser("~/gudhi_data/points/bunny/bunny.npy")) - assert exists(expanduser("~/gudhi_data/points/bunny/bunny.LICENSE")) - - # Test fetch_spiral_2d and fetch_bunny wrapping functions with data directory different from default - spiral_2d_arr = remote.fetch_spiral_2d("./another_fetch_folder_for_test/spiral_2d.npy") - assert spiral_2d_arr.shape == (114562, 2) - - bunny_arr = remote.fetch_bunny("./another_fetch_folder_for_test/bunny.npy") - assert 
bunny_arr.shape == (35947, 3)
-
+    # Check that the directory was created
     assert isdir("./another_fetch_folder_for_test")
     # Check downloaded files
@@ -86,8 +72,6 @@ def test_fetch_remote_datasets_wrapped():
 
     # Remove test folders
     del spiral_2d_arr
     del bunny_arr
-    if to_be_removed:
-        shutil.rmtree(expanduser("~/gudhi_data"))
     shutil.rmtree("./another_fetch_folder_for_test")
-- cgit v1.2.3

From a809771b6d7381d233656f7a0b02211559189bfe Mon Sep 17 00:00:00 2001
From: Hind-M
Date: Fri, 6 May 2022 09:52:26 +0200
Subject: Delete bunny array before removing the file

---
 src/python/test/test_remote_datasets.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py
index 6f569fd2..cde9fa22 100644
--- a/src/python/test/test_remote_datasets.py
+++ b/src/python/test/test_remote_datasets.py
@@ -38,6 +38,7 @@ def _get_bunny_license_print(accept_license = False):
 
     bunny_arr = remote.fetch_bunny("./tmp_for_test/bunny.npy", accept_license)
     assert bunny_arr.shape == (35947, 3)
+    del bunny_arr
     remove("./tmp_for_test/bunny.npy")
 
     # Reset redirect
-- cgit v1.2.3

From f3c3312995753e35b67887f505312c9ef11e734d Mon Sep 17 00:00:00 2001
From: Vincent Rouvreau
Date: Mon, 16 May 2022 16:43:12 +0200
Subject: Add instructions to compile gudhi in a conda env

---
 .github/how_to_compile_gudhi_in_a_conda_env.md | 97 ++++++++++++++++++++++++++
 1 file changed, 97 insertions(+)
 create mode 100644 .github/how_to_compile_gudhi_in_a_conda_env.md

diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md
new file mode 100644
index 00000000..3691b75e
--- /dev/null
+++ b/.github/how_to_compile_gudhi_in_a_conda_env.md
@@ -0,0 +1,97 @@
+# Install a conda development environment to compile GUDHI
+
+## Install miniconda
+
+Download the [installer](https://docs.conda.io/en/latest/miniconda.html) required by your system and follow the [instructions](https://conda.io/projects/conda/en/latest/user-guide/install/index.html).
+
+## Create a dedicated environment
+
+```bash
+conda install -c conda-forge mamba # installation with mamba is faster
+conda create --name gudhi
+conda activate gudhi
+mamba install -c conda-forge python cmake doxygen eigen cgal-cpp
+```
+
+Some of the requirements are in the gudhi repository (please refer to
+[how to use github to contribute to gudhi](how_to_use_github_to_contribute_to_gudhi.md)).
+In the gudhi repository - let's call it `/workdir/gudhi` - once submodules are initialised:
+
+```bash
+pip install -r ext/gudhi-deploy/build-requirements.txt
+pip install -r ext/gudhi-deploy/test-requirements.txt # pytorch can be painful to install - not mandatory
+```
+
+## Compilation
+
+In order to compile all C++ utilities, examples, benchmarks, unit tests, and the python module:
+```bash
+cd /workdir/gudhi
+rm -rf build
+mkdir build
+cd build
+# To build everything, including examples and benchmarks
+cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_BENCHMARK=ON ..
+```
+
+### Specific python compilation
+
+In order to compile only the python module:
+```bash
+cd /workdir/gudhi
+rm -rf build
+mkdir build
+cd build
+# To build the python module only
+cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX ..
+cd src/python
+# To build python module in parallel
+python setup.py build_ext -j 16 --inplace
+# to clean the build
+# python setup.py clean --all
+```
+
+In order to use the freshly compiled gudhi python module:
+```bash
+PYTHONPATH=/workdir/gudhi/build/src/python python # or ipython, jupyter, ...
+```
+
+### Specific C++ documentation generation
+
+```bash
+cd /workdir/gudhi
+rm -rf build
+mkdir build
+cd build
+# python OFF to prevent python modules search; makes cmake faster
+cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version ..
+make user_version;
+cd version
+mkdir build
+cd build
+# python OFF to prevent python modules search; makes cmake faster
+cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_PYTHON=OFF ..
+make doxygen 2>&1 | tee dox.log
+grep warning dox.log # Warnings can be lost with parallel doxygen
+firefox html/index.html # [optional] To display the C++ documentation. Anything other than firefox can be used.
+```
+
+### Specific python documentation generation
+
+```bash
+cd /workdir/gudhi
+rm -rf build
+mkdir build
+cd build
+# python OFF to prevent python modules search; makes cmake faster - it is the next one in user version that matters
+cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version ..
+make user_version;
+cd version
+mkdir build
+cd build
+cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX ..
+cd python
+# To build python module in parallel
+python setup.py build_ext -j 16 --inplace
+firefox sphinx/index.html # [optional] To display the python documentation. Anything other than firefox can be used.
+```
\ No newline at end of file
-- cgit v1.2.3

From 0b0f67cee1e3841804f5773df5210860931a38fe Mon Sep 17 00:00:00 2001
From: albert-github
Date: Wed, 18 May 2022 19:17:44 +0200
Subject: Documentation: make it easier to build only the documentation

Introduce the possibility:
```
-Dbuild_documentation_only=ON
```
so that only the doxygen documentation can be built and we don't have to resort
to installing all kinds of sub-packages like Boost or manipulate the CMake files.
---
 CMakeLists.txt | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index ac877eea..bff6d74d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,3 +1,6 @@
+
+option(build_documentation_only "Build only the documentation with doxygen." OFF)
+
 cmake_minimum_required(VERSION 3.5)
 
 project(GUDHIdev)
@@ -13,8 +16,10 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES")
 # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path
 set(GUDHI_PYTHON_PATH "src/python")
 
-# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
-include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+if (NOT build_documentation_only)
+  # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
+  include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+endif()
 
 include(GUDHI_compilation_flags)
@@ -52,7 +57,9 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES})
   endforeach()
 endforeach()
 
-add_subdirectory(src/GudhUI)
+if (NOT build_documentation_only)
+  add_subdirectory(src/GudhUI)
+endif()
 
 if (WITH_GUDHI_PYTHON)
   # specific for cython module
-- cgit v1.2.3

From 7fff2e5e725ced71da812d9f0bede1c8e0666e2e Mon Sep 17 00:00:00 2001
From: albert-github
Date: Fri, 20 May 2022 12:07:56 +0200
Subject: Documentation: make it easier to build only the documentation

After review:
- option is now:
```
-DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON
```
- added some instructions to the installation description.
---
 CMakeLists.txt                |  6 +++---
 src/common/doc/installation.h | 11 ++++++++---
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index bff6d74d..47d87cd1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,5 +1,5 @@
 
-option(build_documentation_only "Build only the documentation with doxygen." OFF)
+option(WITH_GUDHI_CPP_DOCUMENTATION_ONLY "Build only the GUDHI C++ documentation (with doxygen)." OFF)
 
 cmake_minimum_required(VERSION 3.5)
 
@@ -16,7 +16,7 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES")
 # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path
 set(GUDHI_PYTHON_PATH "src/python")
 
-if (NOT build_documentation_only)
+if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY)
   # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
   include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
 endif()
@@ -57,7 +57,7 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES})
   endforeach()
 endforeach()
 
-if (NOT build_documentation_only)
+if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY)
   add_subdirectory(src/GudhUI)
 endif()
 
diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h
index 67d026bd..91043983 100644
--- a/src/common/doc/installation.h
+++ b/src/common/doc/installation.h
@@ -41,11 +41,16 @@ make \endverbatim
  * program). If some of the tests are failing, please send us the result of the following command:
  * \verbatim ctest --output-on-failure \endverbatim
  *
- * \subsection documentationgeneration Documentation
- * To generate the documentation, Doxygen is required.
- * Run the following command in a terminal:
+ * \subsection documentationgeneration C++ documentation
+ * To generate the C++ documentation, the doxygen program
+ * is required. Run the following command in a terminal:
  * \verbatim make doxygen \endverbatim
  * Documentation will be generated in a folder named html.
+ *
+ * In case a full setup is not present and only the documentation should be built,
+ * the following command sequence can be used:
+\verbatim cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON ..
+make doxygen\endverbatim
  *
  * \subsection helloworld Hello world !
* The Hello world for GUDHI -- cgit v1.2.3 From 9e3d0f79234fcc27ee10c4a4f36726775ee262f7 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Fri, 20 May 2022 17:54:27 +0200 Subject: Fix unbalanced groups and some doxygen typos --- .../Permutahedral_representation_iterators.h | 10 ++++++---- src/Persistent_cohomology/concept/FilteredComplex.h | 2 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 11 +++++++---- .../include/gudhi/Simplex_tree/Simplex_tree_iterators.h | 17 +++++++++-------- .../Simplex_tree/Simplex_tree_node_explicit_storage.h | 8 ++++---- .../include/gudhi/Simplex_tree/Simplex_tree_siblings.h | 12 ++++++------ .../include/gudhi/Simplex_tree/indexing_tag.h | 2 +- .../include/gudhi/Active_witness/Active_witness.h | 2 +- .../gudhi/Active_witness/Active_witness_iterator.h | 2 +- .../include/gudhi/Strong_witness_complex.h | 2 +- src/Witness_complex/include/gudhi/Witness_complex.h | 2 +- .../include/gudhi/Witness_complex/all_faces_in.h | 2 +- 12 files changed, 39 insertions(+), 33 deletions(-) diff --git a/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Permutahedral_representation_iterators.h b/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Permutahedral_representation_iterators.h index db145741..1a63d2f7 100644 --- a/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Permutahedral_representation_iterators.h +++ b/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Permutahedral_representation_iterators.h @@ -26,12 +26,12 @@ namespace Gudhi { namespace coxeter_triangulation { -/* \addtogroup coxeter_triangulation +/** \addtogroup coxeter_triangulation * Iterator types for Permutahedral_representation * @{ */ -/* \brief Iterator over the vertices of a simplex +/** \brief Iterator over the vertices of a simplex * represented by its permutahedral representation. * * Forward iterator, 'value_type' is Permutahedral_representation::Vertex.*/ @@ -83,7 +83,7 @@ class Vertex_iterator }; // Vertex_iterator /*---------------------------------------------------------------------------*/ -/* \brief Iterator over the k-faces of a simplex +/** \brief Iterator over the k-faces of a simplex * given by its permutahedral representation. * * Forward iterator, value_type is Permutahedral_representation. */ @@ -141,7 +141,7 @@ class Face_iterator : public boost::iterator_facade Dictionary; - /* \brief Set of nodes sharing a same parent in the simplex tree. */ - /* \brief Set of nodes sharing a same parent in the simplex tree. */ + /** \brief Set of nodes sharing a same parent in the simplex tree. */ typedef Simplex_tree_siblings Siblings; @@ -1338,7 +1341,7 @@ class Simplex_tree { } } - /* \private Returns the Simplex_handle composed of the vertex list (from the Simplex_handle), plus the given + /** \private Returns the Simplex_handle composed of the vertex list (from the Simplex_handle), plus the given * Vertex_handle if the Vertex_handle is found in the Simplex_handle children list. 
* Returns null_simplex() if it does not exist */ @@ -1801,7 +1804,7 @@ struct Simplex_tree_options_fast_persistence { static const bool contiguous_vertices = true; }; -/** @} */ // end defgroup simplex_tree +/** @}*/ // end addtogroup simplex_tree } // namespace Gudhi diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h index 394c6ee1..b63a5595 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h @@ -22,12 +22,12 @@ namespace Gudhi { -/* \addtogroup simplex_tree +/** \addtogroup simplex_tree * Iterators and range types for the Simplex_tree. - * @{ + * @{ */ -/* \brief Iterator over the vertices of a simplex +/** \brief Iterator over the vertices of a simplex * in a SimplexTree. * * Forward iterator, 'value_type' is SimplexTree::Vertex_handle.*/ @@ -73,7 +73,7 @@ class Simplex_tree_simplex_vertex_iterator : public boost::iterator_facade< }; /*---------------------------------------------------------------------------*/ -/* \brief Iterator over the simplices of the boundary of a +/** \brief Iterator over the simplices of the boundary of a * simplex. * * Forward iterator, value_type is SimplexTree::Simplex_handle.*/ @@ -181,7 +181,7 @@ class Simplex_tree_boundary_simplex_iterator : public boost::iterator_facade< SimplexTree * st_; // simplex containing the simplicial complex }; -/* \brief Iterator over the simplices of the boundary of a simplex and their opposite vertices. +/** \brief Iterator over the simplices of the boundary of a simplex and their opposite vertices. * * Forward iterator, value_type is std::pair.*/ template @@ -291,7 +291,7 @@ class Simplex_tree_boundary_opposite_vertex_simplex_iterator : public boost::ite }; /*---------------------------------------------------------------------------*/ -/* \brief Iterator over the simplices of a simplicial complex. +/** \brief Iterator over the simplices of a simplicial complex. * * Forward iterator, value_type is SimplexTree::Simplex_handle.*/ template @@ -364,7 +364,7 @@ class Simplex_tree_complex_simplex_iterator : public boost::iterator_facade< SimplexTree * st_; }; -/* \brief Iterator over the simplices of the skeleton of a given +/** \brief Iterator over the simplices of the skeleton of a given * dimension of the simplicial complex. * * Forward iterator, value_type is SimplexTree::Simplex_handle.*/ @@ -447,7 +447,8 @@ class Simplex_tree_skeleton_simplex_iterator : public boost::iterator_facade< int curr_dim_; }; -/* @} */ // end addtogroup simplex_tree +/** @}*/ // end addtogroup simplex_tree + } // namespace Gudhi #endif // SIMPLEX_TREE_SIMPLEX_TREE_ITERATORS_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h index ae140859..b18fa029 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h @@ -15,13 +15,12 @@ namespace Gudhi { -/* \addtogroup simplex_tree +/** \addtogroup simplex_tree * Represents a node of a Simplex_tree. * @{ */ -/* - * \brief Node of a simplex tree with filtration value +/** \brief Node of a simplex tree with filtration value * and simplex key. * * It stores explicitely its own filtration value and its own Simplex_key. 
@@ -54,7 +53,8 @@ struct Simplex_tree_node_explicit_storage : SimplexTree::Filtration_simplex_base Siblings * children_; }; -/* @} */ // end addtogroup simplex_tree +/** @}*/ // end addtogroup simplex_tree + } // namespace Gudhi #endif // SIMPLEX_TREE_SIMPLEX_TREE_NODE_EXPLICIT_STORAGE_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h index ae25d290..d849eeba 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h @@ -20,12 +20,12 @@ namespace Gudhi { -/* \addtogroup simplex_tree +/** \addtogroup simplex_tree * Represents a set of node of a Simplex_tree that share the same parent. * @{ */ -/* \brief Data structure to store a set of nodes in a SimplexTree sharing +/** \brief Data structure to store a set of nodes in a SimplexTree sharing * the same parent node.*/ template class Simplex_tree_siblings { @@ -58,7 +58,7 @@ class Simplex_tree_siblings { members_() { } - /* \brief Constructor with initialized set of members. + /** \brief Constructor with initialized set of members. * * 'members' must be sorted and unique.*/ template @@ -72,8 +72,7 @@ class Simplex_tree_siblings { } } - /* - * \brief Inserts a Node in the set of siblings nodes. + /** \brief Inserts a Node in the set of siblings nodes. * * If already present, assigns the minimal filtration value * between input filtration_value and the value already @@ -114,7 +113,8 @@ class Simplex_tree_siblings { Dictionary members_; }; -/* @} */ // end addtogroup simplex_tree +/** @}*/ // end addtogroup simplex_tree + } // namespace Gudhi #endif // SIMPLEX_TREE_SIMPLEX_TREE_SIBLINGS_H_ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h b/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h index 3e395ae2..29c76e50 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/indexing_tag.h @@ -20,7 +20,7 @@ namespace Gudhi { struct linear_indexing_tag { }; -/* \brief Tag for a zigzag ordering of simplices. */ +/** \brief Tag for a zigzag ordering of simplices. */ // struct zigzag_indexing_tag {}; } // namespace Gudhi diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h index 2ae1d6e0..1aebb045 100644 --- a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h +++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h @@ -18,7 +18,7 @@ namespace Gudhi { namespace witness_complex { - /* \class Active_witness + /** \class Active_witness * \brief Class representing a list of nearest neighbors to a given witness. * \details Every element is a pair of a landmark identifier and the squared distance to it. */ diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h index 4f8fddba..18f19650 100644 --- a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h +++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h @@ -18,7 +18,7 @@ namespace Gudhi { namespace witness_complex { -/* \brief Iterator in the nearest landmark list. +/** \brief Iterator in the nearest landmark list. 
* \details After the iterator reaches the end of the list, * the list is augmented by a (nearest landmark, distance) pair if possible. * If all the landmarks are present in the list, iterator returns the specific end value diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h index b3699f77..ddc0da32 100644 --- a/src/Witness_complex/include/gudhi/Strong_witness_complex.h +++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h @@ -125,7 +125,7 @@ class Strong_witness_complex { //@} private: - /* \brief Adds recursively all the faces of a certain dimension dim-1 witnessed by the same witness. + /** \brief Adds recursively all the faces of a certain dimension dim-1 witnessed by the same witness. * Iterator is needed to know until how far we can take landmarks to form simplexes. * simplex is the prefix of the simplexes to insert. * The landmark pointed by aw_it is added to all formed simplices. diff --git a/src/Witness_complex/include/gudhi/Witness_complex.h b/src/Witness_complex/include/gudhi/Witness_complex.h index d655c7f6..66ae7af2 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex.h +++ b/src/Witness_complex/include/gudhi/Witness_complex.h @@ -127,7 +127,7 @@ class Witness_complex { //@} private: - /* \brief Adds recursively all the faces of a certain dimension dim witnessed by the same witness. + /** \brief Adds recursively all the faces of a certain dimension dim witnessed by the same witness. * Iterator is needed to know until how far we can take landmarks to form simplexes. * simplex is the prefix of the simplexes to insert. * The output value indicates if the witness rests active or not. diff --git a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h index 5845728a..007ab084 100644 --- a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h +++ b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h @@ -11,7 +11,7 @@ #ifndef WITNESS_COMPLEX_ALL_FACES_IN_H_ #define WITNESS_COMPLEX_ALL_FACES_IN_H_ -/* \brief Check if the facets of the k-dimensional simplex witnessed +/** \brief Check if the facets of the k-dimensional simplex witnessed * by witness witness_id are already in the complex. * inserted_vertex is the handle of the (k+1)-th vertex witnessed by witness_id */ -- cgit v1.2.3 From a00ce1990b112aa34f72e5504ae0cfe14f11e292 Mon Sep 17 00:00:00 2001 From: albert-github Date: Sun, 22 May 2022 17:58:57 +0200 Subject: Spelling corrections A number of spelling corrections as reported by the codespell (see: https://github.com/codespell-project/codespell) program and lists. Some remarks: - not considered are grammatical errors - not considered are names in the code although there are a number that could be improved (like `childs` -> `children`) - in the documentation it could be made clearer what are variables and what is running text (e.g. by placing variables in running text between backticks) - some comments are in the French language, I think it would be better to have them in the English (United States version). 
--- src/Alpha_complex/include/gudhi/Alpha_complex.h | 2 +- .../include/gudhi/Bitmap_cubical_complex.h | 2 +- .../include/gudhi/Bitmap_cubical_complex_base.h | 2 +- src/Bottleneck_distance/include/gudhi/Neighbors_finder.h | 2 +- src/Cech_complex/include/gudhi/Miniball.hpp | 2 +- src/Collapse/example/edge_collapse_conserve_persistence.cpp | 2 +- src/Collapse/include/gudhi/Flag_complex_edge_collapser.h | 2 +- .../distance_matrix_edge_collapse_rips_persistence.cpp | 2 +- .../utilities/point_cloud_edge_collapse_rips_persistence.cpp | 2 +- src/Contraction/doc/so3.svg | 2 +- src/Contraction/example/Garland_heckbert/Error_quadric.h | 2 +- src/Contraction/include/gudhi/Edge_contraction.h | 8 ++++---- src/Contraction/include/gudhi/Skeleton_blocker_contractor.h | 6 +++--- .../Coxeter_triangulation/Cell_complex/Hasse_diagram_cell.h | 6 +++--- .../include/gudhi/Functions/Function_affine_plane_in_Rd.h | 4 ++-- .../include/gudhi/Permutahedral_representation/Size_range.h | 2 +- src/GudhUI/todo.txt | 2 +- src/GudhUI/utils/Critical_points.h | 2 +- src/GudhUI/utils/Edge_contractor.h | 2 +- src/GudhUI/utils/Furthest_point_epsilon_net.h | 4 ++-- src/GudhUI/utils/K_nearest_builder.h | 2 +- src/GudhUI/utils/Lloyd_builder.h | 2 +- src/GudhUI/utils/Vertex_collapsor.h | 2 +- src/Nerve_GIC/utilities/km.py.COPYRIGHT | 2 +- .../include/gudhi/Persistence_intervals.h | 4 ++-- .../benchmark/performance_rips_persistence.cpp | 2 +- .../example/custom_persistence_sort.cpp | 2 +- .../example/persistence_from_simple_simplex_tree.cpp | 2 +- .../example/rips_multifield_persistence.cpp | 2 +- .../include/gudhi/Persistent_cohomology.h | 2 +- .../example_one_skeleton_rips_from_correlation_matrix.cpp | 2 +- src/Simplex_tree/example/graph_expansion_with_blocker.cpp | 2 +- src/Simplex_tree/example/simple_simplex_tree.cpp | 2 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 4 ++-- .../gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h | 2 +- .../test/simplex_tree_graph_expansion_unit_test.cpp | 4 ++-- src/Simplex_tree/test/simplex_tree_unit_test.cpp | 4 ++-- .../gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h | 2 +- .../gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h | 8 ++++---- .../include/gudhi/Skeleton_blocker/internal/Trie.h | 2 +- .../iterators/Skeleton_blockers_triangles_iterators.h | 4 ++-- .../include/gudhi/Skeleton_blocker_complex.h | 12 ++++++------ src/Tangential_complex/include/gudhi/Tangential_complex.h | 2 +- src/cmake/modules/FindTBB.cmake | 6 +++--- src/cmake/modules/GUDHI_modules.cmake | 4 ++-- src/cmake/modules/GUDHI_options.cmake | 10 +++++----- src/cmake/modules/GUDHI_third_party_libraries.cmake | 2 +- src/common/include/gudhi/reader_utils.h | 2 +- src/common/include/gudhi/writing_persistence_to_file.h | 4 ++-- src/python/CMakeLists.txt | 6 +++--- ...agram_persistence_from_correlation_matrix_file_example.py | 2 +- src/python/gudhi/hera/wasserstein.cc | 2 +- src/python/gudhi/persistence_graphical_tools.py | 2 +- src/python/gudhi/wasserstein/barycenter.py | 6 +++--- src/python/test/test_simplex_tree.py | 2 +- src/python/test/test_subsampling.py | 4 ++-- 56 files changed, 91 insertions(+), 91 deletions(-) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 028ec9bb..b1a9407b 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -69,7 +69,7 @@ template struct Is_Epeck_D> { static const bool val * \ingroup alpha_complex * * \details - * The data structure is constructing a CGAL 
Delaunay triangulation (for more informations on CGAL Delaunay + * The data structure is constructing a CGAL Delaunay triangulation (for more information on CGAL Delaunay * triangulation, please refer to the corresponding chapter in page http://doc.cgal.org/latest/Triangulation/) from a * range of points or from an OFF file (cf. Points_off_reader). * diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index aa255ec2..51f6a273 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -237,7 +237,7 @@ class Bitmap_cubical_complex : public T { * Filtration_simplex_iterator class provides an iterator though the whole structure in the order of filtration. * Secondary criteria for filtration are: * (1) Dimension of a cube (lower dimensional comes first). - * (2) Position in the data structure (the ones that are earlies in the data structure comes first). + * (2) Position in the data structure (the ones that are earliest in the data structure comes first). **/ class Filtration_simplex_range; diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h index f8f80ded..bafe7981 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h @@ -43,7 +43,7 @@ namespace cubical_complex { * Each cell is represented by a single * bit (in case of black and white bitmaps, or by a single element of a type T * (here T is a filtration type of a bitmap, typically a double). - * All the informations needed for homology and + * All the information needed for homology and * persistent homology computations (like dimension of a cell, boundary and * coboundary elements of a cell, are then obtained from the * position of the element in C. diff --git a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h index c65e6082..1d56f0b4 100644 --- a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h +++ b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h @@ -86,7 +86,7 @@ class Neighbors_finder { }; /** \internal \brief data structure used to find any point (including projections) in V near to a query point from U - * (which can be a projection) in a layered graph layer given as parmeter. + * (which can be a projection) in a layered graph layer given as parameter. * * V points have to be added manually using their index and before the first pull. A neighbor pulled is automatically * removed. 
diff --git a/src/Cech_complex/include/gudhi/Miniball.hpp b/src/Cech_complex/include/gudhi/Miniball.hpp index ce6cbb5b..55387a8a 100644 --- a/src/Cech_complex/include/gudhi/Miniball.hpp +++ b/src/Cech_complex/include/gudhi/Miniball.hpp @@ -1,4 +1,4 @@ -// Copright (C) 1999-2013, Bernd Gaertner +// Copyright (C) 1999-2013, Bernd Gaertner // $Rev: 3581 $ // // This program is free software: you can redistribute it and/or modify diff --git a/src/Collapse/example/edge_collapse_conserve_persistence.cpp b/src/Collapse/example/edge_collapse_conserve_persistence.cpp index b2c55e7a..19960597 100644 --- a/src/Collapse/example/edge_collapse_conserve_persistence.cpp +++ b/src/Collapse/example/edge_collapse_conserve_persistence.cpp @@ -103,7 +103,7 @@ int main(int argc, char* argv[]) { Gudhi::Euclidean_distance()); if (num_edges(proximity_graph) <= 0) { - std::cerr << "Total number of egdes are zero." << std::endl; + std::cerr << "Total number of edges is zero." << std::endl; exit(-1); } diff --git a/src/Collapse/include/gudhi/Flag_complex_edge_collapser.h b/src/Collapse/include/gudhi/Flag_complex_edge_collapser.h index c823901f..d0b3fe4a 100644 --- a/src/Collapse/include/gudhi/Flag_complex_edge_collapser.h +++ b/src/Collapse/include/gudhi/Flag_complex_edge_collapser.h @@ -53,7 +53,7 @@ struct Flag_complex_edge_collapser { #ifdef GUDHI_COLLAPSE_USE_DENSE_ARRAY // Minimal matrix interface // Using this matrix generally helps performance, but the memory use may be excessive for a very sparse graph - // (and in extreme cases the constant initialization of the matrix may start to dominate the runnning time). + // (and in extreme cases the constant initialization of the matrix may start to dominate the running time). // Are there cases where the matrix is too big but a hash table would help? std::vector neighbors_data; void init_neighbors_dense(){ diff --git a/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp b/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp index 11ee5871..38efb9e6 100644 --- a/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp +++ b/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp @@ -45,7 +45,7 @@ int main(int argc, char* argv[]) { min_persistence); Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file(csv_matrix_file); - std::cout << "Read the distance matrix succesfully, of size: " << distances.size() << std::endl; + std::cout << "Read the distance matrix successfully, of size: " << distances.size() << std::endl; Proximity_graph proximity_graph = Gudhi::compute_proximity_graph(boost::irange((size_t)0, distances.size()), diff --git a/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp b/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp index 0eea742c..d8f42ab6 100644 --- a/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp +++ b/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp @@ -77,7 +77,7 @@ int main(int argc, char* argv[]) { Gudhi::Euclidean_distance()); if (num_edges(proximity_graph) <= 0) { - std::cerr << "Total number of egdes are zero." << std::endl; + std::cerr << "Total number of edges is zero." 
<< std::endl; exit(-1); } diff --git a/src/Contraction/doc/so3.svg b/src/Contraction/doc/so3.svg index adea3f38..f10cab98 100644 --- a/src/Contraction/doc/so3.svg +++ b/src/Contraction/doc/so3.svg @@ -177,7 +177,7 @@ x="309.4176" y="300.58682" id="tspan4515-4" - style="text-align:center;text-anchor:middle">Rips complex built uppon these pointsRips complex built upon these points class Error_quadric { * Quadric corresponding to the L2 distance to the plane. * * According to the notation of Garland Heckbert, they - * denote a quadric symetric matrix as : + * denote a quadric symmetric matrix as : * Q = [ q11 q12 q13 q14] * [ q12 q22 q23 q24] * [ q13 q23 q33 q34] diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index 58d627c2..0b43c3b3 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -46,7 +46,7 @@ the operations needed for edge contraction algorithms have polynomial complexity Therefore, the simplification can be done without enumerating the set of simplices that is often non tracktable in high-dimension and is then very efficient (sub-linear with regards to the number of simplices in practice). -A typical application of this package is homology group computation. It is illustrated in the next figure where a Rips complex is built uppon a set of high-dimensional points and +A typical application of this package is homology group computation. It is illustrated in the next figure where a Rips complex is built upon a set of high-dimensional points and simplified with edge contractions. It has initially a big number of simplices (around 20 millions) but simplifying it to a much reduced form with only 15 vertices (and 714 simplices) takes only few seconds on a desktop machine (see the example bellow). One can then compute homology group with a simplicial complex having very few simplices instead of running the homology algorithm on the much bigger initial set of @@ -65,7 +65,7 @@ This class design is policy based and heavily inspired from the similar edge col Four policies can be customized in this package: \li Cost_policy: specify how much cost an edge contraction of a given edge. The edge with lowest cost is iteratively picked and contracted if valid. -\li Valid_contraction_policy: specify if a given edge contraction is valid. For instance, this policy can check the link condition which ensures that the homotopy type is preserved afer the edge contraction. +\li Valid_contraction_policy: specify if a given edge contraction is valid. For instance, this policy can check the link condition which ensures that the homotopy type is preserved after the edge contraction. \li Placement_policy: every time an edge is contracted, its points are merge to one point specified by this policy. This may be the middle of the edge of some more sophisticated point such as the minimum of a cost as in \cite Garland. @@ -92,7 +92,7 @@ Despite this package is able to deal with \a arbitrary simplicial complexes (any it is still \a 65% times faster than the CGAL package which is focused on 2-manifold. The main reason is that few blockers appears during the simplification and hence, the algorithm only have to deal with the graph and not higher-dimensional simplices -(in this case triangles). However, we recall that higher-dimensional simplices are \a implicitely +(in this case triangles). 
However, we recall that higher-dimensional simplices are \a implicitly stored in the \ref skbl data-structure. Hence, one has to store simplices in an external map if some information needs to be associated with them (information that could be a filtration value or an orientation for instance). @@ -153,7 +153,7 @@ void build_rips(ComplexType& complex, double offset){ int main (int argc, char *argv[]) { if (argc!=3){ - std::cerr << "Usage "< { std::size_t id = 0; - // xxx do a parralel for + // xxx do a parallel for for (auto edge : complex_.edge_range()) { complex_[edge].index() = id++; Profile const& profile = create_profile(edge); @@ -474,7 +474,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> { } void update_changed_edges() { - // xxx do a parralel for + // xxx do a parallel for DBG("update edges"); // sequential loop @@ -530,7 +530,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> { // by definition of a blocker // todo uniqument utile pour la link condition - // laisser a l'utilisateur ? booleen update_heap_on_removed_blocker? + // laisser a l'utilisateur ? boolean update_heap_on_removed_blocker? Simplex blocker_copy(*blocker); for (auto x = blocker_copy.begin(); x != blocker_copy.end(); ++x) { for (auto y = x; ++y != blocker_copy.end();) { diff --git a/src/Coxeter_triangulation/include/gudhi/Coxeter_triangulation/Cell_complex/Hasse_diagram_cell.h b/src/Coxeter_triangulation/include/gudhi/Coxeter_triangulation/Cell_complex/Hasse_diagram_cell.h index 59e9a350..9b57da3c 100644 --- a/src/Coxeter_triangulation/include/gudhi/Coxeter_triangulation/Cell_complex/Hasse_diagram_cell.h +++ b/src/Coxeter_triangulation/include/gudhi/Coxeter_triangulation/Cell_complex/Hasse_diagram_cell.h @@ -95,7 +95,7 @@ class Hasse_diagram_cell { deleted_(false) {} /** - * Construcor of a cell of dimension dim having given additional information. + * Constructor of a cell of dimension dim having given additional information. **/ Hasse_diagram_cell(Additional_information ai, int dim) : dimension(dim), additional_info(ai), position(0), deleted_(false) {} @@ -125,7 +125,7 @@ class Hasse_diagram_cell { inline Additional_information& get_additional_information() { return this->additional_info; } /** - * Procedure to retrive position of the cell in the structure. It is used in + * Procedure to retrieve the position of the cell in the structure. It is used in * the implementation of Hasse diagram and set by it. Note that removal of * cell and subsequent call of clean_up_the_structure will change those * positions. 
@@ -186,7 +186,7 @@ class Hasse_diagram_cell { friend std::ostream& operator<<( std::ostream& out, const Hasse_diagram_cell& c) { // cout << "position : " << c.position << ", dimension : " << c.dimension << ", filtration: " << c.filtration << ", - // size of boudary : " << c.boundary.size() << "\n"; + // size of boundary : " << c.boundary.size() << "\n"; out << c.position << " " << c.dimension << " " << c.filtration << std::endl; for (std::size_t bd = 0; bd != c.boundary.size(); ++bd) { // do not write out the cells that has been deleted diff --git a/src/Coxeter_triangulation/include/gudhi/Functions/Function_affine_plane_in_Rd.h b/src/Coxeter_triangulation/include/gudhi/Functions/Function_affine_plane_in_Rd.h index b29f0906..dc6f5f90 100644 --- a/src/Coxeter_triangulation/include/gudhi/Functions/Function_affine_plane_in_Rd.h +++ b/src/Coxeter_triangulation/include/gudhi/Functions/Function_affine_plane_in_Rd.h @@ -51,7 +51,7 @@ struct Function_affine_plane_in_Rd { * plane in the d-dimensional Euclidean space. * * @param[in] normal_matrix A normal matrix of the affine plane. The number of rows should - * correspond to the ambient dimension, the number of columns should corespond to + * correspond to the ambient dimension, the number of columns should correspond to * the size of the normal basis (codimension). * @param[in] offset The offset vector of the affine plane. * The dimension of the vector should be the ambient dimension of the manifold. @@ -66,7 +66,7 @@ struct Function_affine_plane_in_Rd { * plane in the d-dimensional Euclidean space that passes through origin. * * @param[in] normal_matrix A normal matrix of the affine plane. The number of rows should - * correspond to the ambient dimension, the number of columns should corespond to + * correspond to the ambient dimension, the number of columns should correspond to * the size of the normal basis (codimension). */ Function_affine_plane_in_Rd(const Eigen::MatrixXd& normal_matrix) diff --git a/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Size_range.h b/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Size_range.h index c43effc8..6b137744 100644 --- a/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Size_range.h +++ b/src/Coxeter_triangulation/include/gudhi/Permutahedral_representation/Size_range.h @@ -19,7 +19,7 @@ namespace Gudhi { namespace coxeter_triangulation { -/** \brief Auxillary iterator class for sizes of parts in an ordered set partition. +/** \brief Auxiliary iterator class for sizes of parts in an ordered set partition. 
*/ template class Size_iterator diff --git a/src/GudhUI/todo.txt b/src/GudhUI/todo.txt index 19d99a77..e59d06d4 100644 --- a/src/GudhUI/todo.txt +++ b/src/GudhUI/todo.txt @@ -18,5 +18,5 @@ x faire le lien MainWindow - Model -- bug -x bug ordre contraction -> just that first vertex placement dont work great +x bug ordre contraction -> just that first vertex placement doesn't work great x pb construction rips diff --git a/src/GudhUI/utils/Critical_points.h b/src/GudhUI/utils/Critical_points.h index 97e58737..65695434 100644 --- a/src/GudhUI/utils/Critical_points.h +++ b/src/GudhUI/utils/Critical_points.h @@ -103,7 +103,7 @@ template class Critical_points { // reduced to one point -> contractible return 1; else - // we dont know + // we don't know return 2; } diff --git a/src/GudhUI/utils/Edge_contractor.h b/src/GudhUI/utils/Edge_contractor.h index 0707b186..a71d0742 100644 --- a/src/GudhUI/utils/Edge_contractor.h +++ b/src/GudhUI/utils/Edge_contractor.h @@ -65,7 +65,7 @@ template class Edge_contractor { /** * @brief Modify complex to be the expansion of the k-nearest neighbor - * symetric graph. + * symmetric graph. */ Edge_contractor(SkBlComplex& complex, unsigned num_contractions) : complex_(complex), num_contractions_(num_contractions) { diff --git a/src/GudhUI/utils/Furthest_point_epsilon_net.h b/src/GudhUI/utils/Furthest_point_epsilon_net.h index 6eb71071..195d0014 100644 --- a/src/GudhUI/utils/Furthest_point_epsilon_net.h +++ b/src/GudhUI/utils/Furthest_point_epsilon_net.h @@ -27,7 +27,7 @@ template class Furthest_point_epsilon_net { /** * Let V be the set of vertices. - * Initially v0 is one arbitrarly vertex and the set V0 is {v0}. + * Initially v0 is one, arbitrary, vertex and the set V0 is {v0}. * Then Vk is computed as follows. * First we compute the vertex pk that is the furthest from Vk * then Vk = Vk \cup pk. @@ -54,7 +54,7 @@ template class Furthest_point_epsilon_net { /** * @brief Modify complex to be the expansion of the k-nearest neighbor - * symetric graph. + * symmetric graph. */ Furthest_point_epsilon_net(SkBlComplex& complex) : complex_(complex) { diff --git a/src/GudhUI/utils/K_nearest_builder.h b/src/GudhUI/utils/K_nearest_builder.h index 34483e58..454b2587 100644 --- a/src/GudhUI/utils/K_nearest_builder.h +++ b/src/GudhUI/utils/K_nearest_builder.h @@ -41,7 +41,7 @@ template class K_nearest_builder { public: /** * @brief Modify complex to be the expansion of the k-nearest neighbor - * symetric graph. + * symmetric graph. */ K_nearest_builder(SkBlComplex& complex, unsigned k) : complex_(complex) { complex.keep_only_vertices(); diff --git a/src/GudhUI/utils/Lloyd_builder.h b/src/GudhUI/utils/Lloyd_builder.h index c042564f..57e3dc0f 100644 --- a/src/GudhUI/utils/Lloyd_builder.h +++ b/src/GudhUI/utils/Lloyd_builder.h @@ -27,7 +27,7 @@ template class Lloyd_builder { /** * @brief Modify complex to be the expansion of the k-nearest neighbor - * symetric graph. + * symmetric graph. */ Lloyd_builder(SkBlComplex& complex, unsigned num_iterations) : complex_(complex), dim(-1) { if (!complex_.empty()) { diff --git a/src/GudhUI/utils/Vertex_collapsor.h b/src/GudhUI/utils/Vertex_collapsor.h index 030e4bb0..b1c48efd 100644 --- a/src/GudhUI/utils/Vertex_collapsor.h +++ b/src/GudhUI/utils/Vertex_collapsor.h @@ -31,7 +31,7 @@ template class Vertex_collapsor { /** * @brief Modify complex to be the expansion of the k-nearest neighbor - * symetric graph. + * symmetric graph. 
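The construction documented just above is the classical greedy farthest-point sampling; the Python module exposes a comparable routine as `gudhi.choose_n_farthest_points` (exercised in the subsampling tests further down). A standalone NumPy sketch of the greedy rule, for illustration only:

```python
import numpy as np

def farthest_point_sampling(points, n, first=0):
    # V0 = {v0} for an arbitrary v0, then repeatedly add the point
    # p_k that is farthest from the already selected set V_k.
    points = np.asarray(points, dtype=float)
    selected = [first]
    dist2 = np.sum((points - points[first]) ** 2, axis=1)
    for _ in range(1, n):
        pk = int(np.argmax(dist2))   # farthest from V_k
        selected.append(pk)          # V_{k+1} = V_k U {p_k}
        dist2 = np.minimum(dist2, np.sum((points - points[pk]) ** 2, axis=1))
    return selected
```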
*/ Vertex_collapsor(SkBlComplex& complex, size_t num_collapses) : complex_(complex), num_collapses_(num_collapses) { diff --git a/src/Nerve_GIC/utilities/km.py.COPYRIGHT b/src/Nerve_GIC/utilities/km.py.COPYRIGHT index bef7b121..5358d287 100644 --- a/src/Nerve_GIC/utilities/km.py.COPYRIGHT +++ b/src/Nerve_GIC/utilities/km.py.COPYRIGHT @@ -1,7 +1,7 @@ km.py is a fork of https://github.com/MLWave/kepler-mapper. Only the visualization part has been kept (Mapper part has been removed). -This file has te following Copyright : +This file has the following Copyright: The MIT License (MIT) diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals.h b/src/Persistence_representations/include/gudhi/Persistence_intervals.h index a6c1d6f0..f4324cb2 100644 --- a/src/Persistence_representations/include/gudhi/Persistence_intervals.h +++ b/src/Persistence_representations/include/gudhi/Persistence_intervals.h @@ -109,7 +109,7 @@ class Persistence_intervals { std::vector cumulative_histogram_of_lengths(size_t number_of_bins = 10) const; /** - * In this procedure we assume that each barcode is a characteristic function of a hight equal to its length. The + * In this procedure we assume that each barcode is a characteristic function of height equal to its length. The *persistence diagram is a sum of such functions. The procedure below constructs a function that is a * sum of the characteristic functions of persistence intervals. The first two parameters are the range in which the *function is to be computed and the last parameter is the number of bins in @@ -207,7 +207,7 @@ class Persistence_intervals { /** * This is a simple function projecting the persistence intervals to a real number. The function we use here is a sum *of squared lengths of intervals. It can be naturally interpreted as - * sum of step function, where the step hight it equal to the length of the interval. + * a sum of step functions, where the step height is equal to the length of the interval. * At the moment this function is not tested, since it is quite likely to be changed in the future. Given this, when *using it, keep in mind that it * will be most likely changed in the next versions. diff --git a/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp index 030b072a..3bec8830 100644 --- a/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp +++ b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp @@ -49,7 +49,7 @@ void timing_persistence(FilteredComplex & cpx * with a Hasse diagram. The Hasse diagram represents explicitly all * codimension 1 incidence relations in the complex, and hence leads to * a faster computation of persistence because boundaries are precomputed. - * Hovewer, the simplex tree may be constructed directly from a point cloud and + * However, the simplex tree may be constructed directly from a point cloud and * is more compact. * We compute persistent homology with coefficient fields Z/2Z and Z/1223Z.
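The two constructions documented in `Persistence_intervals.h` are easy to prototype: a function that adds, for every interval [b, d], the characteristic function of [b, d] scaled to height d - b, and the projection to the sum of squared interval lengths. A NumPy sketch under invented names (this is not the C++ interface):

```python
import numpy as np

def sum_of_characteristic_functions(intervals, x_min, x_max, number_of_bins=10):
    grid = np.linspace(x_min, x_max, number_of_bins)
    values = np.zeros(number_of_bins)
    for birth, death in intervals:
        values[(grid >= birth) & (grid <= death)] += death - birth
    return grid, values

def sum_of_squared_lengths(intervals):
    return sum((death - birth) ** 2 for birth, death in intervals)

grid, f = sum_of_characteristic_functions([(0.0, 2.0), (1.0, 1.5)], 0.0, 3.0)
print(sum_of_squared_lengths([(0.0, 2.0), (1.0, 1.5)]))  # 2**2 + 0.5**2 = 4.25
```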
* We present also timings for the computation of multi-field persistent diff --git a/src/Persistent_cohomology/example/custom_persistence_sort.cpp b/src/Persistent_cohomology/example/custom_persistence_sort.cpp index 410cd987..bba0b2f7 100644 --- a/src/Persistent_cohomology/example/custom_persistence_sort.cpp +++ b/src/Persistent_cohomology/example/custom_persistence_sort.cpp @@ -33,7 +33,7 @@ using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomolog Gudhi::persistent_cohomology::Field_Zp >; std::vector random_points() { - // Instanciate a random point generator + // Instantiate a random point generator CGAL::Random rng(0); // Generate "points_number" random points in a vector diff --git a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp index bffaabdd..3da6771e 100644 --- a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp +++ b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp @@ -95,7 +95,7 @@ int main(int argc, char * const argv[]) { SimplexVector = {9, 10, 11}; st.insert_simplex_and_subfaces(SimplexVector, 0.3); - // ++ NINETH + // ++ NINTH std::clog << " - INSERT (2,10,12)" << std::endl; SimplexVector = {2, 10, 12}; st.insert_simplex_and_subfaces(SimplexVector, 0.3); diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp index 2edf5bc4..d154bcde 100644 --- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp +++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp @@ -104,7 +104,7 @@ void program_options(int argc, char * argv[] ("min-field-charac,p", po::value(&min_p)->default_value(2), "Minimal characteristic p of the coefficient field Z/pZ.") ("max-field-charac,q", po::value(&max_p)->default_value(1223), - "Minimial characteristic q of the coefficient field Z/pZ.") + "Minimal characteristic q of the coefficient field Z/pZ.") ("min-persistence,m", po::value(&min_persistence), "Minimal lifetime of homology feature to be recorded. Default is 0"); diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index d428e497..2301a66b 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -211,7 +211,7 @@ class Persistent_cohomology { /** \brief Update the cohomology groups under the insertion of an edge. * * The 0-homology is maintained with a simple Union-Find data structure, which - * explains the existance of a specific function of edge insertions. */ + * explains the existence of a specific function of edge insertions. */ void update_cohomology_groups_edge(Simplex_handle sigma) { Simplex_handle u, v; boost::tie(u, v) = cpx_->endpoints(sigma); diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp index 3d2ba54f..3811d1f1 100644 --- a/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp +++ b/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp @@ -40,7 +40,7 @@ int main() { throw "The input matrix is not a correlation matrix. 
The program will now terminate.\n"; } correlations[i][j] = 1 - correlations[i][j]; - // Here we make sure that we will get the treshold value equal to maximal + // Here we make sure that we will get the threshold value equal to maximal // distance in the matrix. if (correlations[i][j] > threshold) threshold = correlations[i][j]; } diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp index df52bf43..eef8b665 100644 --- a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp +++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp @@ -42,7 +42,7 @@ int main(int argc, char* const argv[]) { std::clog << vertex << ", "; } std::clog << "] ( " << stree.filtration(sh); - // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boudaries) + // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries) stree.assign_filtration(sh, stree.filtration(sh) + 1.); std::clog << " + 1. ) = " << result << std::endl; diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp index e8bec596..965711da 100644 --- a/src/Simplex_tree/example/simple_simplex_tree.cpp +++ b/src/Simplex_tree/example/simple_simplex_tree.cpp @@ -129,7 +129,7 @@ int main(int argc, char* const argv[]) { std::clog << " - 3 NOT INSERTED" << std::endl; } - // ++ NINETH + // ++ NINTH std::clog << " * INSERT (3,0)" << std::endl; typeVectorVertex ninethSimplexVector = {3, 0}; returnValue = simplexTree.insert_simplex(ninethSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE)); diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 34bc5ace..629a1f9c 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -965,7 +965,7 @@ class Simplex_tree { // If we reached the end of the vertices, and the simplex has more vertices than the given simplex // => we found a coface - // Add a coface if we wan't the star or if the number of vertices of the current simplex matches with nbVertices + // Add a coface if we want the star or if the number of vertices of the current simplex matches with nbVertices bool addCoface = (star || curr_nbVertices == nbVertices); if (addCoface) cofaces.push_back(simplex); @@ -1491,7 +1491,7 @@ class Simplex_tree { int sh_dimension = dimension(sh); if (sh_dimension >= dimension_) - // Stop browsing as soon as the dimension is reached, no need to go furter + // Stop browsing as soon as the dimension is reached, no need to go further return false; new_dimension = (std::max)(new_dimension, sh_dimension); } diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h index ae140859..ad53710c 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h @@ -24,7 +24,7 @@ namespace Gudhi { * \brief Node of a simplex tree with filtration value * and simplex key. * - * It stores explicitely its own filtration value and its own Simplex_key. + * It stores explicitly its own filtration value and its own Simplex_key. 
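The coface search being touched here is what backs the star and coface queries of the simplex tree: collect a coface either unconditionally (star) or only when its vertex count matches the requested codimension. Assuming the standard GUDHI Python bindings, the distinction reads as follows:

```python
import gudhi

st = gudhi.SimplexTree()
st.insert([0, 1, 2], filtration=1.0)
st.insert([1, 2, 3], filtration=2.0)

# star of [1, 2]: the simplex itself and every simplex containing it
print(st.get_star([1, 2]))
# cofaces of codimension 1: only the triangles containing [1, 2]
print(st.get_cofaces([1, 2], codimension=1))
```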
*/ template struct Simplex_tree_node_explicit_storage : SimplexTree::Filtration_simplex_base, SimplexTree::Key_simplex_base { diff --git a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp index 6d63d8ae..54e23204 100644 --- a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp @@ -93,7 +93,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_with_blockers_3, typeST, li std::clog << vertex << ", "; } std::clog << "] ( " << simplex_tree.filtration(sh); - // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boudaries) + // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries) simplex_tree.assign_filtration(sh, simplex_tree.filtration(sh) + 1.); std::clog << " + 1. ) = " << result << std::endl; @@ -160,7 +160,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_with_blockers_2, typeST, li std::clog << vertex << ", "; } std::clog << "] ( " << simplex_tree.filtration(sh); - // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boudaries) + // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries) simplex_tree.assign_filtration(sh, simplex_tree.filtration(sh) + 1.); std::clog << " + 1. ) = " << result << std::endl; diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp index b18e2ec4..79bb5a93 100644 --- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp @@ -287,7 +287,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var set_and_test_simplex_tree_dim_fil(st, eighthSimplexVector.size(), eighthSimplex.second); BOOST_CHECK(st.num_vertices() == (size_t) 4); - // ++ NINETH + // ++ NINTH std::clog << " - INSERT (3,0)" << std::endl; typeVectorVertex ninethSimplexVector{3, 0}; BOOST_CHECK(ninethSimplexVector.size() == 2); @@ -361,7 +361,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var test_simplex_tree_contains(st, seventhSimplex, 8); // (2,1,0) -> 8 std::clog << "simplex_tree_insertion - eighth - 3" << std::endl; test_simplex_tree_contains(st, eighthSimplex, 3); // (3) -> 3 - std::clog << "simplex_tree_insertion - nineth - 7" << std::endl; + std::clog << "simplex_tree_insertion - ninth - 7" << std::endl; test_simplex_tree_contains(st, ninethSimplex, 7); // (3,0) -> 7 // Display the Simplex_tree - Can not be done in the middle of 2 inserts diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h index 12fe6469..d83c0ab3 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h @@ -134,7 +134,7 @@ class Skeleton_blocker_simplex { } /** - * Substracts a from the simplex. + * Subtracts a from the simplex. 
*/ void difference(const Skeleton_blocker_simplex & a) { std::vector v; diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h index 4c48ff31..5abd64d7 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h @@ -76,8 +76,8 @@ class Skeleton_blocker_sub_complex : public ComplexType { public: /** * Add a vertex 'global' of K to L. When added to L, this vertex will receive - * another number, addresses(global), its local adress. - * return the adress where the vertex lay on L. + * another number, addresses(global), its local address. + * return the address where the vertex lay on L. * The vertex corresponding to 'global' must not be already present * in the complex. */ @@ -174,7 +174,7 @@ class Skeleton_blocker_sub_complex : public ComplexType { // /** // * Allocates a simplex in L corresponding to the simplex s in K - // * with its local adresses and returns an AddressSimplex. + // * with its local addresses and returns an AddressSimplex. // */ // boost::optional get_address(const Root_simplex_handle & s) const; @@ -226,7 +226,7 @@ bool proper_face_in_union( } // Remark: this function should be friend in order to leave get_adresses private -// however doing so seemes currently not possible due to a visual studio bug c2668 +// however doing so seems currently not possible due to a visual studio bug c2668 // "the compiler does not support partial ordering of template functions as specified in the C++ Standard" // http://www.serkey.com/error-c2668-ambiguous-call-to-overloaded-function-bb45ft.html diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h index a43fa034..18ae6a92 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/internal/Trie.h @@ -107,7 +107,7 @@ struct Trie { } /** - * Goes to the root in the trie to consitute simplex + * Goes to the root in the trie to constitute simplex */ void add_vertices_up_to_the_root(Simplex& res) const { res.add_vertex(v); diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h index 37c0b4d3..2c49a1b8 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_triangles_iterators.h @@ -21,7 +21,7 @@ namespace skeleton_blocker { /** * \brief Iterator over the triangles that are * adjacent to a vertex of the simplicial complex. - * \remark Will be removed soon -> dont look + * \remark Will be removed soon -> don't look */ template class Triangle_around_vertex_iterator : public boost::iterator_facade @@ -95,7 +95,7 @@ class Triangle_around_vertex_iterator : public boost::iterator_facade /** * \brief Iterator over the triangles of the * simplicial complex. 
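The global/local addressing documented above (a vertex of K gets a fresh local address when it is added to the sub-complex L) is essentially a bidirectional index map. An illustrative Python sketch, with invented names:

```python
class AddressMap:
    def __init__(self):
        self._local_of_global = {}   # addresses(global) -> local address
        self._global_of_local = []   # local address -> global id

    def add_vertex(self, global_id):
        # the vertex corresponding to 'global' must not be already present
        assert global_id not in self._local_of_global
        local_id = len(self._global_of_local)
        self._local_of_global[global_id] = local_id
        self._global_of_local.append(global_id)
        return local_id              # the address where the vertex lies in L

addresses = AddressMap()
assert addresses.add_vertex(42) == 0
assert addresses.add_vertex(7) == 1
```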
- * \remark Will be removed soon -> dont look + * \remark Will be removed soon -> don't look * */ template diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h index 031bcb9c..8ceaa480 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h @@ -438,7 +438,7 @@ class Skeleton_blocker_complex { } /** - * return the id of a vertex of adress local present in the graph + * return the id of a vertex of address local present in the graph */ Root_vertex_handle get_id(Vertex_handle local) const { assert(0 <= local.vertex && local.vertex < boost::num_vertices(skeleton)); @@ -740,7 +740,7 @@ class Skeleton_blocker_complex { * complex to the smallest flag complex that contains it. */ void remove_blockers() { - // Desallocate the blockers + // Deallocate the blockers while (!blocker_map_.empty()) { delete_blocker(blocker_map_.begin()->second); } @@ -764,8 +764,8 @@ class Skeleton_blocker_complex { public: /** - * Removes the simplex s from the set of blockers - * and desallocate s. + * Removes the simplex sigma from the set of blockers + * and deallocate sigma. */ void delete_blocker(Blocker_handle sigma) { if (visitor) @@ -960,7 +960,7 @@ class Skeleton_blocker_complex { } /* - * @brief returnrs true iff the complex is empty. + * @brief returns true iff the complex is empty. */ bool empty() const { return num_vertices() == 0; @@ -1043,7 +1043,7 @@ class Skeleton_blocker_complex { if (num_vertices() == 1) return true; for (auto vi : vertex_range()) { - // xxx todo faire une methode bool is_in_blocker(Vertex_handle) + // xxx todo create a method: bool is_in_blocker(Vertex_handle) if (blocker_map_.find(vi) == blocker_map_.end()) { // no blocker passes through the vertex, we just need to // check if the current vertex is linked to all others vertices of the complex diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h index f3491f91..cc424810 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -1152,7 +1152,7 @@ class Tangential_complex { #ifdef GUDHI_TC_VERY_VERBOSE std::cerr << "Inserted " << num_inserted_points << " points / " << num_attempts_to_insert_points - << " attemps to compute the star\n"; + << " attempts to compute the star\n"; #endif update_star(i); diff --git a/src/cmake/modules/FindTBB.cmake b/src/cmake/modules/FindTBB.cmake index 13f4d929..e6c42dc7 100644 --- a/src/cmake/modules/FindTBB.cmake +++ b/src/cmake/modules/FindTBB.cmake @@ -34,7 +34,7 @@ # # GvdB: Mac OS X distribution places libraries directly in lib directory. # -# For backwards compatibility, you may explicitely set the CMake variables TBB_ARCHITECTURE and TBB_COMPILER. +# For backwards compatibility, you may explicitly set the CMake variables TBB_ARCHITECTURE and TBB_COMPILER. # TBB_ARCHITECTURE [ ia32 | em64t | itanium ] # which architecture to use # TBB_COMPILER e.g. vc9 or cc3.2.3_libc2.3.2_kernel2.4.21 or cc4.0.1_os10.4.9 @@ -54,8 +54,8 @@ # TBB_MALLOC_DEBUG_LIBRARY, the TBB debug malloc library # TBB_FOUND, If false, don't try to use TBB. 
# TBB_INTERFACE_VERSION, as defined in tbb/tbb_stddef.h -# TBB_MALLOCPROXY_DEBUG_LIBRARY, the TBB debug malloc_proxy library (not included in TBB_LIBRARIES since it's optionnal) -# TBB_MALLOCPROXY_RELEASE_LIBRARY, the TBB release malloc_proxy library (not included in TBB_LIBRARIES since it's optionnal) +# TBB_MALLOCPROXY_DEBUG_LIBRARY, the TBB debug malloc_proxy library (not included in TBB_LIBRARIES since it's optional) +# TBB_MALLOCPROXY_RELEASE_LIBRARY, the TBB release malloc_proxy library (not included in TBB_LIBRARIES since it's optional) include(CheckCXXSourceCompiles) diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index 13248f7e..ec1f756b 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -2,7 +2,7 @@ set(GUDHI_MODULES_FULL_LIST "") function(add_gudhi_module file_path) - option("WITH_MODULE_GUDHI_${file_path}" "Activate/desactivate ${file_path} compilation and installation" ON) + option("WITH_MODULE_GUDHI_${file_path}" "Activate/deactivate ${file_path} compilation and installation" ON) if (WITH_MODULE_GUDHI_${file_path}) set(GUDHI_MODULES ${GUDHI_MODULES} ${file_path} CACHE INTERNAL "GUDHI_MODULES") else() @@ -10,7 +10,7 @@ function(add_gudhi_module file_path) endif() # Required by user_version set(GUDHI_MODULES_FULL_LIST ${GUDHI_MODULES_FULL_LIST} ${file_path} PARENT_SCOPE) - # Include module headers is independant - You may ask for no Alpha complex module but Python interface i.e. + # Include module headers is independent - You may ask for no Alpha complex module but Python interface i.e. if(IS_DIRECTORY ${CMAKE_SOURCE_DIR}/src/${file_path}/include/) include_directories(src/${file_path}/include/) endif() diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake index 3cd0a489..bffb3ffc 100644 --- a/src/cmake/modules/GUDHI_options.cmake +++ b/src/cmake/modules/GUDHI_options.cmake @@ -1,5 +1,5 @@ -option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF) -option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON) -option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON) -option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON) +option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF) +option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installation" OFF) +option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) +option(WITH_GUDHI_TEST "Activate/deactivate examples compilation and installation" ON) +option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 6a94d1f5..6ba822ad 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -174,7 +174,7 @@ if (WITH_GUDHI_PYTHON) message(FATAL_ERROR "ERROR: GUDHI_PYTHON_PATH is not valid.") endif(NOT GUDHI_PYTHON_PATH) - option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. Usefull when setting rpath is not allowed" ON) + option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. 
Useful when setting rpath is not allowed" ON) if(PYTHONINTERP_FOUND AND CYTHON_FOUND) if(SPHINX_FOUND) diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h index 29d5423d..a7d82541 100644 --- a/src/common/include/gudhi/reader_utils.h +++ b/src/common/include/gudhi/reader_utils.h @@ -231,7 +231,7 @@ std::vector> read_lower_triangular_matrix_from_csv std::string line; - // the first line is emtpy, so we ignore it: + // the first line is empty, so we ignore it: std::getline(in, line); std::vector values_in_this_line; result.push_back(values_in_this_line); diff --git a/src/common/include/gudhi/writing_persistence_to_file.h b/src/common/include/gudhi/writing_persistence_to_file.h index 2e36b831..3a0df1a8 100644 --- a/src/common/include/gudhi/writing_persistence_to_file.h +++ b/src/common/include/gudhi/writing_persistence_to_file.h @@ -48,7 +48,7 @@ class Persistence_interval_common { : birth_(birth), death_(death), dimension_(dim), arith_element_(field) {} /** - * Operator to compare two persistence pairs. During the comparision all the + * Operator to compare two persistence pairs. During the comparison all the * fields: birth, death, dimensiona and arith_element_ are taken into account * and they all have to be equal for two pairs to be equal. **/ @@ -65,7 +65,7 @@ class Persistence_interval_common { /** * Operator to compare objects of a type Persistence_interval_common. * One intervals is smaller than the other if it has lower persistence. - * Note that this operator do not take Arith_element into account when doing comparisions. + * Note that this operator do not take Arith_element into account when doing comparisons. **/ bool operator<(const Persistence_interval_common& i2) const { return fabs(this->death_ - this->birth_) < fabs(i2.death_ - i2.birth_); diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 54221151..af0b6115 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -329,9 +329,9 @@ if(PYTHONINTERP_FOUND) if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/") # User warning - Sphinx is a static pages generator, and configured to work fine with user_version - # Images and biblio warnings because not found on developper version + # Images and biblio warnings because not found on developer version if (GUDHI_PYTHON_PATH STREQUAL "src/python") - set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for user version, you run it on developper version. Images and biblio will miss") + set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for user version, you run it on developer version. Images and biblio will miss") endif() # sphinx target requires gudhi.so, because conf.py reads gudhi version from it add_custom_target(sphinx @@ -484,7 +484,7 @@ if(PYTHONINTERP_FOUND) add_gudhi_py_test(test_euclidean_witness_complex) # Datasets generators - add_gudhi_py_test(test_datasets_generators) # TODO separate full python datasets generators in another test file independant from CGAL ? + add_gudhi_py_test(test_datasets_generators) # TODO separate full python datasets generators in another test file independent from CGAL ? 
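The ordering documented in `writing_persistence_to_file.h` compares intervals by their persistence |death - birth| alone and ignores the coefficient-field element. Sketched in Python for illustration:

```python
def persistence(interval):
    birth, death = interval
    return abs(death - birth)

intervals = [(0.0, 3.0), (1.0, 1.2), (0.5, 2.0)]
print(sorted(intervals, key=persistence))
# [(1.0, 1.2), (0.5, 2.0), (0.0, 3.0)]: lower persistence compares smaller
```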
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) diff --git a/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py index ea2eb7e1..0b35dbc5 100755 --- a/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py +++ b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py @@ -40,7 +40,7 @@ parser.add_argument( args = parser.parse_args() if not (-1.0 < args.min_edge_correlation < 1.0): - print("Wrong value of the treshold corelation (should be between -1 and 1).") + print("Wrong value of the threshold correlation (should be between -1 and 1).") sys.exit(1) print("#####################################################################") diff --git a/src/python/gudhi/hera/wasserstein.cc b/src/python/gudhi/hera/wasserstein.cc index 1a21f02f..fa0cf8aa 100644 --- a/src/python/gudhi/hera/wasserstein.cc +++ b/src/python/gudhi/hera/wasserstein.cc @@ -29,7 +29,7 @@ double wasserstein_distance( if(std::isinf(internal_p)) internal_p = hera::get_infinity(); params.internal_p = internal_p; params.delta = delta; - // The extra parameters are purposedly not exposed for now. + // The extra parameters are purposely not exposed for now. return hera::wasserstein_dist(diag1, diag2, params); } diff --git a/src/python/gudhi/persistence_graphical_tools.py b/src/python/gudhi/persistence_graphical_tools.py index 7ed11360..21275cdd 100644 --- a/src/python/gudhi/persistence_graphical_tools.py +++ b/src/python/gudhi/persistence_graphical_tools.py @@ -332,7 +332,7 @@ def plot_persistence_diagram( axes.plot([axis_start, axis_end], [infinity, infinity], linewidth=1.0, color="k", alpha=alpha) # Infinity label yt = axes.get_yticks() - yt = yt[np.where(yt < axis_end)] # to avoid ploting ticklabel higher than infinity + yt = yt[np.where(yt < axis_end)] # to avoid plotting ticklabel higher than infinity yt = np.append(yt, infinity) ytl = ["%.3f" % e for e in yt] # to avoid float precision error ytl[-1] = r"$+\infty$" diff --git a/src/python/gudhi/wasserstein/barycenter.py b/src/python/gudhi/wasserstein/barycenter.py index d67bcde7..bb6e641e 100644 --- a/src/python/gudhi/wasserstein/barycenter.py +++ b/src/python/gudhi/wasserstein/barycenter.py @@ -37,7 +37,7 @@ def lagrangian_barycenter(pdiagset, init=None, verbose=False): :param init: The initial value for barycenter estimate. If ``None``, init is made on a random diagram from the dataset. Otherwise, it can be an ``int`` (then initialization is made on ``pdiagset[init]``) - or a `(n x 2)` ``numpy.array`` enconding a persistence diagram with `n` points. + or a `(n x 2)` ``numpy.array`` encoding a persistence diagram with `n` points. :type init: ``int``, or (n x 2) ``np.array`` :param verbose: if ``True``, returns additional information about the barycenter. :type verbose: boolean @@ -45,7 +45,7 @@ def lagrangian_barycenter(pdiagset, init=None, verbose=False): (local minimum of the energy function). If ``pdiagset`` is empty, returns ``None``. If verbose, returns a couple ``(Y, log)`` where ``Y`` is the barycenter estimate, - and ``log`` is a ``dict`` that contains additional informations: + and ``log`` is a ``dict`` that contains additional information: - `"groupings"`, a list of list of pairs ``(i,j)``.
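The `lagrangian_barycenter` interface documented in the hunk above can be exercised as follows; the two input diagrams are made up, `init=0` starts from `pdiagset[0]`, and `verbose=True` returns the `log` dict whose `"groupings"` entry is described next:

```python
import numpy as np
from gudhi.wasserstein.barycenter import lagrangian_barycenter

dg1 = np.array([[0.2, 0.5], [0.2, 1.2]])
dg2 = np.array([[0.3, 0.6]])

Y, log = lagrangian_barycenter(pdiagset=[dg1, dg2], init=0, verbose=True)
print(log["groupings"])  # one matching per input diagram; -1 encodes the diagonal
```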
Namely, ``G[k] = [...(i, j)...]``, where ``(i,j)`` indicates that `pdiagset[k][i]`` is matched to ``Y[j]`` if ``i = -1`` or ``j = -1``, it means they represent the diagonal. @@ -73,7 +73,7 @@ def lagrangian_barycenter(pdiagset, init=None, verbose=False): nb_iter = 0 - converged = False # stoping criterion + converged = False # stopping criterion while not converged: nb_iter += 1 K = len(Y) # current nb of points in Y (some might be on diagonal) diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py index 688f4fd6..ba8c455f 100755 --- a/src/python/test/test_simplex_tree.py +++ b/src/python/test/test_simplex_tree.py @@ -528,7 +528,7 @@ def test_expansion_with_blocker(): def blocker(simplex): try: - # Block all simplices that countains vertex 6 + # Block all simplices that contains vertex 6 simplex.index(6) print(simplex, ' is blocked') return True diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py index 4019852e..3431f372 100755 --- a/src/python/test/test_subsampling.py +++ b/src/python/test/test_subsampling.py @@ -91,7 +91,7 @@ def test_simple_choose_n_farthest_points_randomed(): assert gudhi.choose_n_farthest_points(points=[], nb_points=1) == [] assert gudhi.choose_n_farthest_points(points=point_set, nb_points=0) == [] - # Go furter than point set on purpose + # Go further than point set on purpose for iter in range(1, 10): sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=iter) for sub in sub_set: @@ -117,7 +117,7 @@ def test_simple_pick_n_random_points(): assert gudhi.pick_n_random_points(points=[], nb_points=1) == [] assert gudhi.pick_n_random_points(points=point_set, nb_points=0) == [] - # Go furter than point set on purpose + # Go further than point set on purpose for iter in range(1, 10): sub_set = gudhi.pick_n_random_points(points=point_set, nb_points=iter) for sub in sub_set: -- cgit v1.2.3 From 7fc251e83602d0bce697dbaa744099e57d6df397 Mon Sep 17 00:00:00 2001 From: albert-github Date: Sun, 22 May 2022 18:41:40 +0200 Subject: Update src/Persistent_cohomology/example/rips_multifield_persistence.cpp Co-authored-by: Marc Glisse --- src/Persistent_cohomology/example/rips_multifield_persistence.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp index d154bcde..ca26a5b9 100644 --- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp +++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp @@ -104,7 +104,7 @@ void program_options(int argc, char * argv[] ("min-field-charac,p", po::value(&min_p)->default_value(2), "Minimal characteristic p of the coefficient field Z/pZ.") ("max-field-charac,q", po::value(&max_p)->default_value(1223), - "Minimal characteristic q of the coefficient field Z/pZ.") + "Maximal characteristic q of the coefficient field Z/pZ.") ("min-persistence,m", po::value(&min_persistence), "Minimal lifetime of homology feature to be recorded. 
Default is 0"); -- cgit v1.2.3 From 6c8024c5d17fe3dc03584f97bc883b7f56f71b7e Mon Sep 17 00:00:00 2001 From: albert-github Date: Sun, 22 May 2022 18:41:52 +0200 Subject: Update src/python/test/test_simplex_tree.py Co-authored-by: Marc Glisse --- src/python/test/test_simplex_tree.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py index ba8c455f..678bc905 100755 --- a/src/python/test/test_simplex_tree.py +++ b/src/python/test/test_simplex_tree.py @@ -528,7 +528,7 @@ def test_expansion_with_blocker(): def blocker(simplex): try: - # Block all simplices that contains vertex 6 + # Block all simplices that contain vertex 6 simplex.index(6) print(simplex, ' is blocked') return True -- cgit v1.2.3 From fa413a02065e03296d9cf375c2b74d5fd381f3bb Mon Sep 17 00:00:00 2001 From: albert-github Date: Sun, 22 May 2022 18:42:34 +0200 Subject: Update src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h Co-authored-by: Marc Glisse --- src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index 51f6a273..4a6af3a4 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -237,7 +237,7 @@ class Bitmap_cubical_complex : public T { * Filtration_simplex_iterator class provides an iterator though the whole structure in the order of filtration. * Secondary criteria for filtration are: * (1) Dimension of a cube (lower dimensional comes first). - * (2) Position in the data structure (the ones that are earliest in the data structure comes first). + * (2) Position in the data structure (the ones that are earliest in the data structure come first). 
**/ class Filtration_simplex_range; -- cgit v1.2.3 From 6e5b348cb02acd16f990df629a9d938ecb3a318f Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Mon, 23 May 2022 10:23:25 +0200 Subject: updated output for cubical complexes --- ext/gudhi-deploy | 2 +- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 +- src/python/gudhi/tensorflow/cubical_layer.py | 18 +++++++++++------- src/python/test/test_diff.py | 4 ++-- 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/ext/gudhi-deploy b/ext/gudhi-deploy index 975d1bff..290ade10 160000 --- a/ext/gudhi-deploy +++ b/ext/gudhi-deploy @@ -1 +1 @@ -Subproject commit 975d1bffb317f3b84bf1a3d576cdfdbf7b45861c +Subproject commit 290ade1086bedbc96a35df886cadecabbf4072e6 diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 881a2950..18b97adf 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -19,7 +19,7 @@ Example of gradient computed from cubical persistence cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0] + dgm = cl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 8db46a8e..e8674d7b 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -58,19 +58,23 @@ class CubicalLayer(tf.keras.layers.Layer): X (TensorFlow variable): pixel values of the cubical complex Returns: + dgms (list of tuple of TensorFlow variables): list of cubical persistence diagrams. The length of this list is the same as that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively.
Note that the essential part is always empty in cubical persistence diagrams, except in homology dimension zero, where the essential part always contains a single point, with abscissa equal to the smallest value in the complex, and infinite ordinate """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation Xflat = tf.reshape(X, [-1]) - Xdim = X.shape - indices_list = _Cubical(Xflat.numpy(), Xdim, self.dimensions) + Xdim, Xflat_numpy = X.shape, Xflat.numpy() + indices_list = _Cubical(Xflat_numpy, Xdim, self.dimensions) + index_essential = np.argmin(Xflat_numpy) # index of minimum pixel value for essential persistence diagram # Get persistence diagram by simply picking the corresponding entries in the image - self.dgms = [tf.reshape(tf.gather(Xflat, indices), [-1,2]) for indices in indices_list] - for idx_dim in range(len(self.min_persistence)): + self.dgms = [] + for idx_dim, dimension in enumerate(self.dimensions): + finite_dgm = tf.reshape(tf.gather(Xflat, indices_list[idx_dim]), [-1,2]) + essential_dgm = tf.reshape(tf.gather(Xflat, index_essential), [-1,1]) if dimension == 0 else tf.zeros([0, 1]) min_pers = self.min_persistence[idx_dim] if min_pers >= 0: - finite_dgm = self.dgms[idx_dim] persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers) - self.dgms[idx_dim] = tf.reshape(tf.gather(finite_dgm, indices=persistent_indices), [-1,2]) + self.dgms.append((tf.reshape(tf.gather(finite_dgm, indices=persistent_indices), [-1,2]), essential_dgm)) + else: + self.dgms.append((finite_dgm, essential_dgm)) return self.dgms diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index 2529cf22..e0a4717c 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -22,7 +22,7 @@ def test_cubical_diff(): cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0] + dgm = cl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert tf.norm(grads[0]-tf.constant([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]]),1) <= 1e-6 @@ -34,7 +34,7 @@ def test_nonsquare_cubical_diff(): cl = CubicalLayer(dimensions=[0]) with tf.GradientTape() as tape: - dgm = cl.call(X)[0] + dgm = cl.call(X)[0][0] loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0]))) grads = tape.gradient(loss, [X]) assert tf.norm(grads[0]-tf.constant([[0.,0.5,-0.5],[0.,0.,0.]]),1) <= 1e-6 -- cgit v1.2.3 From 7941d119872fbd6bc91dca744111b1320f268150 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 23 May 2022 11:57:31 +0200 Subject: doc review: comment on remove build directory --- .github/how_to_compile_gudhi_in_a_conda_env.md | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index 3691b75e..3870381b 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -27,8 +27,7 @@ pip install -r ext/gudhi-deploy/test-requirements.txt # pytorch can be painful In order to compile all c++ utilities, examples, benchmarks, unitary tests, and python module: ```bash cd /workdir/gudhi -rm -rf build -mkdir build +rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build # To build all even examples and benchmarks cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_BENCHMARK=ON .. 
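With the cubical-complex change above, `CubicalLayer.call` returns one `(finite, essential)` pair per requested homology dimension instead of a single array. A usage sketch consistent with the patched documentation; the pixel values are made up, and the import assumes `CubicalLayer` is exposed from `gudhi.tensorflow` as in the files above:

```python
import tensorflow as tf
from gudhi.tensorflow import CubicalLayer

X = tf.Variable([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]], dtype=tf.float32, trainable=True)
cl = CubicalLayer(dimensions=[0])

with tf.GradientTape() as tape:
    finite_dgm, essential_dgm = cl.call(X)[0]  # the pair for dimension 0
    loss = tf.math.reduce_sum(tf.square(.5 * (finite_dgm[:, 1] - finite_dgm[:, 0])))
grads = tape.gradient(loss, [X])               # gradients flow through the finite part
```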
@@ -39,8 +38,7 @@ In order to compile only python module ```bash cd /workdir/gudhi -rm -rf build -mkdir build +rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build @@ -60,8 +58,7 @@ PYTHONPATH=/workdir/gudhi/build/src/python python # or ipython, jupyter, ... ```bash cd /workdir/gudhi -rm -rf build -mkdir build +rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build # python OFF to prevent python modules search makes cmake faster cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version .. @@ -80,8 +77,7 @@ firefox html/index.html # [optional] To display the c++ documentation. Anything ```bash cd /workdir/gudhi -rm -rf build -mkdir build +rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build # python OFF to prevent python modules search makes cmake faster - it is the next one in user version that matters cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version .. -- cgit v1.2.3 From 953cbffc67c2f6e26f10ca3a4538ef56b933dcc3 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 23 May 2022 12:01:04 +0200 Subject: doc review: comment on number of CPU for python module --- .github/how_to_compile_gudhi_in_a_conda_env.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index 3870381b..533d0fd2 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -44,7 +44,7 @@ cd build cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX .. cd src/python # To build python module in parallel -python setup.py build_ext -j 16 --inplace +python setup.py build_ext -j 16 --inplace # 16 is the number of CPUs that are used to compile the python module. Can be any other value. # to clean the build # python setup.py clean --all ``` @@ -88,6 +88,6 @@ cd build cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX .. cd python # To build python module in parallel -python setup.py build_ext -j 16 --inplace +python setup.py build_ext -j 16 --inplace # 16 is the number of CPUs that are used to compile the python module. Can be any other value. firefox sphinx/index.html # [optional] To display the python documentation. Anything else than firefox can be used. ``` \ No newline at end of file -- cgit v1.2.3 From b440e2b6e61bd81dac8f887a7cdac55e7daa2940 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 23 May 2022 12:13:01 +0200 Subject: doc review: remove wrong comment --- .github/how_to_compile_gudhi_in_a_conda_env.md | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index 533d0fd2..0d677c1f 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -40,7 +40,6 @@ In order to compile only python module cd /workdir/gudhi rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build -# To build all even examples and benchmarks cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX ..
cd src/python # To build python module in parallel -- cgit v1.2.3 From 1e548e96191941aba8829bddb45cfe6a28fb9a81 Mon Sep 17 00:00:00 2001 From: albert-github Date: Mon, 23 May 2022 12:27:40 +0200 Subject: Documentation: Some automatic redirects Corrected some more automatic redirects. --- biblio/bibliography.bib | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index 8462e731..0a3ef43d 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -1151,7 +1151,7 @@ language={English} editor = {Lars Arge and J{\'a}nos Pach}, publisher = {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik}, address = {Dagstuhl, Germany}, - URL = {https://drops.dagstuhl.de/opus/volltexte/2015/5098}, + URL = {https://drops.dagstuhl.de/opus/volltexte/2015/5098/}, URN = {urn:nbn:de:0030-drops-50981}, doi = {10.4230/LIPIcs.SOCG.2015.642}, annote = {Keywords: Simplicial complex, Compact data structures, Automaton, NP-hard} @@ -1341,7 +1341,7 @@ doi="10.1007/978-3-030-43408-3_2", editor = {Sergio Cabello and Danny Z. Chen}, publisher = {Schloss Dagstuhl--Leibniz-Zentrum f{\"u}r Informatik}, address = {Dagstuhl, Germany}, - URL = {https://drops.dagstuhl.de/opus/volltexte/2020/12177}, + URL = {https://drops.dagstuhl.de/opus/volltexte/2020/12177/}, URN = {urn:nbn:de:0030-drops-121777}, doi = {10.4230/LIPIcs.SoCG.2020.19}, annote = {Keywords: Computational Topology, Topological Data Analysis, Edge Collapse, Simple Collapse, Persistent homology} -- cgit v1.2.3 From 91b21f6cfdf8e225070514c34bd6fcae296e3d52 Mon Sep 17 00:00:00 2001 From: albert-github Date: Tue, 24 May 2022 10:34:43 +0200 Subject: Documentation: Obsolete CLASS_DIAGRAMS Since doxygen version 1.9.3 the settings `CLASS_DIAGRAMS` and `CLASS_GRAPH` have been integrated into `CLASS_GRAPH`, and `CLASS_DIAGRAMS` is now obsolete. The value of `CLASS_GRAPH` doesn't have to be adjusted in this case, as it was already set to `NO`. --- src/Doxyfile.in | 9 +-------- src/cmake/modules/GUDHI_doxygen_target.cmake | 6 ++++++ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index f76ba2bd..13668993 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -2082,14 +2082,7 @@ EXTERNAL_PAGES = YES #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- -# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram -# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to -# NO turns the diagrams off. Note that this option also works with HAVE_DOT -# disabled, but it is recommended to install and use dot, since it yields more
The diff --git a/src/cmake/modules/GUDHI_doxygen_target.cmake b/src/cmake/modules/GUDHI_doxygen_target.cmake index 0f80b187..e8064466 100644 --- a/src/cmake/modules/GUDHI_doxygen_target.cmake +++ b/src/cmake/modules/GUDHI_doxygen_target.cmake @@ -44,6 +44,12 @@ if(DOXYGEN_FOUND) set(GUDHI_DOXYGEN_UTILS_PATH "utilities/*") endif() + if (DOXYGEN_VERSION VERSION_LESS 1.9.3) + set(GUDHI_DOXYGEN_CLASS_DIAGRAMS "CLASS_DIAGRAMS = NO") + else() + set(GUDHI_DOXYGEN_CLASS_DIAGRAMS "") + endif() + configure_file(${GUDHI_DOXYGEN_SOURCE_PREFIX}/Doxyfile.in "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" @ONLY) add_custom_target(doxygen ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile -- cgit v1.2.3 From dcd4204d62a4c9a4f3d9ebc61341fba25ae19687 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 24 May 2022 11:44:49 +0200 Subject: Use autofunction instead of automodule in doc and add 2d spiral image --- src/python/doc/datasets.rst | 16 ++++++++++++---- src/python/doc/img/spiral_2d.png | Bin 0 -> 279276 bytes 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 src/python/doc/img/spiral_2d.png diff --git a/src/python/doc/datasets.rst b/src/python/doc/datasets.rst index 62b7dca0..d2975533 100644 --- a/src/python/doc/datasets.rst +++ b/src/python/doc/datasets.rst @@ -112,13 +112,21 @@ Fetching datasets We provide some ready-to-use datasets that are not available by default when getting GUDHI, and need to be fetched explicitly. +.. autofunction:: gudhi.datasets.remote.fetch_bunny + .. figure:: ./img/bunny.png :figclass: align-center 3D Stanford bunny with 35947 vertices. -.. automodule:: gudhi.datasets.remote - :members: - :special-members: - :show-inheritance: +.. autofunction:: gudhi.datasets.remote.fetch_spiral_2d + +.. figure:: ./img/spiral_2d.png + :figclass: align-center + + 2D spiral with 114562 vertices. + +.. autofunction:: gudhi.datasets.remote.get_data_home + +.. autofunction:: gudhi.datasets.remote.clear_data_home diff --git a/src/python/doc/img/spiral_2d.png b/src/python/doc/img/spiral_2d.png new file mode 100644 index 00000000..abd247cd Binary files /dev/null and b/src/python/doc/img/spiral_2d.png differ -- cgit v1.2.3 From 4d2f5a1c165204765a04594a9f1f6ba9bcb939ba Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 24 May 2022 11:46:16 +0200 Subject: Specify in doc the use of cache when fetching datasets with wrapping functions --- src/python/gudhi/datasets/remote.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index eac8caf3..d2ae2a75 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -143,6 +143,8 @@ def _get_archive_path(file_path, label): def fetch_spiral_2d(file_path = None): """ Fetch spiral_2d dataset remotely. + Note that if the dataset already exists in the target location, it is not downloaded again, + and the corresponding array is returned from cache. Parameters ---------- @@ -169,6 +171,8 @@ def fetch_bunny(file_path = None, accept_license = False): """ Fetch Stanford bunny dataset remotely and its LICENSE file. This dataset contains 35947 vertices. + Note that if the dataset already exists in the target location, it is not downloaded again, + and the corresponding array is returned from cache. 
Parameters ---------- -- cgit v1.2.3 From dbaeddbfef69770757efcf153998bf997c085465 Mon Sep 17 00:00:00 2001 From: albert-github Date: Tue, 24 May 2022 12:16:02 +0200 Subject: No need to copy Doxyfile.in In my opinion there is no need to copy the `Doxyfile.in` file to the build directory (neither in the User nor the development version). The usage is: ``` configure_file(${GUDHI_DOXYGEN_SOURCE_PREFIX}/Doxyfile.in "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" @ONLY) ``` in the file `src/cmake/modules/GUDHI_doxygen_target.cmake` and we see that this uses the file from its original source directory. --- src/cmake/modules/GUDHI_user_version_target.cmake | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index 9e76c3d9..4487ad86 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -14,8 +14,6 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E make_directory ${GUDHI_USER_VERSION_DIR} COMMENT "user_version creation in ${GUDHI_USER_VERSION_DIR}") -file(COPY "${CMAKE_SOURCE_DIR}/src/Doxyfile.in" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/") - # Generate bib files for Doxygen - cf. root CMakeLists.txt for explanation string(TIMESTAMP GUDHI_VERSION_YEAR "%Y") configure_file(${CMAKE_SOURCE_DIR}/biblio/how_to_cite_gudhi.bib.in "${CMAKE_CURRENT_BINARY_DIR}/biblio/how_to_cite_gudhi.bib" @ONLY) -- cgit v1.2.3 From ce34ee3e5c28c48d605f23332cfa3c10e471a047 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 24 May 2022 15:57:52 +0200 Subject: Make get_data_home function private --- src/python/doc/datasets.rst | 2 -- src/python/gudhi/datasets/remote.py | 6 +++--- src/python/test/test_remote_datasets.py | 4 ++-- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/python/doc/datasets.rst b/src/python/doc/datasets.rst index d2975533..8b0912c4 100644 --- a/src/python/doc/datasets.rst +++ b/src/python/doc/datasets.rst @@ -127,6 +127,4 @@ We provide some ready-to-use datasets that are not available by default when get 2D spiral with 114562 vertices. -.. autofunction:: gudhi.datasets.remote.get_data_home - .. autofunction:: gudhi.datasets.remote.clear_data_home diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index d2ae2a75..7e6f647f 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -16,7 +16,7 @@ import shutil import numpy as np -def get_data_home(data_home = None): +def _get_data_home(data_home = None): """ Return the path of the remote datasets directory. This folder is used to store remotely fetched datasets. @@ -55,7 +55,7 @@ def clear_data_home(data_home = None): If `None` and the 'GUDHI_DATA' environment variable does not exist, the default directory to be removed is set to "~/gudhi_data". """ - data_home = get_data_home(data_home) + data_home = _get_data_home(data_home) shutil.rmtree(data_home) def _checksum_sha256(file_path): @@ -130,7 +130,7 @@ def _get_archive_path(file_path, label): Full path of archive including filename. 
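Taken together, the remote-dataset commits around this point give the following typical usage (illustrative; network access is needed on the first call, and the files are cached under `~/gudhi_data`, or under `$GUDHI_DATA` when that variable is set, so later calls return the cached arrays):

```python
from gudhi.datasets import remote

spiral = remote.fetch_spiral_2d()                # a (114562, 2) array per the docs above
bunny = remote.fetch_bunny(accept_license=True)  # 35947 vertices; skips the LICENSE printout
print(spiral.shape, bunny.shape)
```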
""" if file_path is None: - archive_path = join(get_data_home(), label) + archive_path = join(_get_data_home(), label) dirname = split(archive_path)[0] makedirs(dirname, exist_ok=True) else: diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py index cde9fa22..e5d2de82 100644 --- a/src/python/test/test_remote_datasets.py +++ b/src/python/test/test_remote_datasets.py @@ -18,8 +18,8 @@ from os.path import isdir, expanduser, exists from os import remove, environ def test_data_home(): - # Test get_data_home and clear_data_home on new empty folder - empty_data_home = remote.get_data_home(data_home="empty_folder_for_test") + # Test _get_data_home and clear_data_home on new empty folder + empty_data_home = remote._get_data_home(data_home="empty_folder_for_test") assert isdir(empty_data_home) remote.clear_data_home(data_home=empty_data_home) -- cgit v1.2.3 From 899fb73b33cb6976c39a42ba26a31cf2acde63ee Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 25 May 2022 16:53:04 +0200 Subject: Add info in the doc concerning default data_home and 'GUDHI_DATA' env variable --- src/python/doc/datasets.rst | 3 +++ src/python/gudhi/datasets/remote.py | 13 +++++++++++++ 2 files changed, 16 insertions(+) diff --git a/src/python/doc/datasets.rst b/src/python/doc/datasets.rst index 8b0912c4..2d11a19d 100644 --- a/src/python/doc/datasets.rst +++ b/src/python/doc/datasets.rst @@ -112,6 +112,9 @@ Fetching datasets We provide some ready-to-use datasets that are not available by default when getting GUDHI, and need to be fetched explicitly. +By **default**, the fetched datasets directory is set to a folder named **'gudhi_data'** in the **user home folder**. +Alternatively, it can be set using the **'GUDHI_DATA'** environment variable. + .. autofunction:: gudhi.datasets.remote.fetch_bunny .. figure:: ./img/bunny.png diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 7e6f647f..48bdcfa6 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -143,6 +143,7 @@ def _get_archive_path(file_path, label): def fetch_spiral_2d(file_path = None): """ Fetch spiral_2d dataset remotely. + Note that if the dataset already exists in the target location, it is not downloaded again, and the corresponding array is returned from cache. @@ -150,8 +151,12 @@ def fetch_spiral_2d(file_path = None): ---------- file_path : string Full path of the downloaded file including filename. + Default is None, meaning that it's set to "data_home/points/spiral_2d/spiral_2d.npy". + The "data_home" directory is set by default to "~/gudhi_data", + unless the 'GUDHI_DATA' environment variable is set. + Returns ------- points: numpy array @@ -170,7 +175,9 @@ def fetch_spiral_2d(file_path = None): def fetch_bunny(file_path = None, accept_license = False): """ Fetch Stanford bunny dataset remotely and its LICENSE file. + This dataset contains 35947 vertices. + Note that if the dataset already exists in the target location, it is not downloaded again, and the corresponding array is returned from cache. @@ -178,10 +185,16 @@ def fetch_bunny(file_path = None, accept_license = False): ---------- file_path : string Full path of the downloaded file including filename. + Default is None, meaning that it's set to "data_home/points/bunny/bunny.npy". In this case, the LICENSE file would be downloaded as "data_home/points/bunny/bunny.LICENSE". 
+ + The "data_home" directory is set by default to "~/gudhi_data", + unless the 'GUDHI_DATA' environment variable is set. + accept_license : boolean Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False. Returns -- cgit v1.2.3 From ba96ba348dc9163a51752cab639f0333f5af0533 Mon Sep 17 00:00:00 2001 From: albert-github Date: Thu, 26 May 2022 15:54:43 +0200 Subject: issue #613 [cpp documentation] Footer needs to be generated with a more recent version of doxygen The problem was that the update of the treeview was not called in the footer, though after correcting this some new problems occurred with respect to the treeview (the small pointers ran into the text) and with the in-page table of contents (e.g. in installation.html). - footer.html: call the proper update function by means of `
`, as base the 1.9.4 version served but also older version work. - stylesheet.css, Doxyfile.in: implemented as extra stylesheet and for the "small pointer" problem adjusted the `.arrow` settings - header.html: - placed the `` more as an extra stylesheet as otherwise a second scrollbar would occur - placed the `div id="top">` at the right place - corrected `` tag so it is properly closed (so XML compliant) - corrected setting of `data-topbar` to `data-topbar=true` as HTML5 does not allow attributes without values. --- src/Doxyfile.in | 4 +- src/common/doc/footer.html | 13 +- src/common/doc/header.html | 12 +- src/common/doc/stylesheet.css | 1363 +---------------------------------------- 4 files changed, 24 insertions(+), 1368 deletions(-) mode change 100644 => 100755 src/common/doc/stylesheet.css diff --git a/src/Doxyfile.in b/src/Doxyfile.in index 54ec9078..dc11d217 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -1117,7 +1117,7 @@ HTML_FOOTER = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/footer.html # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_STYLESHEET = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/stylesheet.css +HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets @@ -1130,7 +1130,7 @@ HTML_STYLESHEET = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/stylesheet.css # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_EXTRA_STYLESHEET = +HTML_EXTRA_STYLESHEET = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/stylesheet.css # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note diff --git a/src/common/doc/footer.html b/src/common/doc/footer.html index 4168c6bc..08a2cbd0 100644 --- a/src/common/doc/footer.html +++ b/src/common/doc/footer.html @@ -1,5 +1,9 @@ - + + + + diff --git a/src/common/doc/header.html b/src/common/doc/header.html index 7c20478b..a97e1b2f 100644 --- a/src/common/doc/header.html +++ b/src/common/doc/header.html @@ -8,9 +8,6 @@ $projectname: $title $title - - - @@ -18,13 +15,17 @@ $treeview $search $mathjax + + + $extrastylesheet +
-
diff --git a/src/common/doc/stylesheet.css b/src/common/doc/stylesheet.css old mode 100644 new mode 100755 index 1df177a4..f31f5df4 --- a/src/common/doc/stylesheet.css +++ b/src/common/doc/stylesheet.css @@ -1,1367 +1,18 @@ -/* The standard CSS for doxygen 1.8.6 */ - -body, table, div, p, dl { - font: 400 14px/22px Roboto,sans-serif; -} - -/* @group Heading Levels */ - -h1.groupheader { - font-size: 150%; -} - -.title { - font: 400 14px/28px Roboto,sans-serif; - font-size: 150%; - font-weight: bold; - margin: 10px 2px; -} - -h2.groupheader { - border-bottom: 1px solid #879ECB; - color: #354C7B; - font-size: 150%; - font-weight: normal; - margin-top: 1.75em; - padding-top: 8px; - padding-bottom: 4px; - width: 100%; -} - -h3.groupheader { - font-size: 100%; -} - -h1, h2, h3, h4, h5, h6 { - -webkit-transition: text-shadow 0.5s linear; - -moz-transition: text-shadow 0.5s linear; - -ms-transition: text-shadow 0.5s linear; - -o-transition: text-shadow 0.5s linear; - transition: text-shadow 0.5s linear; - margin-right: 15px; -} - -h1.glow, h2.glow, h3.glow, h4.glow, h5.glow, h6.glow { - text-shadow: 0 0 15px cyan; -} - -dt { - font-weight: bold; -} - -div.multicol { - -moz-column-gap: 1em; - -webkit-column-gap: 1em; - -moz-column-count: 3; - -webkit-column-count: 3; -} - -p.startli, p.startdd { - margin-top: 2px; -} - -p.starttd { - margin-top: 0px; -} - -p.endli { - margin-bottom: 0px; -} - -p.enddd { - margin-bottom: 4px; -} - -p.endtd { - margin-bottom: 2px; -} - -/* @end */ - -caption { - font-weight: bold; -} - -span.legend { - font-size: 70%; - text-align: center; -} - -h3.version { - font-size: 90%; - text-align: center; -} - -div.qindex, div.navtab{ - background-color: #EBEFF6; - border: 1px solid #A3B4D7; - text-align: center; -} - -div.qindex, div.navpath { - width: 100%; - line-height: 140%; -} - -div.navtab { - margin-right: 15px; -} - -/* @group Link Styling */ - -a { - color: #3D578C; - font-weight: normal; - text-decoration: none; -} - -.contents a:visited { - color: #4665A2; -} - -a:hover { - text-decoration: underline; -} - -a.qindex { - font-weight: bold; -} - -a.qindexHL { - font-weight: bold; - background-color: #9CAFD4; - color: #ffffff; - border: 1px double #869DCA; -} - -.contents a.qindexHL:visited { - color: #ffffff; -} - -a.el { - font-weight: bold; -} - -a.elRef { -} - -a.code, a.code:visited, a.line, a.line:visited { - color: #4665A2; -} - -a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited { - color: #4665A2; -} - -/* @end */ - -dl.el { - margin-left: -1cm; -} - -pre.fragment { - border: 1px solid #C4CFE5; - background-color: #FBFCFD; - padding: 4px 6px; - margin: 4px 8px 4px 2px; - overflow: auto; - word-wrap: break-word; - font-size: 9pt; - line-height: 125%; - font-family: monospace, fixed; - font-size: 105%; -} - -div.fragment { - padding: 4px 6px; - margin: 4px 8px 4px 2px; - background-color: #FBFCFD; - border: 1px solid #C4CFE5; -} - -div.line { - font-family: monospace, fixed; - font-size: 13px; - min-height: 13px; - line-height: 1.0; - text-wrap: unrestricted; - white-space: -moz-pre-wrap; /* Moz */ - white-space: -pre-wrap; /* Opera 4-6 */ - white-space: -o-pre-wrap; /* Opera 7 */ - white-space: pre-wrap; /* CSS3 */ - word-wrap: break-word; /* IE 5.5+ */ - text-indent: -53px; - padding-left: 53px; - padding-bottom: 0px; - margin: 0px; - -webkit-transition-property: background-color, box-shadow; - -webkit-transition-duration: 0.5s; - -moz-transition-property: background-color, box-shadow; - -moz-transition-duration: 0.5s; - -ms-transition-property: 
background-color, box-shadow; - -ms-transition-duration: 0.5s; - -o-transition-property: background-color, box-shadow; - -o-transition-duration: 0.5s; - transition-property: background-color, box-shadow; - transition-duration: 0.5s; -} - -div.line.glow { - background-color: cyan; - box-shadow: 0 0 10px cyan; -} - - -span.lineno { - padding-right: 4px; - text-align: right; - border-right: 2px solid #0F0; - background-color: #E8E8E8; - white-space: pre; -} -span.lineno a { - background-color: #D8D8D8; -} - -span.lineno a:hover { - background-color: #C8C8C8; -} - -div.ah { - background-color: black; - font-weight: bold; - color: #ffffff; - margin-bottom: 3px; - margin-top: 3px; - padding: 0.2em; - border: solid thin #333; - border-radius: 0.5em; - -webkit-border-radius: .5em; - -moz-border-radius: .5em; - box-shadow: 2px 2px 3px #999; - -webkit-box-shadow: 2px 2px 3px #999; - -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; - background-image: -webkit-gradient(linear, left top, left bottom, from(#eee), to(#000),color-stop(0.3, #444)); - background-image: -moz-linear-gradient(center top, #eee 0%, #444 40%, #000); -} - -div.groupHeader { - margin-left: 16px; - margin-top: 12px; - font-weight: bold; -} - -div.groupText { - margin-left: 16px; - font-style: italic; -} - -body { - background-color: white; - color: black; - margin: 0; -} - -div.contents { - margin-top: 10px; - margin-left: 12px; - margin-right: 8px; -} - -td.indexkey { - background-color: #EBEFF6; - font-weight: bold; - border: 1px solid #C4CFE5; - margin: 2px 0px 2px 0; - padding: 2px 10px; - white-space: nowrap; - vertical-align: top; -} - -td.indexvalue { - background-color: #EBEFF6; - border: 1px solid #C4CFE5; - padding: 2px 10px; - margin: 2px 0px; -} - -tr.memlist { - background-color: #EEF1F7; -} - -p.formulaDsp { - text-align: center; -} - -img.formulaDsp { - -} - -img.formulaInl { - vertical-align: middle; -} - -div.center { - text-align: center; - margin-top: 0px; - margin-bottom: 0px; - padding: 0px; -} - -div.center img { - border: 0px; -} - -address.footer { - text-align: right; - padding-right: 12px; -} - -img.footer { - border: 0px; - vertical-align: middle; -} - -/* @group Code Colorization */ - -span.keyword { - color: #008000 -} - -span.keywordtype { - color: #604020 -} - -span.keywordflow { - color: #e08000 -} - -span.comment { - color: #800000 -} - -span.preprocessor { - color: #806020 -} - -span.stringliteral { - color: #002080 -} - -span.charliteral { - color: #008080 -} - -span.vhdldigit { - color: #ff00ff -} - -span.vhdlchar { - color: #000000 -} - -span.vhdlkeyword { - color: #700070 -} - -span.vhdllogic { - color: #ff0000 -} - -blockquote { - background-color: #F7F8FB; - border-left: 2px solid #9CAFD4; - margin: 0 24px 0 4px; - padding: 0 12px 0 16px; -} - -/* @end */ - -/* -.search { - color: #003399; - font-weight: bold; -} - -form.search { - margin-bottom: 0px; - margin-top: 0px; -} - -input.search { - font-size: 75%; - color: #000080; - font-weight: normal; - background-color: #e8eef2; -} -*/ - -td.tiny { - font-size: 75%; -} - -.dirtab { - padding: 4px; - border-collapse: collapse; - border: 1px solid #A3B4D7; -} - -th.dirtab { - background: #EBEFF6; - font-weight: bold; -} - -hr { - height: 0px; - border: none; - border-top: 1px solid #4A6AAA; -} - -hr.footer { - height: 1px; -} - -/* @group Member Descriptions */ - -table.memberdecls { - border-spacing: 0px; - padding: 0px; -} - -.memberdecls td, .fieldtable tr { - -webkit-transition-property: background-color, box-shadow; - 
-webkit-transition-duration: 0.5s; - -moz-transition-property: background-color, box-shadow; - -moz-transition-duration: 0.5s; - -ms-transition-property: background-color, box-shadow; - -ms-transition-duration: 0.5s; - -o-transition-property: background-color, box-shadow; - -o-transition-duration: 0.5s; - transition-property: background-color, box-shadow; - transition-duration: 0.5s; -} - -.memberdecls td.glow, .fieldtable tr.glow { - background-color: cyan; - box-shadow: 0 0 15px cyan; -} - -.mdescLeft, .mdescRight, -.memItemLeft, .memItemRight, -.memTemplItemLeft, .memTemplItemRight, .memTemplParams { - background-color: #F9FAFC; - border: none; - margin: 4px; - padding: 1px 0 0 8px; -} - -.mdescLeft, .mdescRight { - padding: 0px 8px 4px 8px; - color: #555; -} - -.memSeparator { - border-bottom: 1px solid #DEE4F0; - line-height: 1px; - margin: 0px; - padding: 0px; -} - -.memItemLeft, .memTemplItemLeft { - white-space: nowrap; -} - -.memItemRight { - width: 100%; -} - -.memTemplParams { - color: #4665A2; - white-space: nowrap; - font-size: 80%; -} - -/* @end */ - -/* @group Member Details */ - -/* Styles for detailed member documentation */ - -.memtemplate { - font-size: 80%; - color: #4665A2; - font-weight: normal; - margin-left: 9px; -} - -.memnav { - background-color: #EBEFF6; - border: 1px solid #A3B4D7; - text-align: center; - margin: 2px; - margin-right: 15px; - padding: 2px; -} - -.mempage { - width: 100%; -} - -.memitem { - padding: 0; - margin-bottom: 10px; - margin-right: 5px; - -webkit-transition: box-shadow 0.5s linear; - -moz-transition: box-shadow 0.5s linear; - -ms-transition: box-shadow 0.5s linear; - -o-transition: box-shadow 0.5s linear; - transition: box-shadow 0.5s linear; - display: table !important; - width: 100%; -} - -.memitem.glow { - box-shadow: 0 0 15px cyan; -} - -.memname { - font-weight: bold; - margin-left: 6px; -} - -.memname td { - vertical-align: bottom; -} - -.memproto, dl.reflist dt { - border-top: 1px solid #A8B8D9; - border-left: 1px solid #A8B8D9; - border-right: 1px solid #A8B8D9; - padding: 6px 0px 6px 0px; - color: #253555; - font-weight: bold; - text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9); - background-image:url('nav_f.png'); - background-repeat:repeat-x; - background-color: #E2E8F2; - /* opera specific markup */ - box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); - border-top-right-radius: 4px; - border-top-left-radius: 4px; - /* firefox specific markup */ - -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px; - -moz-border-radius-topright: 4px; - -moz-border-radius-topleft: 4px; - /* webkit specific markup */ - -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); - -webkit-border-top-right-radius: 4px; - -webkit-border-top-left-radius: 4px; - -} - -.memdoc, dl.reflist dd { - border-bottom: 1px solid #A8B8D9; - border-left: 1px solid #A8B8D9; - border-right: 1px solid #A8B8D9; - padding: 6px 10px 2px 10px; - background-color: #FBFCFD; - border-top-width: 0; - background-image:url('nav_g.png'); - background-repeat:repeat-x; - background-color: #FFFFFF; - /* opera specific markup */ - border-bottom-left-radius: 4px; - border-bottom-right-radius: 4px; - box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); - /* firefox specific markup */ - -moz-border-radius-bottomleft: 4px; - -moz-border-radius-bottomright: 4px; - -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px; - /* webkit specific markup */ - -webkit-border-bottom-left-radius: 4px; - -webkit-border-bottom-right-radius: 4px; - -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); -} - -dl.reflist dt { - 
padding: 5px; -} - -dl.reflist dd { - margin: 0px 0px 10px 0px; - padding: 5px; -} - -.paramkey { - text-align: right; -} - -.paramtype { - white-space: nowrap; -} - -.paramname { - color: #602020; - white-space: nowrap; -} -.paramname em { - font-style: normal; -} -.paramname code { - line-height: 14px; -} - -.params, .retval, .exception, .tparams { - margin-left: 0px; - padding-left: 0px; -} - -.params .paramname, .retval .paramname { - font-weight: bold; - vertical-align: top; -} - -.params .paramtype { - font-style: italic; - vertical-align: top; -} - -.params .paramdir { - font-family: "courier new",courier,monospace; - vertical-align: top; -} - -table.mlabels { - border-spacing: 0px; -} - -td.mlabels-left { - width: 100%; - padding: 0px; -} - -td.mlabels-right { - vertical-align: bottom; - padding: 0px; - white-space: nowrap; -} - -span.mlabels { - margin-left: 8px; -} - -span.mlabel { - background-color: #728DC1; - border-top:1px solid #5373B4; - border-left:1px solid #5373B4; - border-right:1px solid #C4CFE5; - border-bottom:1px solid #C4CFE5; - text-shadow: none; - color: white; - margin-right: 4px; - padding: 2px 3px; - border-radius: 3px; - font-size: 7pt; - white-space: nowrap; - vertical-align: middle; -} - - - -/* @end */ - -/* these are for tree view when not used as main index */ - -div.directory { - margin: 10px 0px; - border-top: 1px solid #A8B8D9; - border-bottom: 1px solid #A8B8D9; - width: 100%; -} - -.directory table { - border-collapse:collapse; -} - -.directory td { - margin: 0px; - padding: 0px; - vertical-align: top; -} - -.directory td.entry { - white-space: nowrap; - padding-right: 6px; - padding-top: 3px; -} - -.directory td.entry a { - outline:none; -} - -.directory td.entry a img { - border: none; -} - -.directory td.desc { - width: 100%; - padding-left: 6px; - padding-right: 6px; - padding-top: 3px; - border-left: 1px solid rgba(0,0,0,0.05); -} - -.directory tr.even { - padding-left: 6px; - background-color: #F7F8FB; -} - -.directory img { - vertical-align: -30%; -} - -.directory .levels { - white-space: nowrap; - width: 100%; - text-align: right; - font-size: 9pt; -} - -.directory .levels span { - cursor: pointer; - padding-left: 2px; - padding-right: 2px; - color: #3D578C; -} - -div.dynheader { - margin-top: 8px; - -webkit-touch-callout: none; - -webkit-user-select: none; - -khtml-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - user-select: none; -} - -address { - font-style: normal; - color: #2A3D61; -} - -table.doxtable { - border-collapse:collapse; - margin-top: 4px; - margin-bottom: 4px; -} - -table.doxtable td, table.doxtable th { - border: 1px solid #2D4068; - padding: 3px 7px 2px; -} - -table.doxtable th { - background-color: #374F7F; - color: #FFFFFF; - font-size: 110%; - padding-bottom: 4px; - padding-top: 5px; -} - -table.fieldtable { - /*width: 100%;*/ - margin-bottom: 10px; - border: 1px solid #A8B8D9; - border-spacing: 0px; - -moz-border-radius: 4px; - -webkit-border-radius: 4px; - border-radius: 4px; - -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; - -webkit-box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15); - box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15); -} - -.fieldtable td, .fieldtable th { - padding: 3px 7px 2px; -} - -.fieldtable td.fieldtype, .fieldtable td.fieldname { - white-space: nowrap; - border-right: 1px solid #A8B8D9; - border-bottom: 1px solid #A8B8D9; - vertical-align: top; -} - -.fieldtable td.fieldname { - padding-top: 3px; -} - -.fieldtable td.fielddoc { - border-bottom: 1px solid #A8B8D9; - /*width: 
100%;*/ -} - -.fieldtable td.fielddoc p:first-child { - margin-top: 0px; -} - -.fieldtable td.fielddoc p:last-child { - margin-bottom: 2px; -} - -.fieldtable tr:last-child td { - border-bottom: none; -} - -.fieldtable th { - background-image:url('nav_f.png'); - background-repeat:repeat-x; - background-color: #E2E8F2; - font-size: 90%; - color: #253555; - padding-bottom: 4px; - padding-top: 5px; - text-align:left; - -moz-border-radius-topleft: 4px; - -moz-border-radius-topright: 4px; - -webkit-border-top-left-radius: 4px; - -webkit-border-top-right-radius: 4px; - border-top-left-radius: 4px; - border-top-right-radius: 4px; - border-bottom: 1px solid #A8B8D9; -} - - -.tabsearch { - top: 0px; - left: 10px; - height: 36px; - background-image: url('tab_b.png'); - z-index: 101; - overflow: hidden; - font-size: 13px; -} - -.navpath ul -{ - font-size: 11px; - background-image:url('tab_b.png'); - background-repeat:repeat-x; - background-position: 0 -5px; - height:30px; - line-height:30px; - color:#8AA0CC; - border:solid 1px #C2CDE4; - overflow:hidden; - margin:0px; - padding:0px; -} - -.navpath li -{ - list-style-type:none; - float:left; - padding-left:10px; - padding-right:15px; - background-image:url('bc_s.png'); - background-repeat:no-repeat; - background-position:right; - color:#364D7C; -} - -.navpath li.navelem a -{ - height:32px; - display:block; - text-decoration: none; - outline: none; - color: #283A5D; - font-family: 'Lucida Grande',Geneva,Helvetica,Arial,sans-serif; - text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9); - text-decoration: none; -} - -.navpath li.navelem a:hover -{ - color:#6884BD; -} - -.navpath li.footer -{ - list-style-type:none; - float:right; - padding-left:10px; - padding-right:15px; - background-image:none; - background-repeat:no-repeat; - background-position:right; - color:#364D7C; - font-size: 8pt; -} - - -div.summary -{ - float: right; - font-size: 8pt; - padding-right: 5px; - width: 50%; - text-align: right; -} - -div.summary a -{ - white-space: nowrap; -} - -div.ingroups -{ - font-size: 8pt; - width: 50%; - text-align: left; -} - -div.ingroups a -{ - white-space: nowrap; -} - -div.header -{ - background-image:url('nav_h.png'); - background-repeat:repeat-x; - background-color: #F9FAFC; - margin: 0px; - border-bottom: 1px solid #C4CFE5; -} - -div.headertitle -{ - padding: 5px 5px 5px 10px; -} - -dl -{ - padding: 0 0 0 10px; -} - -/* dl.note, dl.warning, dl.attention, dl.pre, dl.post, dl.invariant, dl.deprecated, dl.todo, dl.test, dl.bug */ -dl.section -{ - margin-left: 0px; - padding-left: 0px; -} - -dl.note -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #D0C000; -} - -dl.warning, dl.attention -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #FF0000; -} - -dl.pre, dl.post, dl.invariant -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #00D000; -} - -dl.deprecated -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #505050; -} - -dl.todo -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #00C0E0; -} - -dl.test -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #3030E0; -} - -dl.bug -{ - margin-left:-7px; - padding-left: 3px; - border-left:4px solid; - border-color: #C08050; -} - -dl.section dd { - margin-bottom: 6px; -} - - -#projectlogo -{ - text-align: center; - vertical-align: bottom; - border-collapse: separate; -} - -#projectlogo img -{ - border: 0px 
none; -} - #projectname { - border: 0px none; - font: 300% Tahoma, Arial,sans-serif; - margin: 0px; - padding: 2px 0px; + border: 0px none; } - #projectbrief { - font: 60% Tahoma, Arial,sans-serif; - margin: 0px; - padding: 0px; + font: 60% Tahoma, Arial,sans-serif; } - #projectnumber { - font: 80% Tahoma, Arial,sans-serif; - margin: 0px; - padding: 0px; -} - -#titlearea -{ - padding: 0px; - margin: 0px; - width: 100%; - border-bottom: 1px solid #5373B4; -} - -.image -{ - text-align: center; -} - -.dotgraph -{ - text-align: center; -} - -.mscgraph -{ - text-align: center; -} - -.diagraph -{ - text-align: center; -} - -.caption -{ - font-weight: bold; -} - -div.zoom -{ - border: 1px solid #90A5CE; -} - -dl.citelist { - margin-bottom:50px; -} - -dl.citelist dt { - color:#334975; - float:left; - font-weight:bold; - margin-right:10px; - padding:5px; -} - -dl.citelist dd { - margin:2px 0; - padding:5px 0; -} - -div.toc { - padding: 14px 25px; - background-color: #F4F6FA; - border: 1px solid #D8DFEE; - border-radius: 7px 7px 7px 7px; - float: right; - height: auto; - margin: 0 20px 10px 10px; - width: 200px; -} - -div.toc li { - background: url("bdwn.png") no-repeat scroll 0 5px transparent; - font: 10px/1.2 Verdana,DejaVu Sans,Geneva,sans-serif; - margin-top: 5px; - padding-left: 10px; - padding-top: 2px; -} - -div.toc h3 { - font: bold 12px/1.2 Arial,FreeSans,sans-serif; - color: #4665A2; - border-bottom: 0 none; - margin: 0; -} - -div.toc ul { - list-style: none outside none; - border: medium none; - padding: 0px; -} - -div.toc li.level1 { - margin-left: 0px; -} - -div.toc li.level2 { - margin-left: 15px; -} - -div.toc li.level3 { - margin-left: 30px; -} - -div.toc li.level4 { - margin-left: 45px; -} - -.inherit_header { - font-weight: bold; - color: gray; - cursor: pointer; - -webkit-touch-callout: none; - -webkit-user-select: none; - -khtml-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - user-select: none; -} - -.inherit_header td { - padding: 6px 0px 2px 5px; -} - -.inherit { - display: none; -} - -tr.heading h2 { - margin-top: 12px; - margin-bottom: 4px; -} - -/* tooltip related style info */ - -.ttc { - position: absolute; - display: none; -} - -#powerTip { - cursor: default; - white-space: nowrap; - background-color: white; - border: 1px solid gray; - border-radius: 4px 4px 4px 4px; - box-shadow: 1px 1px 7px gray; - display: none; - font-size: smaller; - max-width: 80%; - opacity: 0.9; - padding: 1ex 1em 1em; - position: absolute; - z-index: 2147483647; -} - -#powerTip div.ttdoc { - color: grey; - font-style: italic; -} - -#powerTip div.ttname a { - font-weight: bold; -} - -#powerTip div.ttname { - font-weight: bold; -} - -#powerTip div.ttdeci { - color: #006318; -} - -#powerTip div { - margin: 0px; - padding: 0px; - font: 12px/16px Roboto,sans-serif; -} - -#powerTip:before, #powerTip:after { - content: ""; - position: absolute; - margin: 0px; -} - -#powerTip.n:after, #powerTip.n:before, -#powerTip.s:after, #powerTip.s:before, -#powerTip.w:after, #powerTip.w:before, -#powerTip.e:after, #powerTip.e:before, -#powerTip.ne:after, #powerTip.ne:before, -#powerTip.se:after, #powerTip.se:before, -#powerTip.nw:after, #powerTip.nw:before, -#powerTip.sw:after, #powerTip.sw:before { - border: solid transparent; - content: " "; - height: 0; - width: 0; - position: absolute; -} - -#powerTip.n:after, #powerTip.s:after, -#powerTip.w:after, #powerTip.e:after, -#powerTip.nw:after, #powerTip.ne:after, -#powerTip.sw:after, #powerTip.se:after { - border-color: rgba(255, 255, 255, 0); 
-} - -#powerTip.n:before, #powerTip.s:before, -#powerTip.w:before, #powerTip.e:before, -#powerTip.nw:before, #powerTip.ne:before, -#powerTip.sw:before, #powerTip.se:before { - border-color: rgba(128, 128, 128, 0); -} - -#powerTip.n:after, #powerTip.n:before, -#powerTip.ne:after, #powerTip.ne:before, -#powerTip.nw:after, #powerTip.nw:before { - top: 100%; -} - -#powerTip.n:after, #powerTip.ne:after, #powerTip.nw:after { - border-top-color: #ffffff; - border-width: 10px; - margin: 0px -10px; -} -#powerTip.n:before { - border-top-color: #808080; - border-width: 11px; - margin: 0px -11px; -} -#powerTip.n:after, #powerTip.n:before { - left: 50%; -} - -#powerTip.nw:after, #powerTip.nw:before { - right: 14px; -} - -#powerTip.ne:after, #powerTip.ne:before { - left: 14px; -} - -#powerTip.s:after, #powerTip.s:before, -#powerTip.se:after, #powerTip.se:before, -#powerTip.sw:after, #powerTip.sw:before { - bottom: 100%; -} - -#powerTip.s:after, #powerTip.se:after, #powerTip.sw:after { - border-bottom-color: #ffffff; - border-width: 10px; - margin: 0px -10px; -} - -#powerTip.s:before, #powerTip.se:before, #powerTip.sw:before { - border-bottom-color: #808080; - border-width: 11px; - margin: 0px -11px; -} - -#powerTip.s:after, #powerTip.s:before { - left: 50%; -} - -#powerTip.sw:after, #powerTip.sw:before { - right: 14px; -} - -#powerTip.se:after, #powerTip.se:before { - left: 14px; -} - -#powerTip.e:after, #powerTip.e:before { - left: 100%; -} -#powerTip.e:after { - border-left-color: #ffffff; - border-width: 10px; - top: 50%; - margin-top: -10px; -} -#powerTip.e:before { - border-left-color: #808080; - border-width: 11px; - top: 50%; - margin-top: -11px; -} - -#powerTip.w:after, #powerTip.w:before { - right: 100%; -} -#powerTip.w:after { - border-right-color: #ffffff; - border-width: 10px; - top: 50%; - margin-top: -10px; -} -#powerTip.w:before { - border-right-color: #808080; - border-width: 11px; - top: 50%; - margin-top: -11px; + font: 80% Tahoma, Arial,sans-serif; } - -@media print +.arrow { - #top { display: none; } - #side-nav { display: none; } - #nav-path { display: none; } - body { overflow:visible; } - h1, h2, h3, h4, h5, h6 { page-break-after: avoid; } - .summary { display: none; } - .memitem { page-break-inside: avoid; } - #doc-content - { - margin-left:0 !important; - height:auto !important; - width:auto !important; - overflow:inherit; - display:inline; - } + width: auto; + height: auto; + padding-left: 16px; } - -- cgit v1.2.3 From 6da9c23287bd98f018b3817d71e6c78fbd6665a7 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Wed, 1 Jun 2022 18:51:53 +0200 Subject: fix doc --- src/python/gudhi/tensorflow/cubical_layer.py | 2 +- src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py | 2 +- src/python/gudhi/tensorflow/rips_layer.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index e8674d7b..918ff43e 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -58,7 +58,7 @@ class CubicalLayer(tf.keras.layers.Layer): X (TensorFlow variable): pixel values of the cubical complex Returns: - dgms (list of tuple of TensorFlow variables): list of cubical persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. 
Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively. Note that the essential part is always empty in cubical persistence diagrams, except in homology dimension zero, where the essential part always contains a single point, with abscissa equal to the smallest value in the complex, and infinite ordinate + List[Tuple[tf.Tensor,tf.Tensor]]: List of cubical persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively. Note that the essential part is always empty in cubical persistence diagrams, except in homology dimension zero, where the essential part always contains a single point, with abscissa equal to the smallest value in the complex, and infinite ordinate """ # Compute pixels associated to positive and negative simplices # Don't compute gradient for this operation diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index a2e48d8a..5606d1a4 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -65,7 +65,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): F (TensorFlow variable): filter function values over the vertices of the simplex tree. The ith entry of F corresponds to vertex i in self.simplextree Returns: - dgms (list of tuple of TensorFlow variables): list of lower-star persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively + List[Tuple[tf.Tensor,tf.Tensor]]: List of lower-star persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. 
Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Don't try to compute gradients for the vertex pairs indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions) diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index b5b58ab4..97c2692d 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -63,7 +63,7 @@ class RipsLayer(tf.keras.layers.Layer): X (TensorFlow variable): point cloud of shape [number of points, number of dimensions] Returns: - dgms (list of tuple of TensorFlow variables): list of Rips persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively + List[Tuple[tf.Tensor,tf.Tensor]]: List of Rips persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Compute distance matrix DX = tf.norm(tf.expand_dims(X, 1)-tf.expand_dims(X, 0), axis=2) -- cgit v1.2.3 From c3199271e3e6cff0ae4e134c0409c9bb604fa1be Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Tue, 7 Jun 2022 10:47:40 +0200 Subject: fix doc + added homology field coeff --- src/python/gudhi/tensorflow/cubical_layer.py | 10 ++++++---- .../gudhi/tensorflow/lower_star_simplex_tree_layer.py | 15 +++++++++------ src/python/gudhi/tensorflow/rips_layer.py | 10 ++++++---- 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 918ff43e..16dc7d35 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -8,7 +8,7 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the pixel values. 
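# The hunks in this commit thread the new homology_coeff_field argument from the
# public layers down to private helpers like the one below. A minimal end-to-end
# sketch of the resulting API, assuming the constructor signature added in this
# patch and that CubicalLayer is importable from gudhi.tensorflow (the input
# values are made up for illustration):
#
#     import numpy as np
#     import tensorflow as tf
#     from gudhi.tensorflow import CubicalLayer
#
#     X = tf.Variable(np.random.rand(10, 10), dtype=tf.float32, trainable=True)
#     layer = CubicalLayer(dimensions=[0], homology_coeff_field=2)  # Z/2 coefficients
#     with tf.GradientTape() as tape:
#         finite0, essential0 = layer(X)[0]  # one (finite, essential) pair per dimension
#         loss = tf.math.reduce_sum(finite0[:, 1] - finite0[:, 0])  # total persistence
#     gradient = tape.gradient(loss, [X])  # flows back to the pixel values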
-def _Cubical(Xflat, Xdim, dimensions): +def _Cubical(Xflat, Xdim, dimensions, homology_coeff_field=11): # Parameters: Xflat (flattened image), # Xdim (shape of non-flattened image) # dimensions (homology dimensions) @@ -16,7 +16,7 @@ def _Cubical(Xflat, Xdim, dimensions): # Compute the persistence pairs with Gudhi # We reverse the dimensions because CubicalComplex uses Fortran ordering cc = CubicalComplex(dimensions=Xdim[::-1], top_dimensional_cells=Xflat) - cc.compute_persistence() + cc.compute_persistence(homology_coeff_field=homology_coeff_field) # Retrieve and ouput image indices/pixels corresponding to positive and negative simplices cof_pp = cc.cofaces_of_persistence_pairs() @@ -37,17 +37,19 @@ class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing the persistent homology of a cubical complex """ - def __init__(self, dimensions, min_persistence=None, **kwargs): + def __init__(self, dimensions, min_persistence=None, homology_coeff_field=11, **kwargs): """ Constructor for the CubicalLayer class Parameters: dimensions (List[int]): homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) + homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337. """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions self.min_persistence = min_persistence if min_persistence != None else [0.] * len(self.dimensions) + self.hcf = homology_coeff_field assert len(self.min_persistence) == len(self.dimensions) def call(self, X): @@ -64,7 +66,7 @@ class CubicalLayer(tf.keras.layers.Layer): # Don't compute gradient for this operation Xflat = tf.reshape(X, [-1]) Xdim, Xflat_numpy = X.shape, Xflat.numpy() - indices_list = _Cubical(Xflat_numpy, Xdim, self.dimensions) + indices_list = _Cubical(Xflat_numpy, Xdim, self.dimensions, self.hcf) index_essential = np.argmin(Xflat_numpy) # index of minimum pixel value for essential persistence diagram # Get persistence diagram by simply picking the corresponding entries in the image self.dgms = [] diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index 5606d1a4..e0a5b457 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -7,10 +7,11 @@ import tensorflow as tf # The parameters of the model are the vertex function values of the simplex tree. -def _LowerStarSimplexTree(simplextree, filtration, dimensions): +def _LowerStarSimplexTree(simplextree, filtration, dimensions, homology_coeff_field=11): # Parameters: simplextree (simplex tree on which to compute persistence) # filtration (function values on the vertices of st), # dimensions (homology dimensions), + # homology_coeff_field (homology field coefficient) simplextree.reset_filtration(-np.inf, 0) @@ -20,7 +21,7 @@ def _LowerStarSimplexTree(simplextree, filtration, dimensions): simplextree.make_filtration_non_decreasing() # Compute persistence diagram - simplextree.compute_persistence() + simplextree.compute_persistence(homology_coeff_field=homology_coeff_field) # Get vertex pairs for optimization. 
First, get all simplex pairs pairs = simplextree.lower_star_persistence_generators() @@ -42,19 +43,21 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree """ - def __init__(self, simplextree, dimensions, min_persistence=None, **kwargs): + def __init__(self, simplextree, dimensions, min_persistence=None, homology_coeff_field=11, **kwargs): """ Constructor for the LowerStarSimplexTreeLayer class Parameters: - simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n = number of vertices. Note that its filtration values are modified in each call of the class. + simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n-1, where n is its number of vertices. Note that its filtration values are modified in each call of the class. dimensions (List[int]): homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) + homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337. """ super().__init__(dynamic=True, **kwargs) self.dimensions = dimensions self.simplextree = simplextree - self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + self.hcf = homology_coeff_field assert len(self.min_persistence) == len(self.dimensions) def call(self, filtration): @@ -68,7 +71,7 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): List[Tuple[tf.Tensor,tf.Tensor]]: List of lower-star persistence diagrams. The length of this list is the same than that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively """ # Don't try to compute gradients for the vertex pairs - indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions) + indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions, self.hcf) # Get persistence diagrams self.dgms = [] for idx_dim, dimension in enumerate(self.dimensions): diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index 97c2692d..e4d6d4c6 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -8,7 +8,7 @@ from ..rips_complex import RipsComplex # The parameters of the model are the point coordinates. 
-def _Rips(DX, max_edge, dimensions): +def _Rips(DX, max_edge, dimensions, homology_coeff_field=11): # Parameters: DX (distance matrix), # max_edge (maximum edge length for Rips filtration), # dimensions (homology dimensions) @@ -16,7 +16,7 @@ def _Rips(DX, max_edge, dimensions): # Compute the persistence pairs with Gudhi rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge) st = rc.create_simplex_tree(max_dimension=max(dimensions)+1) - st.compute_persistence() + st.compute_persistence(homology_coeff_field=homology_coeff_field) pairs = st.flag_persistence_generators() L_indices = [] @@ -40,7 +40,7 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, dimensions, maximum_edge_length=np.inf, min_persistence=None, **kwargs): + def __init__(self, dimensions, maximum_edge_length=np.inf, min_persistence=None, homology_coeff_field=11, **kwargs): """ Constructor for the RipsLayer class @@ -48,11 +48,13 @@ class RipsLayer(tf.keras.layers.Layer): maximum_edge_length (float): maximum edge length for the Rips complex dimensions (List[int]): homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) + homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337. """ super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length self.dimensions = dimensions self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] + self.hcf = homology_coeff_field assert len(self.min_persistence) == len(self.dimensions) def call(self, X): @@ -69,7 +71,7 @@ class RipsLayer(tf.keras.layers.Layer): DX = tf.norm(tf.expand_dims(X, 1)-tf.expand_dims(X, 0), axis=2) # Compute vertices associated to positive and negative simplices # Don't compute gradient for this operation - indices = _Rips(DX.numpy(), self.max_edge, self.dimensions) + indices = _Rips(DX.numpy(), self.max_edge, self.dimensions, self.hcf) # Get persistence diagrams by simply picking the corresponding entries in the distance matrix self.dgms = [] for idx_dim, dimension in enumerate(self.dimensions): -- cgit v1.2.3 From 2d20991dd44c621b7becd06c086948f666de4da4 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 7 Jun 2022 14:57:41 +0200 Subject: Rephrase description for fetch functions --- src/python/gudhi/datasets/remote.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py index 48bdcfa6..f6d3fe56 100644 --- a/src/python/gudhi/datasets/remote.py +++ b/src/python/gudhi/datasets/remote.py @@ -142,7 +142,7 @@ def _get_archive_path(file_path, label): def fetch_spiral_2d(file_path = None): """ - Fetch spiral_2d dataset remotely. + Load the spiral_2d dataset. Note that if the dataset already exists in the target location, it is not downloaded again, and the corresponding array is returned from cache. @@ -174,7 +174,7 @@ def fetch_spiral_2d(file_path = None): def fetch_bunny(file_path = None, accept_license = False): """ - Fetch Stanford bunny dataset remotely and its LICENSE file. + Load the Stanford bunny dataset. This dataset contains 35947 vertices. 
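Taken together, the dataset commits above settle on a small public surface: the two fetch functions plus clear_data_home, with _get_data_home kept private. A minimal usage sketch of that surface, assuming the defaults documented in these patches (the GUDHI_DATA value is an illustrative placeholder):

import os
from gudhi.datasets import remote

os.environ["GUDHI_DATA"] = "/tmp/gudhi_data"     # optional: redirect the cache away from ~/gudhi_data

spiral = remote.fetch_spiral_2d()                # downloads on the first call, reads the cache afterwards
bunny = remote.fetch_bunny(accept_license=True)  # True skips printing the license terms

print(spiral.shape)  # expected (114562, 2), per the documented vertex count
print(bunny.shape)   # expected (35947, 3), per the documented vertex count

remote.clear_data_home()                         # removes the whole cache folder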
-- cgit v1.2.3 From 3e703a44f81623049dd481537e1e2e3457d32943 Mon Sep 17 00:00:00 2001 From: Manu Date: Tue, 7 Jun 2022 15:52:26 +0200 Subject: DiagramScaler removed from Entropy --- src/python/gudhi/representations/vector_methods.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py index 7d6a7f27..69ff5e1e 100644 --- a/src/python/gudhi/representations/vector_methods.py +++ b/src/python/gudhi/representations/vector_methods.py @@ -508,14 +508,7 @@ class Entropy(BaseEstimator, TransformerMixin): new_X = BirthPersistenceTransform().fit_transform(X) for i in range(num_diag): - orig_diagram, diagram, num_pts_in_diag = X[i], new_X[i], X[i].shape[0] - try: - #new_diagram = DiagramScaler(use=True, scalers=[([1], MaxAbsScaler())]).fit_transform([diagram])[0] - new_diagram = DiagramScaler().fit_transform([diagram])[0] - except ValueError: - # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507 - assert len(diagram) == 0 - new_diagram = np.empty(shape = [0, 2]) + orig_diagram, new_diagram, num_pts_in_diag = X[i], new_X[i], X[i].shape[0] p = new_diagram[:,1] p = p/np.sum(p) -- cgit v1.2.3 From 0ac4c3383495a78c7a7e5dab9eb573df49f32004 Mon Sep 17 00:00:00 2001 From: albert-github Date: Wed, 8 Jun 2022 12:41:38 +0200 Subject: issue #613 [cpp documentation] Footer needs to be generated with a more recent version of doxygen With the doxygen versions <= 1.9.2 the default setting 'overflow: hidden;' causes problems. With the commit: ``` Commit: 590198b416cd53313d150428d2f912586065ea0d [590198b] Date: Wednesday, December 1, 2021 1:37:26 PM issue #8924 Horizontal scroll bar missing in HTML for wide class="dotgraph" objects ``` for the doxygen 1.9.3 version this has already been corrected but to run properly with the <= 1.9.2 version this setting is required --- src/common/doc/stylesheet.css | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/common/doc/stylesheet.css b/src/common/doc/stylesheet.css index f31f5df4..fb030e1f 100755 --- a/src/common/doc/stylesheet.css +++ b/src/common/doc/stylesheet.css @@ -16,3 +16,13 @@ height: auto; padding-left: 16px; } +// With the doxygen versions <= 1.9.2 the default setting 'overflow: hidden;' causes problems. +// With the commit: +// Commit: 590198b416cd53313d150428d2f912586065ea0d [590198b] +// Date: Wednesday, December 1, 2021 1:37:26 PM +// issue #8924 Horizontal scroll bar missing in HTML for wide class="dotgraph" objects +// for the doxygen 1.9.3 version this has already been corrected but to run properly with the <= 1.9.2 version +// this setting is required +ul { + overflow: visible; +} -- cgit v1.2.3 From d792eaf38ac91453a9a214285716d162b8eb74e2 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Thu, 9 Jun 2022 08:55:24 +0200 Subject: [skip ci] code review: Update .github/how_to_compile_gudhi_in_a_conda_env.md Co-authored-by: Hind-M <70631848+Hind-M@users.noreply.github.com> --- .github/how_to_compile_gudhi_in_a_conda_env.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index 0d677c1f..bc75cc60 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -78,7 +78,7 @@ firefox html/index.html # [optional] To display the c++ documentation. 
Anything cd /workdir/gudhi rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build -# python OFF to prevent python modules search makes cmake faster - it is the next one in user version that matters +# python OFF to prevent python modules search makes cmake faster - it is the next cmake call in user version that matters cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version .. make user_version; cd version -- cgit v1.2.3 From 88f2ec40c0e879010dfb562054b99bd253787922 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Thu, 9 Jun 2022 08:56:38 +0200 Subject: [skip ci] doc review: Update .github/how_to_compile_gudhi_in_a_conda_env.md Co-authored-by: Hind-M <70631848+Hind-M@users.noreply.github.com> --- .github/how_to_compile_gudhi_in_a_conda_env.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index bc75cc60..4bd4e828 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -43,7 +43,7 @@ cd build cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX .. cd src/python # To build python module in parallel -python setup.py build_ext -j 16 --inplace # 16 is the number of CPUthat are used to compile the python module. Can be any other value. +python setup.py build_ext -j 16 --inplace # 16 is the number of CPU that are used to compile the python module. Can be any other value. # to clean the build # python setup.py clean --all ``` -- cgit v1.2.3 From 09a412d9c444666889c335fef592753df49125d3 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau <10407034+VincentRouvreau@users.noreply.github.com> Date: Thu, 9 Jun 2022 08:57:03 +0200 Subject: [skip ci] doc review: Update .github/how_to_compile_gudhi_in_a_conda_env.md Co-authored-by: Hind-M <70631848+Hind-M@users.noreply.github.com> --- .github/how_to_compile_gudhi_in_a_conda_env.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index 4bd4e828..d55f53f1 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -87,6 +87,6 @@ cd build cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX .. cd python # To build python module in parallel -python setup.py build_ext -j 16 --inplace # 16 is the number of CPUthat are used to compile the python module. Can be any other value. +python setup.py build_ext -j 16 --inplace # 16 is the number of CPU that are used to compile the python module. Can be any other value. firefox sphinx/index.html # [optional] To display the python documentation. Anything else than firefox can be used. 
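# An aside on the hard-coded "-j 16" that these commits keep rewording: it is
# only a thread count, so on a Linux shell it can be derived from the machine
# instead (assuming GNU coreutils provides nproc):
# python setup.py build_ext -j $(nproc) --inplace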
``` \ No newline at end of file -- cgit v1.2.3 From 5839625a253fbb54b40205da1a3e2811b29a1be2 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Thu, 9 Jun 2022 09:06:21 +0200 Subject: [skip ci] doc review: rename the repository gudhi-devel, precise how to clone and init submodules --- .github/how_to_compile_gudhi_in_a_conda_env.md | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index d55f53f1..fc9b37a8 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -15,7 +15,8 @@ mamba install -c conda-forge python cmake doxygen eigen cgal-cpp Some of the requirements are in the gudhi repository (please refer to [how to use github to contribute to gudhi](how_to_use_github_to_contribute_to_gudhi.md)). -In the gudhi repository - let's call it `/workdir/gudhi` i.e. - once submodules are initialised: +Once the gudhi-devel repository is cloned on your machine (`git clone...`) - let's call it `/workdir/gudhi-devel` i.e. - +and once the submodules are initialised (`git submodule update --init`): ```bash pip install -r ext/gudhi-deploy/build-requirements.txt @@ -26,7 +27,7 @@ pip install -r ext/gudhi-deploy/test-requirements.txt # pytorch can be painful In order to compile all c++ utilities, examples, benchmarks, unitary tests, and python module: ```bash -cd /workdir/gudhi +cd /workdir/gudhi-devel rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build # To build all even examples and benchmarks @@ -37,7 +38,7 @@ cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX -DWITH_GUDHI_ In order to compile only python module ```bash -cd /workdir/gudhi +cd /workdir/gudhi-devel rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_PREFIX_PATH=$CONDA_PREFIX .. @@ -50,13 +51,13 @@ python setup.py build_ext -j 16 --inplace # 16 is the number of CPU that are us In order to use freshly compiled gudhi python module: ```bash -PYTHONPATH=/workdir/gudhi/build/src/python python # or ipython, jupyter, ... +PYTHONPATH=/workdir/gudhi-devel/build/src/python python # or ipython, jupyter, ... ``` ### Specific C++ documentation generation ```bash -cd /workdir/gudhi +cd /workdir/gudhi-devel rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build # python OFF to prevent python modules search makes cmake faster @@ -75,7 +76,7 @@ firefox html/index.html # [optional] To display the c++ documentation. 
Anything ### Specific python documentation generation ```bash -cd /workdir/gudhi +cd /workdir/gudhi-devel rm -rf build; mkdir build # /!\ any existing build folder will be removed cd build # python OFF to prevent python modules search makes cmake faster - it is the next cmake call in user version that matters -- cgit v1.2.3 From 88573b11c3cb7b65fc0f716054d7258e73d9beaf Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Thu, 9 Jun 2022 10:11:11 +0200 Subject: [skip ci] doc review: rename the repository gudhi-devel --- .github/how_to_compile_gudhi_in_a_conda_env.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/how_to_compile_gudhi_in_a_conda_env.md b/.github/how_to_compile_gudhi_in_a_conda_env.md index fc9b37a8..4acfca2e 100644 --- a/.github/how_to_compile_gudhi_in_a_conda_env.md +++ b/.github/how_to_compile_gudhi_in_a_conda_env.md @@ -13,7 +13,7 @@ conda activate gudhi mamba install -c conda-forge python cmake doxygen eigen cgal-cpp ``` -Some of the requirements are in the gudhi repository (please refer to +Some of the requirements are in the gudhi-devel repository (please refer to [how to use github to contribute to gudhi](how_to_use_github_to_contribute_to_gudhi.md)). Once the gudhi-devel repository is cloned on your machine (`git clone...`) - let's call it `/workdir/gudhi-devel` i.e. - and once the submodules are initialised (`git submodule update --init`): -- cgit v1.2.3 From 45d8f7c9b84d6123d117298eea38310117cc06f8 Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Thu, 9 Jun 2022 15:44:53 +0200 Subject: removed default field coefficient --- src/python/gudhi/tensorflow/cubical_layer.py | 2 +- src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py | 2 +- src/python/gudhi/tensorflow/rips_layer.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index 16dc7d35..d68c7556 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -8,7 +8,7 @@ from ..cubical_complex import CubicalComplex # The parameters of the model are the pixel values. -def _Cubical(Xflat, Xdim, dimensions, homology_coeff_field=11): +def _Cubical(Xflat, Xdim, dimensions, homology_coeff_field): # Parameters: Xflat (flattened image), # Xdim (shape of non-flattened image) # dimensions (homology dimensions) diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index e0a5b457..4ec3f7c7 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -7,7 +7,7 @@ import tensorflow as tf # The parameters of the model are the vertex function values of the simplex tree. -def _LowerStarSimplexTree(simplextree, filtration, dimensions, homology_coeff_field=11): +def _LowerStarSimplexTree(simplextree, filtration, dimensions, homology_coeff_field): # Parameters: simplextree (simplex tree on which to compute persistence) # filtration (function values on the vertices of st), # dimensions (homology dimensions), diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index e4d6d4c6..fca336f3 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -8,7 +8,7 @@ from ..rips_complex import RipsComplex # The parameters of the model are the point coordinates. 
-def _Rips(DX, max_edge, dimensions, homology_coeff_field=11): +def _Rips(DX, max_edge, dimensions, homology_coeff_field): # Parameters: DX (distance matrix), # max_edge (maximum edge length for Rips filtration), # dimensions (homology dimensions) -- cgit v1.2.3 From 4f4fef6c14afa8371d9273b7161c78213bfcd6f6 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 13 Jun 2022 16:40:09 +0200 Subject: Move new option in the correct file. Disable every option when only build cpp documentation --- CMakeLists.txt | 3 --- src/cmake/modules/GUDHI_options.cmake | 9 +++++++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 47d87cd1..f946e111 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,6 +1,3 @@ - -option(WITH_GUDHI_CPP_DOCUMENTATION_ONLY "Build only the GUDHI C++ documentation (with doxygen)." OFF) - cmake_minimum_required(VERSION 3.5) project(GUDHIdev) diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake index bffb3ffc..6655d605 100644 --- a/src/cmake/modules/GUDHI_options.cmake +++ b/src/cmake/modules/GUDHI_options.cmake @@ -3,3 +3,12 @@ option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installa option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) option(WITH_GUDHI_TEST "Activate/deactivate examples compilation and installation" ON) option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) +option(WITH_GUDHI_CPP_DOCUMENTATION_ONLY "Build only the GUDHI C++ documentation (with doxygen)." OFF) + +if (WITH_GUDHI_CPP_DOCUMENTATION_ONLY) + set (WITH_GUDHI_BENCHMARK OFF) + set (WITH_GUDHI_EXAMPLE OFF) + set (WITH_GUDHI_PYTHON OFF) + set (WITH_GUDHI_TEST OFF) + set (WITH_GUDHI_UTILITIES OFF) +endif() \ No newline at end of file -- cgit v1.2.3 From 8a0cae963c6b86cfb07da906af22bff7b55624db Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 13 Jun 2022 16:41:26 +0200 Subject: Use new option when only build cpp documentation - useful when performing user version also --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f6a875dd..3e99896a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -48,7 +48,7 @@ jobs: python: docker: - - image: gudhi/ci_for_gudhi:latest + - image: gudhi/doxygen_for_gudhi:latest steps: - checkout - run: @@ -58,7 +58,7 @@ jobs: git submodule update mkdir build cd build - cmake -DUSER_VERSION_DIR=version .. + cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON -DUSER_VERSION_DIR=version .. make user_version cd version cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 . @@ -89,12 +89,12 @@ jobs: git submodule update mkdir build cd build - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version .. + cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON -DUSER_VERSION_DIR=version .. make user_version cd version mkdir build cd build - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF .. + cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON .. 
make doxygen 2>&1 | tee dox.log grep warning dox.log cp dox.log html/ -- cgit v1.2.3 From 87d153e2f8d4c2a59f380fc07862609f124dc70c Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 13 Jun 2022 16:42:11 +0200 Subject: Warns no more on standard output but in a file --- src/Doxyfile.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index 06a74012..4af327c7 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -711,7 +711,7 @@ CITE_BIB_FILES = @CMAKE_SOURCE_DIR@/biblio/bibliography.bib \ # messages are off. # The default value is: NO. -QUIET = NO +QUIET = YES # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES @@ -765,7 +765,7 @@ WARN_FORMAT = "$file:$line: $text" # messages should be written. If left blank the output is written to standard # error (stderr). -WARN_LOGFILE = +WARN_LOGFILE = doxygen.log #--------------------------------------------------------------------------- # Configuration options related to the input files -- cgit v1.2.3 From 7d5206157185a184303b8eda42e2f9a2b39bf7d4 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 13 Jun 2022 16:42:47 +0200 Subject: Also apply the feature for user version also --- src/CMakeLists.txt | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 8023e04c..a4fcfcad 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -12,8 +12,10 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES") # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path set(GUDHI_PYTHON_PATH "python") -# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH -include(GUDHI_third_party_libraries NO_POLICY_SCOPE) +if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) + # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH + include(GUDHI_third_party_libraries NO_POLICY_SCOPE) +endif() include(GUDHI_compilation_flags) @@ -67,7 +69,9 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES}) endforeach() endforeach() -add_subdirectory(GudhUI) +if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) + add_subdirectory(GudhUI) +endif() message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"") message("++ GUDHI_MISSING_MODULES list is:\"${GUDHI_MISSING_MODULES}\"") -- cgit v1.2.3 From 146da89aa32effe082333601b5141300717cd8c0 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Mon, 13 Jun 2022 17:09:58 +0200 Subject: Update submodule gudhi-deploy to take into account last modification for a docker dedicated to doxygen --- ext/gudhi-deploy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/gudhi-deploy b/ext/gudhi-deploy index 290ade10..e9e9a487 160000 --- a/ext/gudhi-deploy +++ b/ext/gudhi-deploy @@ -1 +1 @@ -Subproject commit 290ade1086bedbc96a35df886cadecabbf4072e6 +Subproject commit e9e9a4878731853d2d3149a5eac30df338a8197a -- cgit v1.2.3 From c69c9eec18336d44be157e4fd6ee5261b47ddb49 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 14 Jun 2022 10:34:34 +0200 Subject: Use boundary_opposite_vertex_simplex_range instead of boundary_simplex_range in cech --- .../include/gudhi/Cech_complex_blocker.h | 22 ++++++---------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 3141d27a..9917999f 100644 --- 
a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -74,25 +74,15 @@ class Cech_blocker { bool is_min_enclos_ball = false; // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices - for (auto face : sc_ptr_->boundary_simplex_range(sh)) { - // Find which vertex of sh is missing in face. We rely on the fact that simplex_vertex_range is sorted. - auto longlist = sc_ptr_->simplex_vertex_range(sh); - auto shortlist = sc_ptr_->simplex_vertex_range(face); - - auto longiter = std::begin(longlist); - auto shortiter = std::begin(shortlist); - auto enditer = std::end(shortlist); - while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; } - auto extra = *longiter; // Vertex_handle - + for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) { Sphere sph; - auto k = sc_ptr_->key(face); + auto k = sc_ptr_->key(face_opposite_vertex.first); if(k != sc_ptr_->null_key()) { sph = cc_ptr_->get_cache().at(k); } else { Point_cloud face_points; - for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { + for (auto vertex : sc_ptr_->simplex_vertex_range(face_opposite_vertex.first)) { face_points.push_back(cc_ptr_->get_point(vertex)); #ifdef DEBUG_TRACES std::clog << "#(" << vertex << ")#"; @@ -100,13 +90,13 @@ class Cech_blocker { } sph = get_sphere(face_points.cbegin(), face_points.cend()); // Put edge sphere in cache - sc_ptr_->assign_key(face, cc_ptr_->get_cache().size()); + sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size()); cc_ptr_->get_cache().push_back(sph); // Clear face_points face_points.clear(); } - // Check if the minimal enclosing ball of current face contains the extra point - if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { + // Check if the minimal enclosing ball of current face contains the extra point/opposite vertex + if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) { #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES -- cgit v1.2.3 From c8dc6c9a247f1eb541cea90dfc6f9bfb3e0d576e Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 15 Jun 2022 08:35:25 +0200 Subject: Docker image mix between doxygen/python --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3e99896a..e2997145 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -48,7 +48,7 @@ jobs: python: docker: - - image: gudhi/doxygen_for_gudhi:latest + - image: gudhi/ci_for_gudhi:latest steps: - checkout - run: @@ -79,7 +79,7 @@ jobs: doxygen: docker: - - image: gudhi/ci_for_gudhi:latest + - image: gudhi/doxygen_for_gudhi:latest steps: - checkout - run: -- cgit v1.2.3 From ccb0e44f107d1d09c178620566eeefd02feb6a4e Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 15 Jun 2022 08:35:58 +0200 Subject: doc review: no need 'for this' --- src/common/doc/installation.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 131130f8..b97142b6 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -42,8 +42,8 @@ make \endverbatim * \verbatim ctest --output-on-failure \endverbatim * * 
\subsection documentationgeneration C++ documentation - * To generate the C++ documentation, for this the doxygen program - * is required, run the following command in a terminal: + * To generate the C++ documentation, the doxygen program + * is required. Run the following command in a terminal: * \verbatim make doxygen \endverbatim * Documentation will be generated in a folder named html. * -- cgit v1.2.3 From bcc2c9584dc07d1cfcb870746110c524827d3bfa Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 15 Jun 2022 09:35:49 +0200 Subject: Use boundary_opposite_vertex_simplex_range instead of boundary_simplex_range in alpha --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index b1a9407b..5a0f0643 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -464,7 +464,8 @@ class Alpha_complex { using Vertex_handle = typename SimplicialComplexForAlpha::Vertex_handle; // ### Foreach Tau face of Sigma - for (auto f_boundary : complex.boundary_simplex_range(f_simplex)) { + for (auto face_opposite_vertex : complex.boundary_opposite_vertex_simplex_range(f_simplex)) { + auto f_boundary = face_opposite_vertex.first; #ifdef DEBUG_TRACES std::clog << " | --------------------------------------------------\n"; std::clog << " | Tau "; @@ -485,16 +486,8 @@ class Alpha_complex { #endif // DEBUG_TRACES // ### Else } else { - // Find which vertex of f_simplex is missing in f_boundary. We could actually write a variant of boundary_simplex_range that gives pairs (f_boundary, vertex). We rely on the fact that simplex_vertex_range is sorted. - auto longlist = complex.simplex_vertex_range(f_simplex); - auto shortlist = complex.simplex_vertex_range(f_boundary); - auto longiter = std::begin(longlist); - auto shortiter = std::begin(shortlist); - auto enditer = std::end(shortlist); - while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; } - Vertex_handle extra = *longiter; auto const& cache=get_cache(complex, f_boundary); - bool is_gab = kernel_.is_gabriel(cache, get_point_(extra)); + bool is_gab = kernel_.is_gabriel(cache, get_point_(face_opposite_vertex.second)); #ifdef DEBUG_TRACES std::clog << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << extra << std::endl; #endif // DEBUG_TRACES -- cgit v1.2.3 From 70cc462f71703f011fd5b8ba9da668b58f09059c Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 15 Jun 2022 09:41:57 +0200 Subject: Fix warning and debug traces --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 5a0f0643..aec8c1b1 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -461,7 +461,6 @@ class Alpha_complex { void propagate_alpha_filtration(SimplicialComplexForAlpha& complex, Simplex_handle f_simplex) { // From SimplicialComplexForAlpha type required to assign filtration values. 
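The scan that these commits delete (here and in the Čech blocker) deserves a standalone illustration, since `boundary_opposite_vertex_simplex_range` now encapsulates it. Below is a self-contained sketch of the removed pattern, using plain sorted integer vectors in place of `simplex_vertex_range`; it relies, as the removed comment said, on both ranges being sorted consistently.

```cpp
#include <cassert>
#include <vector>

// Given a simplex and one of its faces, both as sorted vertex lists, return the
// single vertex of the simplex that the face does not contain: walk both ranges
// in lockstep and stop at the first mismatch.
int opposite_vertex(const std::vector<int>& simplex, const std::vector<int>& face) {
  auto longiter = simplex.begin();
  auto shortiter = face.begin();
  while (shortiter != face.end() && *longiter == *shortiter) { ++longiter; ++shortiter; }
  return *longiter;  // the vertex missing from the face
}

int main() {
  assert(opposite_vertex({1, 4, 7}, {1, 7}) == 4);
  assert(opposite_vertex({1, 4, 7}, {1, 4}) == 7);
  assert(opposite_vertex({1, 4, 7}, {4, 7}) == 1);
}
```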
using Filtration_value = typename SimplicialComplexForAlpha::Filtration_value; - using Vertex_handle = typename SimplicialComplexForAlpha::Vertex_handle; // ### Foreach Tau face of Sigma for (auto face_opposite_vertex : complex.boundary_opposite_vertex_simplex_range(f_simplex)) { @@ -489,7 +488,7 @@ class Alpha_complex { auto const& cache=get_cache(complex, f_boundary); bool is_gab = kernel_.is_gabriel(cache, get_point_(face_opposite_vertex.second)); #ifdef DEBUG_TRACES - std::clog << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << extra << std::endl; + std::clog << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << face_opposite_vertex.second << std::endl; #endif // DEBUG_TRACES // ### If Tau is not Gabriel of Sigma if (false == is_gab) { -- cgit v1.2.3 From 868369dd61fb6ef475ffa3af724907927121b6bb Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 16 Jun 2022 15:54:21 +0200 Subject: Add exact option for exact cech variant --- .../benchmark/cech_complex_benchmark.cpp | 22 ++++++++++++++-------- src/Cech_complex/include/gudhi/Cech_complex.h | 6 ++++-- .../include/gudhi/Cech_complex_blocker.h | 21 ++++++++++++++------- 3 files changed, 32 insertions(+), 17 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index d2a71879..19142780 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -31,7 +31,7 @@ using Points_off_reader = Gudhi::Points_off_reader; using Rips_complex = Gudhi::rips_complex::Rips_complex; template -Simplex_tree benchmark_cech(const std::string& off_file_points, const Filtration_value& radius, const int& dim_max) { +Simplex_tree benchmark_cech(const std::string& off_file_points, const Filtration_value& radius, const int& dim_max, const bool exact) { using Point_cgal = typename Kernel::Point_d; using Points_off_reader_cgal = Gudhi::Points_off_reader; using Cech_complex = Gudhi::cech_complex::Cech_complex; @@ -42,7 +42,7 @@ Simplex_tree benchmark_cech(const std::string& off_file_points, const Filtration Gudhi::Clock cech_clock("Cech computation"); Cech_complex cech_complex_from_points(off_reader_cgal.get_point_cloud(), radius); Simplex_tree cech_stree; - cech_complex_from_points.create_complex(cech_stree, dim_max); + cech_complex_from_points.create_complex(cech_stree, dim_max, exact); // ------------------------------------------ // Display information about the Cech complex @@ -56,8 +56,9 @@ int main(int argc, char* argv[]) { boost::filesystem::path full_path(boost::filesystem::current_path()); std::clog << "Current path is : " << full_path << std::endl; - std::clog << "File name ; Radius ; Rips time ; Dim-3 Epick Cech time ; Dynamic_dim Epick Cech time ; " - "Dim-3 Epeck Cech time ; Dynamic_dim Epeck Cech time ; Cech nb simplices ; Rips nb simplices;" + std::clog << "File name ; Radius ; Rips time ; Dim-3 Fast Cech time ; Dynamic_dim Fast Cech time ; " + "Dim-3 Safe Cech time ; Dynamic_dim Safe Cech time ; Dim-3 Exact Cech time ; Dynamic_dim Exact Cech time ; " + "Cech nb simplices ; Rips nb simplices;" << std::endl; boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) { @@ -83,10 +84,15 @@ int main(int argc, char* argv[]) { // -------------- // Cech complex // -------------- - benchmark_cech>>(itr->path().string(), radius, p0.size() - 1); - 
benchmark_cech>(itr->path().string(), radius, p0.size() - 1); - benchmark_cech>>(itr->path().string(), radius, p0.size() - 1); - auto cech_stree = benchmark_cech>(itr->path().string(), radius, p0.size() - 1); + // Fast + benchmark_cech>>(itr->path().string(), radius, p0.size() - 1, false); + benchmark_cech>(itr->path().string(), radius, p0.size() - 1, false); + // Safe + benchmark_cech>>(itr->path().string(), radius, p0.size() - 1, false); + benchmark_cech>(itr->path().string(), radius, p0.size() - 1, false); + // Exact + benchmark_cech>>(itr->path().string(), radius, p0.size() - 1, true); + auto cech_stree = benchmark_cech>(itr->path().string(), radius, p0.size() - 1, true); std::clog << cech_stree.num_simplices() << " ; "; std::clog << rips_stree.num_simplices() << ";" << std::endl; diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index fc39f75b..2c6d3df5 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -78,17 +78,19 @@ class Cech_complex { * * @param[in] complex SimplicialComplexForCech to be created. * @param[in] dim_max graph expansion until this given maximal dimension. + * @param[in] exact Exact filtration values computation. Not exact if `Kernel` is not CGAL::Epeck_d. * @exception std::invalid_argument In debug mode, if `complex.num_vertices()` does not return 0. * */ - void create_complex(SimplicialComplexForCechComplex& complex, int dim_max) { + void create_complex(SimplicialComplexForCechComplex& complex, int dim_max, const bool exact = false) { GUDHI_CHECK(complex.num_vertices() == 0, std::invalid_argument("Cech_complex::create_complex - simplicial complex is not empty")); // insert the proximity graph in the simplicial complex complex.insert_graph(cech_skeleton_graph_); // expand the graph until dimension dim_max - complex.expansion_with_blockers(dim_max, cech_blocker(&complex, this)); + complex.expansion_with_blockers(dim_max, cech_blocker(&complex, this, exact)); } /** @return max_radius value given at construction. 
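A usage sketch of the new `exact` flag follows. The points and radius are hypothetical; the instantiation mirrors the benchmark's `CGAL::Epeck_d` case, since the documentation above notes that only that kernel yields exact values:

```cpp
#include <gudhi/Cech_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epeck_d.h>
#include <vector>

using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
using Point = Kernel::Point_d;
using Simplex_tree = Gudhi::Simplex_tree<>;
using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;

int main() {
  std::vector<Point> points;
  points.emplace_back(1., 0.);  // illustrative coordinates
  points.emplace_back(0., 1.);
  points.emplace_back(0., 0.);
  Cech_complex cech(points, /*max_radius=*/1.);
  Simplex_tree stree;
  cech.create_complex(stree, /*dim_max=*/2, /*exact=*/true);  // exact filtration values
  return 0;
}
```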
*/ diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 3141d27a..087390b6 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -94,9 +94,9 @@ class Cech_blocker { Point_cloud face_points; for (auto vertex : sc_ptr_->simplex_vertex_range(face)) { face_points.push_back(cc_ptr_->get_point(vertex)); - #ifdef DEBUG_TRACES - std::clog << "#(" << vertex << ")#"; - #endif // DEBUG_TRACES +#ifdef DEBUG_TRACES + std::clog << "#(" << vertex << ")#"; +#endif // DEBUG_TRACES } sph = get_sphere(face_points.cbegin(), face_points.cend()); // Put edge sphere in cache @@ -107,10 +107,13 @@ class Cech_blocker { } // Check if the minimal enclosing ball of current face contains the extra point if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(extra)) <= sph.second) { - #ifdef DEBUG_TRACES - std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; - #endif // DEBUG_TRACES +#ifdef DEBUG_TRACES + std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; +#endif // DEBUG_TRACES is_min_enclos_ball = true; +#if CGAL_VERSION_NR >= 1050000000 + if(exact_) CGAL::exact(sph.second); +#endif radius = std::sqrt(cast_to_fv(sph.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); cc_ptr_->get_cache().push_back(sph); @@ -124,6 +127,9 @@ class Cech_blocker { points.push_back(cc_ptr_->get_point(vertex)); } Sphere sph = get_sphere(points.cbegin(), points.cend()); +#if CGAL_VERSION_NR >= 1050000000 + if(exact_) CGAL::exact(sph.second); +#endif radius = std::sqrt(cast_to_fv(sph.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); @@ -138,12 +144,13 @@ class Cech_blocker { } /** \internal \brief Čech complex blocker constructor. 
*/ - Cech_blocker(SimplicialComplexForCech* sc_ptr, Cech_complex* cc_ptr) : sc_ptr_(sc_ptr), cc_ptr_(cc_ptr) {} + Cech_blocker(SimplicialComplexForCech* sc_ptr, Cech_complex* cc_ptr, const bool exact) : sc_ptr_(sc_ptr), cc_ptr_(cc_ptr), exact_(exact) {} private: SimplicialComplexForCech* sc_ptr_; Cech_complex* cc_ptr_; Kernel kernel_; + const bool exact_; }; } // namespace cech_complex -- cgit v1.2.3 From 3fa972970514333d4db22ec7628c5c1a4de3c6e8 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 21 Jun 2022 15:04:27 +0200 Subject: -Add/modify some comments -Some other minor changes -Change license to LGPL --- .../benchmark/cech_complex_benchmark.cpp | 20 +++++++++++--------- src/Cech_complex/include/gudhi/Cech_complex.h | 2 +- .../include/gudhi/Cech_complex_blocker.h | 2 +- src/common/doc/main_page.md | 2 +- 4 files changed, 14 insertions(+), 12 deletions(-) diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp index 19142780..a9dc5d0d 100644 --- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp +++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp @@ -61,20 +61,22 @@ int main(int argc, char* argv[]) { "Cech nb simplices ; Rips nb simplices;" << std::endl; boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end + // For every ".off" file in the current directory, and for 3 predefined thresholds, compare Rips and various Cech constructions for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) { if (!boost::filesystem::is_directory(itr->status())) { if (itr->path().extension() == ".off") { Points_off_reader off_reader(itr->path().string()); Point p0 = off_reader.get_point_cloud()[0]; - - for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) { + // Loop over the different thresholds + for (Filtration_value radius = 0.1; radius < 0.35; radius += 0.1) { std::clog << itr->path().stem() << " ; "; std::clog << radius << " ; "; Gudhi::Clock rips_clock("Rips computation"); Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius, Gudhi::Euclidean_distance()); Simplex_tree rips_stree; - rips_complex_from_points.create_complex(rips_stree, p0.size() - 1); + int dim_max = p0.size() - 1; + rips_complex_from_points.create_complex(rips_stree, dim_max); // ------------------------------------------ // Display information about the Rips complex // ------------------------------------------ @@ -85,14 +87,14 @@ int main(int argc, char* argv[]) { // Cech complex // -------------- // Fast - benchmark_cech>>(itr->path().string(), radius, p0.size() - 1, false); - benchmark_cech>(itr->path().string(), radius, p0.size() - 1, false); + benchmark_cech>>(itr->path().string(), radius, dim_max, false); + benchmark_cech>(itr->path().string(), radius, dim_max, false); // Safe - benchmark_cech>>(itr->path().string(), radius, p0.size() - 1, false); - benchmark_cech>(itr->path().string(), radius, p0.size() - 1, false); + benchmark_cech>>(itr->path().string(), radius, dim_max, false); + benchmark_cech>(itr->path().string(), radius, dim_max, false); // Exact - benchmark_cech>>(itr->path().string(), radius, p0.size() - 1, true); - auto cech_stree = benchmark_cech>(itr->path().string(), radius, p0.size() - 1, true); + benchmark_cech>>(itr->path().string(), radius, dim_max, true); + auto cech_stree = benchmark_cech>(itr->path().string(), radius, dim_max, true); std::clog << cech_stree.num_simplices() << " ; "; std::clog << 
rips_stree.num_simplices() << ";" << std::endl; diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 2c6d3df5..bae21d28 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -30,7 +30,7 @@ namespace cech_complex { * \ingroup cech_complex * * \details - * Cech complex is a simplicial complex constructed from a proximity graph, where the set of all simplices is filtered + * Cech complex is a simplicial complex where the set of all simplices is filtered * by the radius of their minimal enclosing ball and bounded by the given max_radius. * * \tparam Kernel CGAL kernel: either Epick_d or Epeck_d. diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 087390b6..9cd49a52 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -133,7 +133,7 @@ class Cech_blocker { radius = std::sqrt(cast_to_fv(sph.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); - cc_ptr_->get_cache().push_back(sph); + cc_ptr_->get_cache().push_back(std::move(sph)); } #ifdef DEBUG_TRACES diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index 2cb02e3f..ce903405 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -180,7 +180,7 @@ Author: Vincent Rouvreau
Introduced in: GUDHI 2.2.0
- Copyright: MIT [(GPL v3)](../../licensing/)
+ Copyright: MIT [(LGPL v3)](../../licensing/)
Requires: \ref cgal -- cgit v1.2.3 From b829a198e16fbef4c0cb2698b2c723fa353aac55 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 24 Jun 2022 11:03:22 +0200 Subject: Use CGAL::NT_converter instead of CGAL::to_double in Sphere_circumradius --- src/Cech_complex/include/gudhi/Cech_complex.h | 2 +- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 1 + src/Cech_complex/include/gudhi/Sphere_circumradius.h | 15 +++++++++------ src/Cech_complex/test/test_cech_complex.cpp | 4 ++-- 4 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index bae21d28..08b7a72f 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -70,7 +70,7 @@ class Cech_complex { point_cloud_.assign(std::begin(points), std::end(points)); cech_skeleton_graph_ = Gudhi::compute_proximity_graph( - point_cloud_, max_radius_, Sphere_circumradius()); + point_cloud_, max_radius_, Sphere_circumradius()); } /** \brief Initializes the simplicial complex from the proximity graph and expands it until a given maximal diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 9cd49a52..25d9a71f 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -12,6 +12,7 @@ #define CECH_COMPLEX_BLOCKER_H_ #include // for casting from FT to Filtration_value +#include // for CGAL::exact #include #include diff --git a/src/Cech_complex/include/gudhi/Sphere_circumradius.h b/src/Cech_complex/include/gudhi/Sphere_circumradius.h index b0d9f7cc..790f6950 100644 --- a/src/Cech_complex/include/gudhi/Sphere_circumradius.h +++ b/src/Cech_complex/include/gudhi/Sphere_circumradius.h @@ -11,7 +11,7 @@ #ifndef SPHERE_CIRCUMRADIUS_H_ #define SPHERE_CIRCUMRADIUS_H_ -#include // for #include +#include // for #include which is not working/compiling alone #include // for std::sqrt #include @@ -22,14 +22,17 @@ namespace cech_complex { /** \private @brief Compute the circumradius of the sphere passing through points given by a range of coordinates. * The points are assumed to have the same dimension. */ -template +template class Sphere_circumradius { private: Kernel kernel_; public: + using FT = typename Kernel::FT; using Point = typename Kernel::Point_d; using Point_cloud = typename std::vector; + CGAL::NT_converter cast_to_fv; + /** \brief Circumradius of sphere passing through two points using CGAL. * * @param[in] point_1 @@ -38,8 +41,8 @@ class Sphere_circumradius { * \tparam Point must be a Kernel::Point_d from CGAL. * */ - double operator()(const Point& point_1, const Point& point_2) const { - return std::sqrt(CGAL::to_double(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; + Filtration_value operator()(const Point& point_1, const Point& point_2) const { + return std::sqrt(cast_to_fv(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.; } /** \brief Circumradius of sphere passing through point cloud using CGAL. @@ -49,8 +52,8 @@ class Sphere_circumradius { * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL. 
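The conversion idiom this commit introduces can be shown in isolation. A sketch, where `Filtration_value` is assumed to be `double` (the simplex tree's default) and the value converted stands in for an exact squared circumradius:

```cpp
#include <CGAL/Epeck_d.h>
#include <CGAL/NT_converter.h>
#include <cmath>
#include <iostream>

using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
using FT = Kernel::FT;            // exact number type of the kernel
using Filtration_value = double;  // assumed target type

int main() {
  CGAL::NT_converter<FT, Filtration_value> cast_to_fv;
  FT squared_radius = FT(2);                              // exact value to convert
  Filtration_value r = std::sqrt(cast_to_fv(squared_radius));
  std::cout << r << '\n';                                 // ~1.41421
}
```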
* */ - double operator()(const Point_cloud& point_cloud) const { - return std::sqrt(CGAL::to_double(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); + Filtration_value operator()(const Point_cloud& point_cloud) const { + return std::sqrt(cast_to_fv(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end()))); } }; diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp index ea32f596..f5980e6d 100644 --- a/src/Cech_complex/test/test_cech_complex.cpp +++ b/src/Cech_complex/test/test_cech_complex.cpp @@ -107,11 +107,11 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) { std::clog << vertex << ","; vp.push_back(points.at(vertex)); } - std::clog << ") - distance =" << Gudhi::cech_complex::Sphere_circumradius()(vp.at(0), vp.at(1)) + std::clog << ") - distance =" << Gudhi::cech_complex::Sphere_circumradius()(vp.at(0), vp.at(1)) << " - filtration =" << st.filtration(f_simplex) << std::endl; BOOST_CHECK(vp.size() == 2); GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), - Gudhi::cech_complex::Sphere_circumradius()(vp.at(0), vp.at(1))); + Gudhi::cech_complex::Sphere_circumradius()(vp.at(0), vp.at(1))); } } -- cgit v1.2.3 From 370c09100d94dc73f582ebbabb994bcd2a3820eb Mon Sep 17 00:00:00 2001 From: MathieuCarriere Date: Fri, 24 Jun 2022 16:19:34 +0200 Subject: changed dimensions into homology_dimensions --- src/python/doc/cubical_complex_tflow_itf_ref.rst | 2 +- src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 2 +- src/python/doc/rips_complex_tflow_itf_ref.rst | 2 +- src/python/gudhi/tensorflow/cubical_layer.py | 6 +++--- src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py | 6 +++--- src/python/gudhi/tensorflow/rips_layer.py | 6 +++--- src/python/test/test_diff.py | 8 ++++---- 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst index 18b97adf..b32f5e47 100644 --- a/src/python/doc/cubical_complex_tflow_itf_ref.rst +++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst @@ -16,7 +16,7 @@ Example of gradient computed from cubical persistence import tensorflow as tf X = tf.Variable([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=tf.float32, trainable=True) - cl = CubicalLayer(dimensions=[0]) + cl = CubicalLayer(homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = cl.call(X)[0][0] diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst index b8518cdb..9d7d633f 100644 --- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst +++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst @@ -29,7 +29,7 @@ Example of gradient computed from lower-star filtration of a simplex tree st.insert([9, 10]) F = tf.Variable([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=tf.float32, trainable=True) - sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0]) + sl = LowerStarSimplexTreeLayer(simplextree=st, homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = sl.call(F)[0][0] diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst index 6c65c562..3ce75868 100644 --- a/src/python/doc/rips_complex_tflow_itf_ref.rst +++ b/src/python/doc/rips_complex_tflow_itf_ref.rst @@ -21,7 +21,7 @@ Example of gradient computed from Vietoris-Rips persistence import tensorflow as tf X = tf.Variable([[1.,1.],[2.,2.]], dtype=tf.float32, trainable=True) - rl = RipsLayer(maximum_edge_length=2., 
dimensions=[0]) + rl = RipsLayer(maximum_edge_length=2., homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = rl.call(X)[0][0] diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py index d68c7556..3304e719 100644 --- a/src/python/gudhi/tensorflow/cubical_layer.py +++ b/src/python/gudhi/tensorflow/cubical_layer.py @@ -37,17 +37,17 @@ class CubicalLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing the persistent homology of a cubical complex """ - def __init__(self, dimensions, min_persistence=None, homology_coeff_field=11, **kwargs): + def __init__(self, homology_dimensions, min_persistence=None, homology_coeff_field=11, **kwargs): """ Constructor for the CubicalLayer class Parameters: - dimensions (List[int]): homology dimensions + homology_dimensions (List[int]): list of homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337. """ super().__init__(dynamic=True, **kwargs) - self.dimensions = dimensions + self.dimensions = homology_dimensions self.min_persistence = min_persistence if min_persistence != None else [0.] * len(self.dimensions) self.hcf = homology_coeff_field assert len(self.min_persistence) == len(self.dimensions) diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py index 4ec3f7c7..5a8e5b75 100644 --- a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py +++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py @@ -43,18 +43,18 @@ class LowerStarSimplexTreeLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing lower-star persistence out of a simplex tree """ - def __init__(self, simplextree, dimensions, min_persistence=None, homology_coeff_field=11, **kwargs): + def __init__(self, simplextree, homology_dimensions, min_persistence=None, homology_coeff_field=11, **kwargs): """ Constructor for the LowerStarSimplexTreeLayer class Parameters: simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n-1, where n is its number of vertices. Note that its filtration values are modified in each call of the class. - dimensions (List[int]): homology dimensions + homology_dimensions (List[int]): list of homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337. """ super().__init__(dynamic=True, **kwargs) - self.dimensions = dimensions + self.dimensions = homology_dimensions self.simplextree = simplextree self.min_persistence = min_persistence if min_persistence != None else [0. 
for _ in range(len(self.dimensions))] self.hcf = homology_coeff_field diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py index fca336f3..2a73472c 100644 --- a/src/python/gudhi/tensorflow/rips_layer.py +++ b/src/python/gudhi/tensorflow/rips_layer.py @@ -40,19 +40,19 @@ class RipsLayer(tf.keras.layers.Layer): """ TensorFlow layer for computing Rips persistence out of a point cloud """ - def __init__(self, dimensions, maximum_edge_length=np.inf, min_persistence=None, homology_coeff_field=11, **kwargs): + def __init__(self, homology_dimensions, maximum_edge_length=np.inf, min_persistence=None, homology_coeff_field=11, **kwargs): """ Constructor for the RipsLayer class Parameters: maximum_edge_length (float): maximum edge length for the Rips complex - dimensions (List[int]): homology dimensions + homology_dimensions (List[int]): list of homology dimensions min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions) homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337. """ super().__init__(dynamic=True, **kwargs) self.max_edge = maximum_edge_length - self.dimensions = dimensions + self.dimensions = homology_dimensions self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))] self.hcf = homology_coeff_field assert len(self.min_persistence) == len(self.dimensions) diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py index e0a4717c..dca001a9 100644 --- a/src/python/test/test_diff.py +++ b/src/python/test/test_diff.py @@ -7,7 +7,7 @@ def test_rips_diff(): Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - rl = RipsLayer(maximum_edge_length=2., dimensions=[0]) + rl = RipsLayer(maximum_edge_length=2., homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = rl.call(X)[0][0] @@ -19,7 +19,7 @@ def test_cubical_diff(): Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - cl = CubicalLayer(dimensions=[0]) + cl = CubicalLayer(homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = cl.call(X)[0][0] @@ -31,7 +31,7 @@ def test_nonsquare_cubical_diff(): Xinit = np.array([[-1.,1.,0.],[1.,1.,1.]], dtype=np.float32) X = tf.Variable(initial_value=Xinit, trainable=True) - cl = CubicalLayer(dimensions=[0]) + cl = CubicalLayer(homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = cl.call(X)[0][0] @@ -66,7 +66,7 @@ def test_st_diff(): Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32) F = tf.Variable(initial_value=Finit, trainable=True) - sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0]) + sl = LowerStarSimplexTreeLayer(simplextree=st, homology_dimensions=[0]) with tf.GradientTape() as tape: dgm = sl.call(F)[0][0] -- cgit v1.2.3 From e25eb0bdde286346670936c6057c3b746f8682b4 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 29 Jun 2022 08:37:24 +0200 Subject: All *something*build*something* to be ignored by git to allow several build directories --- .gitignore | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 6aab7337..9f427fb2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ # Classical CMake build directory -build/ +/*build* # Generated by Cython src/python/gudhi/*.cpp @@ -16,5 
+16,3 @@ data/points/human.off_sc.txt # IDE specific # CLion .idea/ -cmake-build-debug/ - -- cgit v1.2.3 From 26a4381c6948338f935e107880cdf0789f65cb12 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 29 Jun 2022 10:03:38 +0200 Subject: third parties are not required when WITH_GUDHI_CPP_DOCUMENTATION_ONLY, but hera submodule is mandatory. Moved in a new cmake module --- CMakeLists.txt | 2 ++ src/CMakeLists.txt | 2 ++ src/cmake/modules/GUDHI_submodules.cmake | 5 +++++ src/cmake/modules/GUDHI_third_party_libraries.cmake | 6 ------ 4 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 src/cmake/modules/GUDHI_submodules.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index f946e111..1164eaab 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -13,6 +13,8 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES") # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path set(GUDHI_PYTHON_PATH "src/python") +include(GUDHI_submodules) + if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH include(GUDHI_third_party_libraries NO_POLICY_SCOPE) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index a4fcfcad..d2f12e5f 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -12,6 +12,8 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES") # This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path set(GUDHI_PYTHON_PATH "python") +include(GUDHI_submodules) + if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH include(GUDHI_third_party_libraries NO_POLICY_SCOPE) diff --git a/src/cmake/modules/GUDHI_submodules.cmake b/src/cmake/modules/GUDHI_submodules.cmake new file mode 100644 index 00000000..78b045bd --- /dev/null +++ b/src/cmake/modules/GUDHI_submodules.cmake @@ -0,0 +1,5 @@ +# For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera. +set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include) +set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h") +set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include) +set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h") \ No newline at end of file diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake index 6ba822ad..2cf6787e 100644 --- a/src/cmake/modules/GUDHI_third_party_libraries.cmake +++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake @@ -48,12 +48,6 @@ if(CGAL_FOUND) include( ${CGAL_USE_FILE} ) endif() -# For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera. 
-set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include) -set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h") -set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include) -set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h") - option(WITH_GUDHI_USE_TBB "Build with Intel TBB parallelization" ON) # Find TBB package for parallel sort - not mandatory, just optional. -- cgit v1.2.3 From e31556d04e4a957dd38b3e52e91e1a6cedd47f65 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Wed, 29 Jun 2022 10:07:04 +0200 Subject: doxygen logs are now in doxygen.log file (cf. Doxyfile.in). grep and cp the correct file --- .circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e2997145..64e7fbb1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -95,9 +95,9 @@ jobs: mkdir build cd build cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON .. - make doxygen 2>&1 | tee dox.log - grep warning dox.log - cp dox.log html/ + make doxygen 2>&1 + grep warning doxygen.log + cp doxygen.log html/ cp -R html /tmp/doxygen - store_artifacts: -- cgit v1.2.3 From 0b61062f68f5dde2e56e50f266a3f364ac39e86f Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 30 Jun 2022 09:59:23 +0200 Subject: Assign to sh same key as face in cech --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 1bb205b3..68a9f726 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -54,6 +54,7 @@ class Cech_blocker { using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle; using Filtration_value = typename SimplicialComplexForCech::Filtration_value; + using Simplex_key = typename SimplicialComplexForCech::Simplex_key; template Sphere get_sphere(PointIterator begin, PointIterator end) const { @@ -78,8 +79,10 @@ class Cech_blocker { for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) { Sphere sph; auto k = sc_ptr_->key(face_opposite_vertex.first); + Simplex_key sph_key; if(k != sc_ptr_->null_key()) { sph = cc_ptr_->get_cache().at(k); + sph_key = k; } else { Point_cloud face_points; @@ -92,6 +95,7 @@ class Cech_blocker { sph = get_sphere(face_points.cbegin(), face_points.cend()); // Put edge sphere in cache sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size()); + sph_key = cc_ptr_->get_cache().size(); cc_ptr_->get_cache().push_back(sph); // Clear face_points face_points.clear(); @@ -106,8 +110,7 @@ class Cech_blocker { if(exact_) CGAL::exact(sph.second); #endif radius = std::sqrt(cast_to_fv(sph.second)); - sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); - cc_ptr_->get_cache().push_back(sph); + sc_ptr_->assign_key(sh, sph_key); break; } } -- cgit v1.2.3 From 17f68bb5be7a68fab17510740d2bdc28b70decea Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Thu, 30 Jun 2022 11:30:12 +0200 Subject: code review: rename option WITH_GUDHI_THIRD_PARTY --- .circleci/config.yml | 8 ++++---- .github/for_maintainers/tests_strategy.md | 5 +++++ CMakeLists.txt | 4 ++-- src/CMakeLists.txt | 4 ++-- 
src/cmake/modules/GUDHI_options.cmake | 4 ++-- src/common/doc/installation.h | 4 ++-- 6 files changed, 17 insertions(+), 12 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 64e7fbb1..82ad19db 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -58,7 +58,7 @@ jobs: git submodule update mkdir build cd build - cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON -DUSER_VERSION_DIR=version .. + cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version .. make user_version cd version cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 . @@ -89,13 +89,13 @@ jobs: git submodule update mkdir build cd build - cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON -DUSER_VERSION_DIR=version .. + cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version .. make user_version cd version mkdir build cd build - cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON .. - make doxygen 2>&1 + cmake -DWITH_GUDHI_THIRD_PARTY=OFF .. + make doxygen grep warning doxygen.log cp doxygen.log html/ cp -R html /tmp/doxygen diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md index c25acf9b..01248d3a 100644 --- a/.github/for_maintainers/tests_strategy.md +++ b/.github/for_maintainers/tests_strategy.md @@ -4,6 +4,11 @@ This document tries to sum up the tests strategy that has been put in place for The aim is to help maintainers to anticipate third parties modifications, updates. +## CMake options + +[CMake GUDHI options](../../src/cmake/modules/GUDHI_options.cmake) allows to activate/deactivate what should be built and tested. +Note the special option `WITH_GUDHI_THIRD_PARTY` that, when set to `OFF`, accelerates doxygen documentation generation or `user_version` for instance. 
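Concretely, the option is used as sketched below. The paths are examples; the sequence follows the CircleCI recipe above, and warnings land in `doxygen.log` per the Doxyfile change earlier in this series:

```bash
cd /workdir/gudhi-devel
mkdir build; cd build
cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version ..
make user_version              # fast: no CGAL/Boost/python detection
cd version
mkdir build; cd build
cmake -DWITH_GUDHI_THIRD_PARTY=OFF ..
make doxygen                   # warnings are collected in doxygen.log
```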
+ ## Builds ### Linux diff --git a/CMakeLists.txt b/CMakeLists.txt index 1164eaab..6c233459 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,7 +15,7 @@ set(GUDHI_PYTHON_PATH "src/python") include(GUDHI_submodules) -if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) +if (WITH_GUDHI_THIRD_PARTY) # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH include(GUDHI_third_party_libraries NO_POLICY_SCOPE) endif() @@ -56,7 +56,7 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES}) endforeach() endforeach() -if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) +if (WITH_GUDHI_THIRD_PARTY) add_subdirectory(src/GudhUI) endif() diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index d2f12e5f..f9f77ef7 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -14,7 +14,7 @@ set(GUDHI_PYTHON_PATH "python") include(GUDHI_submodules) -if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) +if (WITH_GUDHI_THIRD_PARTY) # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH include(GUDHI_third_party_libraries NO_POLICY_SCOPE) endif() @@ -71,7 +71,7 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES}) endforeach() endforeach() -if (NOT WITH_GUDHI_CPP_DOCUMENTATION_ONLY) +if (WITH_GUDHI_THIRD_PARTY) add_subdirectory(GudhUI) endif() diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake index 6655d605..fe328169 100644 --- a/src/cmake/modules/GUDHI_options.cmake +++ b/src/cmake/modules/GUDHI_options.cmake @@ -3,9 +3,9 @@ option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installa option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) option(WITH_GUDHI_TEST "Activate/deactivate examples compilation and installation" ON) option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) -option(WITH_GUDHI_CPP_DOCUMENTATION_ONLY "Build only the GUDHI C++ documentation (with doxygen)." OFF) +option(WITH_GUDHI_THIRD_PARTY "Activate/deactivate third party libraries cmake detection. When set to OFF, it is usefull for doxygen or user_version i.e." ON) -if (WITH_GUDHI_CPP_DOCUMENTATION_ONLY) +if (NOT WITH_GUDHI_THIRD_PARTY) set (WITH_GUDHI_BENCHMARK OFF) set (WITH_GUDHI_EXAMPLE OFF) set (WITH_GUDHI_PYTHON OFF) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index b97142b6..c17855b6 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -43,13 +43,13 @@ make \endverbatim * * \subsection documentationgeneration C++ documentation * To generate the C++ documentation, the doxygen program - * is required. Run the following command in a terminal: + * is required (version ≥ 1.9.3 is advised). Run the following command in a terminal: * \verbatim make doxygen \endverbatim * Documentation will be generated in a folder named html. * * In case there is not a full setup present and only the documentation should be build the following command sequence * can be used: -\verbatim cmake -DWITH_GUDHI_CPP_DOCUMENTATION_ONLY=ON .. +\verbatim cmake -DWITH_GUDHI_THIRD_PARTY=OFF .. make doxygen\endverbatim * * \subsection helloworld Hello world ! 
-- cgit v1.2.3 From 70b9a1e3633ce3bc6112488ea6e0342ea910c772 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 30 Jun 2022 16:43:47 +0200 Subject: Reuse vector of points and reserve to avoid reallocations --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 1bb205b3..fb452326 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -14,6 +14,8 @@ #include // for casting from FT to Filtration_value #include // for CGAL::exact +#include + #include #include #include @@ -73,6 +75,8 @@ class Cech_blocker { CGAL::NT_converter cast_to_fv; Filtration_value radius = 0; bool is_min_enclos_ball = false; + Point_cloud points; + points.reserve(boost::size(sc_ptr_->simplex_vertex_range(sh))); // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) { @@ -82,19 +86,18 @@ class Cech_blocker { sph = cc_ptr_->get_cache().at(k); } else { - Point_cloud face_points; for (auto vertex : sc_ptr_->simplex_vertex_range(face_opposite_vertex.first)) { - face_points.push_back(cc_ptr_->get_point(vertex)); + points.push_back(cc_ptr_->get_point(vertex)); #ifdef DEBUG_TRACES std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } - sph = get_sphere(face_points.cbegin(), face_points.cend()); + sph = get_sphere(points.cbegin(), points.cend()); // Put edge sphere in cache sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size()); cc_ptr_->get_cache().push_back(sph); - // Clear face_points - face_points.clear(); + // Clear face points + points.clear(); } // Check if the minimal enclosing ball of current face contains the extra point/opposite vertex if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) { @@ -113,7 +116,6 @@ class Cech_blocker { } // Spheres of each face don't contain the whole simplex if(!is_min_enclos_ball) { - Point_cloud points; for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) { points.push_back(cc_ptr_->get_point(vertex)); } -- cgit v1.2.3 From 438f168ea992125382f8c23eb2cd9ad3e92688a7 Mon Sep 17 00:00:00 2001 From: albert-github Date: Fri, 1 Jul 2022 11:09:45 +0200 Subject: Don't reveal full path in the documentation but just the relative path In e,g, https://gudhi.inria.fr/doc/latest/struct_coefficient_field.html (in general in `../struct_coefficient_field.html`) we see lines like: ``` #include ``` and ``` The documentation for this struct was generated from the following file: /home/gailuron/workspace/gudhi/gudhi-devel/build/gudhi.3.5.0/concept/Persistent_cohomology/CoefficientField.h ``` instead of the relative names: ``` #include ``` and ``` The documentation for this struct was generated from the following file: src/Persistent_cohomology/concept/CoefficientField.h ``` (the links are pointing to the correct places in all cases.) This is corrected by stripping the path. 
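For context on why `@CMAKE_SOURCE_DIR@` works in this fix: `Doxyfile.in` is a template that CMake expands at configure time, so the same variable that injects the absolute source path can be used to strip it back out of the generated documentation. A hedged sketch of the mechanism, not GUDHI's exact invocation:

```cmake
# Expand @VAR@ placeholders in the Doxyfile template at configure time.
configure_file(${CMAKE_SOURCE_DIR}/src/Doxyfile.in
               ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
# After expansion the generated Doxyfile contains an absolute path, e.g.:
#   STRIP_FROM_PATH = /home/user/workspace/gudhi-devel
# which doxygen then removes from every displayed file name.
```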
--- src/Doxyfile.in | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index fb68ceb1..ad4e95ca 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -152,7 +152,7 @@ FULL_PATH_NAMES = YES # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. -STRIP_FROM_PATH = +STRIP_FROM_PATH = @CMAKE_SOURCE_DIR@ # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which @@ -162,7 +162,8 @@ STRIP_FROM_PATH = # using the -I flag. STRIP_FROM_INC_PATH = include \ - concept + concept \ + @CMAKE_SOURCE_DIR@ # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't -- cgit v1.2.3 From 3f05f2c81481c9a14357d8674378244a27aaec56 Mon Sep 17 00:00:00 2001 From: albert-github Date: Fri, 1 Jul 2022 15:40:31 +0200 Subject: Ignore complete current build directory when building documentation Found the file ``` build/CMakeFiles/ShowIncludes/foo.h ``` during the build of the documentation. The build directory should be excluded. --- src/Doxyfile.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Doxyfile.in b/src/Doxyfile.in index fb68ceb1..c20dffeb 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -832,7 +832,7 @@ EXCLUDE = @CMAKE_SOURCE_DIR@/data/ \ @CMAKE_SOURCE_DIR@/ext/ \ @CMAKE_SOURCE_DIR@/README.md \ @CMAKE_SOURCE_DIR@/.github \ - @CMAKE_CURRENT_BINARY_DIR@/new_gudhi_version_creation.md \ + @CMAKE_CURRENT_BINARY_DIR@ \ @GUDHI_DOXYGEN_SOURCE_PREFIX@/GudhUI/ \ @GUDHI_DOXYGEN_SOURCE_PREFIX@/cmake/ \ @GUDHI_DOXYGEN_SOURCE_PREFIX@/python/ -- cgit v1.2.3 From dd41000ec13553787c8575f7aa55ce9156ad6b8a Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 1 Jul 2022 15:42:29 +0200 Subject: Use dimension to get range size to reserve vector in cech --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index fb452326..e7f548ba 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -14,8 +14,6 @@ #include // for casting from FT to Filtration_value #include // for CGAL::exact -#include - #include #include #include @@ -76,7 +74,7 @@ class Cech_blocker { Filtration_value radius = 0; bool is_min_enclos_ball = false; Point_cloud points; - points.reserve(boost::size(sc_ptr_->simplex_vertex_range(sh))); + points.reserve(sc_ptr_->dimension(sh)+1); // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) { -- cgit v1.2.3 From e854e1b2292de95ecdec021559926a1fba14659d Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 5 Jul 2022 17:23:06 +0200 Subject: No need for a sph copy anymore --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 68a9f726..47b6c5e3 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ 
b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -77,11 +77,9 @@ class Cech_blocker { // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) { - Sphere sph; auto k = sc_ptr_->key(face_opposite_vertex.first); Simplex_key sph_key; if(k != sc_ptr_->null_key()) { - sph = cc_ptr_->get_cache().at(k); sph_key = k; } else { @@ -92,24 +90,23 @@ class Cech_blocker { std::clog << "#(" << vertex << ")#"; #endif // DEBUG_TRACES } - sph = get_sphere(face_points.cbegin(), face_points.cend()); // Put edge sphere in cache sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size()); sph_key = cc_ptr_->get_cache().size(); - cc_ptr_->get_cache().push_back(sph); + cc_ptr_->get_cache().push_back(get_sphere(face_points.cbegin(), face_points.cend())); // Clear face_points face_points.clear(); } // Check if the minimal enclosing ball of current face contains the extra point/opposite vertex - if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) { + if (kernel_.squared_distance_d_object()(cc_ptr_->get_cache()[sph_key].first, cc_ptr_->get_point(face_opposite_vertex.second)) <= cc_ptr_->get_cache()[sph_key].second) { #ifdef DEBUG_TRACES - std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; + std::clog << "center: " << cc_ptr_->get_cache()[sph_key].first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES is_min_enclos_ball = true; #if CGAL_VERSION_NR >= 1050000000 - if(exact_) CGAL::exact(sph.second); + if(exact_) CGAL::exact(cc_ptr_->get_cache()[sph_key].second); #endif - radius = std::sqrt(cast_to_fv(sph.second)); + radius = std::sqrt(cast_to_fv(cc_ptr_->get_cache()[sph_key].second)); sc_ptr_->assign_key(sh, sph_key); break; } -- cgit v1.2.3 From 7b4772c00c834bcc6693b48ce0b8431461ba8a53 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 6 Jul 2022 10:55:54 +0200 Subject: Use reference to sph for better readability --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 543f677e..c6f02857 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -99,15 +99,16 @@ class Cech_blocker { points.clear(); } // Check if the minimal enclosing ball of current face contains the extra point/opposite vertex - if (kernel_.squared_distance_d_object()(cc_ptr_->get_cache()[sph_key].first, cc_ptr_->get_point(face_opposite_vertex.second)) <= cc_ptr_->get_cache()[sph_key].second) { + Sphere const& sph = cc_ptr_->get_cache()[sph_key]; + if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) { #ifdef DEBUG_TRACES - std::clog << "center: " << cc_ptr_->get_cache()[sph_key].first << ", radius: " << radius << std::endl; + std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES is_min_enclos_ball = true; #if CGAL_VERSION_NR >= 1050000000 - if(exact_) CGAL::exact(cc_ptr_->get_cache()[sph_key].second); + if(exact_) CGAL::exact(sph.second); #endif - radius = std::sqrt(cast_to_fv(cc_ptr_->get_cache()[sph_key].second)); + radius = std::sqrt(cast_to_fv(sph.second));
sc_ptr_->assign_key(sh, sph_key); break; } -- cgit v1.2.3 From 51f599d16a43e2c8e831284c6320b73324873c17 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 6 Jul 2022 11:24:29 +0200 Subject: Switch lines to use intermediate variable --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index c6f02857..fc92e8fa 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -92,8 +92,8 @@ class Cech_blocker { #endif // DEBUG_TRACES } // Put edge sphere in cache - sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size()); sph_key = cc_ptr_->get_cache().size(); + sc_ptr_->assign_key(face_opposite_vertex.first, sph_key); cc_ptr_->get_cache().push_back(get_sphere(points.cbegin(), points.cend())); // Clear face points points.clear(); -- cgit v1.2.3 From 2559b78a16cc0f7af9ea3b78d6b388c629b2a4ae Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 6 Jul 2022 15:34:46 +0200 Subject: Move clog of radius after its assignment --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index fc92e8fa..22b99c5c 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -101,15 +101,15 @@ class Cech_blocker { // Check if the minimal enclosing ball of current face contains the extra point/opposite vertex Sphere const& sph = cc_ptr_->get_cache()[sph_key]; if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) { -#ifdef DEBUG_TRACES - std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; -#endif // DEBUG_TRACES is_min_enclos_ball = true; + sc_ptr_->assign_key(sh, sph_key); #if CGAL_VERSION_NR >= 1050000000 if(exact_) CGAL::exact(sph.second); #endif radius = std::sqrt(cast_to_fv(sph.second)); - sc_ptr_->assign_key(sh, sph_key); +#ifdef DEBUG_TRACES + std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; +#endif // DEBUG_TRACES break; } } -- cgit v1.2.3 From eb0ab20bdbf624e6b20896fb2cca1af71af7d4a5 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 27 Jul 2022 11:36:56 +0200 Subject: Enable keops dtm warnings --- src/python/doc/installation.rst | 2 +- src/python/test/test_dtm.py | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst index dd476054..4eefd415 100644 --- a/src/python/doc/installation.rst +++ b/src/python/doc/installation.rst @@ -175,7 +175,7 @@ A complete configuration would be : Scikit-learn version 1.0.1 POT version 0.8.0 HNSWlib found - PyKeOps version [pyKeOps]: 1.5 + PyKeOps version [pyKeOps]: 2.1 EagerPy version 0.30.0 TensorFlow version 2.7.0 Sphinx version 4.3.0 diff --git a/src/python/test/test_dtm.py b/src/python/test/test_dtm.py index e46d616c..21cff055 100755 --- a/src/python/test/test_dtm.py +++ b/src/python/test/test_dtm.py @@ -91,11 +91,12 @@ def test_density(): def test_dtm_overflow_warnings(): pts = numpy.array([[10., 100000000000000000000000000000.], [1000., 100000000000000000000000000.]]) - + impl_warn = ["keops", "hnsw"] with warnings.catch_warnings(record=True) as w: - # TODO 
Test "keops" implementation as well when next version of pykeops (current is 1.5) is released (should fix the problem (cf. issue #543)) - dtm = DistanceToMeasure(2, implementation="hnsw") - r = dtm.fit_transform(pts) - assert len(w) == 1 - assert issubclass(w[0].category, RuntimeWarning) - assert "Overflow" in str(w[0].message) + for impl in impl_warn: + dtm = DistanceToMeasure(2, implementation=impl) + r = dtm.fit_transform(pts) + assert len(w) == 2 + for i in range(len(w)): + assert issubclass(w[i].category, RuntimeWarning) + assert "Overflow" in str(w[i].message) -- cgit v1.2.3 From 98106c3130aa2c7988743c50abd562cdf5af5456 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Thu, 28 Jul 2022 12:09:19 +0200 Subject: Modify doc in simplex_tree in order to match code --- src/python/gudhi/simplex_tree.pyx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx index 521a7763..05bfe22e 100644 --- a/src/python/gudhi/simplex_tree.pyx +++ b/src/python/gudhi/simplex_tree.pyx @@ -487,9 +487,9 @@ cdef class SimplexTree: otherwise it is kept. The algorithm then proceeds with the next candidate. .. warning:: - Several candidates of the same dimension may be inserted simultaneously before calling `block_simplex`, so - if you examine the complex in `block_simplex`, you may hit a few simplices of the same dimension that have - not been vetted by `block_simplex` yet, or have already been rejected but not yet removed. + Several candidates of the same dimension may be inserted simultaneously before calling `blocker_func`, so + if you examine the complex in `blocker_func`, you may hit a few simplices of the same dimension that have + not been vetted by `blocker_func` yet, or have already been rejected but not yet removed. :param max_dim: Expansion maximal dimension value. 
:type max_dim: int -- cgit v1.2.3 From 2a4f60b822b15c34058220beffd311f46f11d3b3 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 2 Aug 2022 12:04:08 +0200 Subject: Get filtration from face when it's the same as simplex --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 22b99c5c..6f082494 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -106,7 +106,10 @@ class Cech_blocker { #if CGAL_VERSION_NR >= 1050000000 if(exact_) CGAL::exact(sph.second); #endif - radius = std::sqrt(cast_to_fv(sph.second)); + if(k != sc_ptr_->null_key()) + radius = sc_ptr_->filtration(face_opposite_vertex.first); + else + radius = std::sqrt(cast_to_fv(sph.second)); #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES -- cgit v1.2.3 From 1c5c80fb53075706ba74751468d748960fae9dd9 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 2 Aug 2022 14:56:51 +0200 Subject: Pull out the for loop in test --- src/python/test/test_dtm.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/python/test/test_dtm.py b/src/python/test/test_dtm.py index 21cff055..b276f041 100755 --- a/src/python/test/test_dtm.py +++ b/src/python/test/test_dtm.py @@ -92,11 +92,10 @@ def test_density(): def test_dtm_overflow_warnings(): pts = numpy.array([[10., 100000000000000000000000000000.], [1000., 100000000000000000000000000.]]) impl_warn = ["keops", "hnsw"] - with warnings.catch_warnings(record=True) as w: - for impl in impl_warn: + for impl in impl_warn: + with warnings.catch_warnings(record=True) as w: dtm = DistanceToMeasure(2, implementation=impl) r = dtm.fit_transform(pts) - assert len(w) == 2 - for i in range(len(w)): - assert issubclass(w[i].category, RuntimeWarning) - assert "Overflow" in str(w[i].message) + assert len(w) == 1 + assert issubclass(w[0].category, RuntimeWarning) + assert "Overflow" in str(w[0].message) -- cgit v1.2.3 From 6b28e9daa5480dadd0fa3a75d610f98e0c8a0ff5 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Tue, 2 Aug 2022 18:28:07 +0200 Subject: Get filtration from face regardless of key value --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 6f082494..18516e70 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -71,7 +71,6 @@ class Cech_blocker { * \return true if the simplex radius is greater than the Cech_complex max_radius*/ bool operator()(Simplex_handle sh) { using Point_cloud = std::vector; - CGAL::NT_converter cast_to_fv; Filtration_value radius = 0; bool is_min_enclos_ball = false; Point_cloud points; @@ -106,10 +105,7 @@ class Cech_blocker { #if CGAL_VERSION_NR >= 1050000000 if(exact_) CGAL::exact(sph.second); #endif - if(k != sc_ptr_->null_key()) - radius = sc_ptr_->filtration(face_opposite_vertex.first); - else - radius = std::sqrt(cast_to_fv(sph.second)); + radius = sc_ptr_->filtration(face_opposite_vertex.first); #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES @@ -125,6 +121,7 @@ class Cech_blocker { #if 
CGAL_VERSION_NR >= 1050000000 if(exact_) CGAL::exact(sph.second); #endif + CGAL::NT_converter cast_to_fv; radius = std::sqrt(cast_to_fv(sph.second)); sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size()); -- cgit v1.2.3 From ac8e64471466b19aae3e525a6e6b384adf654f28 Mon Sep 17 00:00:00 2001 From: Hind-M Date: Wed, 3 Aug 2022 15:31:18 +0200 Subject: Remove useless exact option when getting filtration from face --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index 18516e70..7dcbe9b4 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -102,9 +102,6 @@ class Cech_blocker { if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) { is_min_enclos_ball = true; sc_ptr_->assign_key(sh, sph_key); -#if CGAL_VERSION_NR >= 1050000000 - if(exact_) CGAL::exact(sph.second); -#endif radius = sc_ptr_->filtration(face_opposite_vertex.first); #ifdef DEBUG_TRACES std::clog << "center: " << sph.first << ", radius: " << radius << std::endl; #endif // DEBUG_TRACES -- cgit v1.2.3 From 6bedbafa1741fb9f4da92ddd54eca9b6442c04fb Mon Sep 17 00:00:00 2001 From: Hind-M Date: Fri, 5 Aug 2022 14:51:07 +0200 Subject: Ensure valid filtration before setting it in cech --- src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h index e7f548ba..9b5c5add 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h +++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h @@ -130,7 +130,8 @@ class Cech_blocker { #ifdef DEBUG_TRACES if (radius > cc_ptr_->max_radius()) std::clog << "radius > max_radius => expansion is blocked\n"; #endif // DEBUG_TRACES - sc_ptr_->assign_filtration(sh, radius); + // Check that the filtration to be assigned (radius) would be valid + if (radius > sc_ptr_->filtration(sh)) sc_ptr_->assign_filtration(sh, radius); return (radius > cc_ptr_->max_radius()); } -- cgit v1.2.3 From dc7a0aed53d7b74580ed5625293e27cacd452c34 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Tue, 9 Aug 2022 16:06:06 +0200 Subject: Use boolean for pq_handle data type and a specific update call for CGAL>=5.5 --- .../include/gudhi/Skeleton_blocker_contractor.h | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index 56b76318..321d80be 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -172,7 +172,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> { }; typedef CGAL::Modifiable_priority_queue PQ; - typedef typename PQ::handle pq_handle; + typedef bool pq_handle; // An Edge_data is associated with EVERY edge in the complex (collapsible or not).
@@ -196,7 +196,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> { } bool is_in_PQ() const { - return PQHandle_ != PQ::null_handle(); + return PQHandle_ != false; } void set_PQ_handle(pq_handle h) { @@ -204,7 +204,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> { } void reset_PQ_handle() { - PQHandle_ = PQ::null_handle(); + PQHandle_ = false; } private: @@ -238,16 +238,22 @@ typename GeometricSimplifiableComplex::Vertex_handle> { } void insert_in_PQ(Edge_handle edge, Edge_data& data) { - data.set_PQ_handle(heap_PQ_->push(edge)); + heap_PQ_->push(edge); + data.set_PQ_handle(true); ++current_num_edges_heap_; } void update_in_PQ(Edge_handle edge, Edge_data& data) { +#if CGAL_VERSION_NR < 1050500000 data.set_PQ_handle(heap_PQ_->update(edge, data.PQ_handle())); +#else + heap_PQ_->update(edge); +#endif } void remove_from_PQ(Edge_handle edge, Edge_data& data) { - data.set_PQ_handle(heap_PQ_->erase(edge, data.PQ_handle())); + heap_PQ_->erase(edge); + data.set_PQ_handle(false); --current_num_edges_heap_; } -- cgit v1.2.3 From 71cbf8f35814d247fe7421c079a6ea78da2282f4 Mon Sep 17 00:00:00 2001 From: Vincent Rouvreau Date: Tue, 9 Aug 2022 17:52:31 +0200 Subject: Use CGAL::CGAL_BOOST_PENDING_RELAXED_HEAP to have the exact same result - needs investigation to understand why --- src/Contraction/include/gudhi/Skeleton_blocker_contractor.h | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index 321d80be..6911ca2e 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -171,7 +171,12 @@ typename GeometricSimplifiableComplex::Vertex_handle> { Self const* algorithm_; }; +#if CGAL_VERSION_NR < 1050500000 typedef CGAL::Modifiable_priority_queue PQ; +#else + typedef CGAL::Modifiable_priority_queue PQ; +#endif + typedef bool pq_handle; -- cgit v1.2.3
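The last two commits above change how Skeleton_blocker_contractor tracks queue membership. Judging from the patch, the handle returned by CGAL::Modifiable_priority_queue was only ever consumed to answer "is this edge currently queued?", and for CGAL >= 5.5 update() is called with the edge alone and returns nothing, so a per-edge boolean flag is a sufficient replacement for the handle. Below is a minimal sketch of that bookkeeping pattern; MockPQ and all names in it are illustrative stand-ins, not the real CGAL or GUDHI API.

```
#include <cassert>
#include <set>

// Mock queue: a stand-in for CGAL::Modifiable_priority_queue, NOT the real
// CGAL API. Priorities are elided; only membership bookkeeping is shown.
struct MockPQ {
  std::multiset<int> ids;
  void push(int id) { ids.insert(id); }
  void erase(int id) { ids.erase(id); }
  void update(int /*id*/) { /* re-sift in place; nothing is returned */ }
};

// Mirrors Edge_data in the patch: the former handle member degrades to a
// boolean "is this edge currently queued?" flag.
struct EdgeData {
  bool in_pq = false;
};

void insert_in_pq(MockPQ& pq, int id, EdgeData& d) {
  pq.push(id);
  d.in_pq = true;   // was: data.set_PQ_handle(heap_PQ_->push(edge))
}

void remove_from_pq(MockPQ& pq, int id, EdgeData& d) {
  pq.erase(id);
  d.in_pq = false;  // was: data.set_PQ_handle(heap_PQ_->erase(edge, handle))
}

int main() {
  MockPQ pq;
  EdgeData d;
  insert_in_pq(pq, 42, d);
  assert(d.in_pq);
  remove_from_pq(pq, 42, d);
  assert(!d.in_pq);
  return 0;
}
```

The final commit (71cbf8f) additionally selects CGAL::CGAL_BOOST_PENDING_RELAXED_HEAP as the underlying heap so that, per its message, contraction produces exactly the same result as before, with the reason still to be investigated.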