From c4269eef025d4e6c7a763cd99b5dada647693c1d Mon Sep 17 00:00:00 2001
From: MathieuCarriere
Date: Mon, 1 Nov 2021 14:36:11 +0100
Subject: fix doc

---
 src/python/doc/cubical_complex_tflow_itf_ref.rst |  9 ++++++++-
 src/python/doc/ls_simplex_tree_tflow_itf_ref.rst | 11 +++++++++--
 src/python/doc/rips_complex_tflow_itf_ref.rst    |  8 +++++++-
 3 files changed, 24 insertions(+), 4 deletions(-)

diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst
index e85cfdc6..a907dfce 100644
--- a/src/python/doc/cubical_complex_tflow_itf_ref.rst
+++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst
@@ -10,7 +10,7 @@ TensorFlow layer for cubical persistence
 Example of gradient computed from cubical persistence
 -----------------------------------------------------
 
-.. code-block:: python
+.. testcode::
 
     from gudhi.tensorflow import *
     import numpy as np
@@ -23,9 +23,16 @@ Example of gradient computed from cubical persistence
     with tf.GradientTape() as tape:
         dgm = cl.call(X)
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
     grads = tape.gradient(loss, [X])
     print(grads[0].numpy())
 
+.. testoutput::
+
+    [[ 0.   0.   0. ]
+     [ 0.   0.5  0. ]
+     [ 0.   0.  -0.5]]
+
 Documentation for CubicalLayer
 ------------------------------
 
diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
index 7baf611c..26cf1ff2 100644
--- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
+++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
@@ -10,7 +10,7 @@ TensorFlow layer for lower-star persistence on simplex trees
 Example of gradient computed from lower-star filtration of a simplex tree
 -------------------------------------------------------------------------
 
-.. code-block:: python
+.. testcode::
 
     from gudhi.tensorflow import *
     import numpy as np
@@ -47,8 +47,15 @@ Example of gradient computed from lower-star filtration of a simplex tree
     with tf.GradientTape() as tape:
         dgm = sl.call(F)
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
     grads = tape.gradient(loss, [F])
-    print(grads[0].numpy())
+    print(grads[0].indices.numpy())
+    print(grads[0].values.numpy())
+
+.. testoutput::
+
+    [2 4]
+    [-1.  1.]
 
 Documentation for LowerStarSimplexTreeLayer
 -------------------------------------------
diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst
index 15ba4c8e..7300eba0 100644
--- a/src/python/doc/rips_complex_tflow_itf_ref.rst
+++ b/src/python/doc/rips_complex_tflow_itf_ref.rst
@@ -10,7 +10,7 @@ TensorFlow layer for Vietoris-Rips persistence
 Example of gradient computed from Vietoris-Rips persistence
 -----------------------------------------------------------
 
-.. code-block:: python
+.. testcode::
 
     from gudhi.tensorflow import *
     import numpy as np
@@ -23,9 +23,15 @@ Example of gradient computed from Vietoris-Rips persistence
     with tf.GradientTape() as tape:
         dgm = rl.call(X)
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
     grads = tape.gradient(loss, [X])
     print(grads[0].numpy())
 
+.. testoutput::
+
+    [[-0.5 -0.5]
+     [ 0.5  0.5]]
+
 Documentation for RipsLayer
 ---------------------------
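
A note on the directive change common to all three files: ``.. testcode::`` and
``.. testoutput::`` are provided by Sphinx's ``sphinx.ext.doctest`` extension, so
unlike a plain ``code-block``, these examples are actually executed and their
printed gradients compared against the expected output whenever the doctest
builder runs. A minimal sketch of what this relies on (illustrative only, not
copied from the GUDHI repository, whose ``conf.py`` presumably already enables
something equivalent)::

    # conf.py -- minimal sketch, assuming a standard Sphinx setup;
    # illustrative, not the actual GUDHI configuration.
    extensions = [
        "sphinx.ext.doctest",  # makes testcode/testoutput pairs executable checks
    ]

The blocks are then exercised with ``sphinx-build -b doctest <sourcedir> <outdir>``;
any mismatch between the printed gradients and the ``testoutput`` block fails the
build, which is what makes this change more than cosmetic.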
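
Why the lower-star example now prints ``grads[0].indices`` and
``grads[0].values`` rather than ``grads[0].numpy()``: the gradient with respect
to the filtration values ``F`` comes back sparse, as a ``tf.IndexedSlices``,
since only the simplices that realize a persistence pair receive a nonzero
gradient. A self-contained sketch of the same TensorFlow behaviour (the
``tf.gather`` detour is only an illustration, not a claim about the layer's
internals; the sparse return type is the point)::

    import tensorflow as tf

    # Gradients through tf.gather come back as tf.IndexedSlices, the sparse
    # (indices, values) representation printed in the lower-star example.
    F = tf.Variable([1., 2., 3., 4., 5.])
    with tf.GradientTape() as tape:
        y = tf.math.reduce_sum(tf.gather(F, [2, 4]))
    g = tape.gradient(y, F)

    print(g.indices.numpy())                # [2 4]
    print(g.values.numpy())                 # [1. 1.]
    print(tf.convert_to_tensor(g).numpy())  # dense view: [0. 0. 1. 0. 1.]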
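
Finally, the point of exposing these gradients is to optimize the input. Below
is a minimal sketch of a descent loop built on the cubical example. Both the
``CubicalLayer(dimensions=[0])`` constructor call and the values of ``X`` are
assumptions, since neither appears in the patch hunks; check the
"Documentation for CubicalLayer" section for the real signature::

    # Illustrative sketch (not part of the patch): gradient descent on the
    # pixel values so that finite persistence pairs are flattened.
    from gudhi.tensorflow import CubicalLayer
    import tensorflow as tf

    # Illustrative input; the example files define their own X.
    X = tf.Variable([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]],
                    dtype=tf.float32, trainable=True)
    cl = CubicalLayer(dimensions=[0])  # assumed constructor signature
    opt = tf.keras.optimizers.SGD(learning_rate=0.1)

    for _ in range(10):
        with tf.GradientTape() as tape:
            dgm = cl.call(X)
            # same total-persistence loss as in the documentation examples
            loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))
        grads = tape.gradient(loss, [X])
        opt.apply_gradients(zip(grads, [X]))  # descent step on the pixel values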