author     MathieuCarriere <mathieu.carriere3@gmail.com>  2021-11-01 14:36:11 +0100
committer  MathieuCarriere <mathieu.carriere3@gmail.com>  2021-11-01 14:36:11 +0100
commit     c4269eef025d4e6c7a763cd99b5dada647693c1d (patch)
tree       802bb37fac936d817ea074aaadcb277b0d41c24a /src/python/doc
parent     10be82856aee6eb7f4e704757b70c9dab6fe28b8 (diff)
fix doc
Diffstat (limited to 'src/python/doc')
-rw-r--r--  src/python/doc/cubical_complex_tflow_itf_ref.rst   |  9
-rw-r--r--  src/python/doc/ls_simplex_tree_tflow_itf_ref.rst   | 11
-rw-r--r--  src/python/doc/rips_complex_tflow_itf_ref.rst      |  8
3 files changed, 24 insertions, 4 deletions
diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst
index e85cfdc6..a907dfce 100644
--- a/src/python/doc/cubical_complex_tflow_itf_ref.rst
+++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst
@@ -10,7 +10,7 @@ TensorFlow layer for cubical persistence
Example of gradient computed from cubical persistence
-----------------------------------------------------
-.. code-block:: python
+.. testcode::

    from gudhi.tensorflow import *
    import numpy as np
@@ -23,9 +23,16 @@ Example of gradient computed from cubical persistence
    with tf.GradientTape() as tape:
        dgm = cl.call(X)
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
    grads = tape.gradient(loss, [X])
    print(grads[0].numpy())

+.. testoutput::
+
+    [[ 0.   0.   0. ]
+     [ 0.   0.5  0. ]
+     [ 0.   0.  -0.5]]
+
Documentation for CubicalLayer
------------------------------
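
Note: the hunks above show only the changed lines plus context, so the full testcode block does not appear here. Below is a minimal self-contained sketch consistent with the printed gradient; the 3x3 input grid ``Xinit``, its values, and the ``CubicalLayer(dimension=0)`` constructor argument are assumptions that do not appear in this diff.

.. code-block:: python

    import numpy as np
    import tensorflow as tf
    from gudhi.tensorflow import CubicalLayer

    # Assumed 3x3 grid: the shape matches the printed gradient, and these
    # values yield exactly one finite 0-dimensional persistence pair,
    # born at pixel (2,2) (value 1.) and dying at pixel (1,1) (value 2.)
    Xinit = np.array([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]], dtype=np.float32)
    X = tf.Variable(initial_value=Xinit, trainable=True)
    cl = CubicalLayer(dimension=0)

    with tf.GradientTape() as tape:
        dgm = cl.call(X)  # persistence diagram of the sublevel-set filtration
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))

    # Differentiate the loss back to the input pixel values
    grads = tape.gradient(loss, [X])
    print(grads[0].numpy())

With a single finite pair (1., 2.), the derivative of the loss is .5*(2.-1.) = 0.5 at the death pixel and -0.5 at the birth pixel, which matches the testoutput added by this hunk.
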
diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
index 7baf611c..26cf1ff2 100644
--- a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
+++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
@@ -10,7 +10,7 @@ TensorFlow layer for lower-star persistence on simplex trees
Example of gradient computed from lower-star filtration of a simplex tree
-------------------------------------------------------------------------
-.. code-block:: python
+.. testcode::

    from gudhi.tensorflow import *
    import numpy as np
@@ -47,8 +47,15 @@ Example of gradient computed from lower-star filtration of a simplex tree
    with tf.GradientTape() as tape:
        dgm = sl.call(F)
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
    grads = tape.gradient(loss, [F])
-    print(grads[0].numpy())
+    print(grads[0].indices.numpy())
+    print(grads[0].values.numpy())
+
+.. testoutput::
+
+    [2 4]
+    [-1.  1.]

Documentation for LowerStarSimplexTreeLayer
-------------------------------------------
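
As above, the testcode block is only partially visible in this diff. Here is a self-contained sketch consistent with the sparse gradient printed in the new testoutput; the path-graph complex, the filtration values ``Finit``, and the ``LowerStarSimplexTreeLayer(simplextree=st, dimension=0)`` arguments are assumptions not shown in these hunks.

.. code-block:: python

    import numpy as np
    import tensorflow as tf
    import gudhi as gd
    from gudhi.tensorflow import LowerStarSimplexTreeLayer

    # Assumed complex: a path graph 0-1-2-...-10
    st = gd.SimplexTree()
    for i in range(11):
        st.insert([i])
    for i in range(10):
        st.insert([i, i + 1])

    # Assumed vertex values: the component born at vertex 2 (value 3.)
    # dies at vertex 4 (value 5.); the one born at vertex 7 is essential
    Finit = np.array([6., 4., 3., 4., 5., 4., 3., 2., 3., 4., 5.], dtype=np.float32)
    F = tf.Variable(initial_value=Finit, trainable=True)
    sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0)

    with tf.GradientTape() as tape:
        dgm = sl.call(F)
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))

    # The gradient with respect to F is sparse (tf.IndexedSlices): only the
    # birth and death vertices get a nonzero entry, hence the two prints
    grads = tape.gradient(loss, [F])
    print(grads[0].indices.numpy())
    print(grads[0].values.numpy())

This is why the hunk replaces ``print(grads[0].numpy())`` with separate prints of ``indices`` and ``values``: for the single finite pair (3., 5.) the derivative is -1. at birth vertex 2 and +1. at death vertex 4.
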
diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst
index 15ba4c8e..7300eba0 100644
--- a/src/python/doc/rips_complex_tflow_itf_ref.rst
+++ b/src/python/doc/rips_complex_tflow_itf_ref.rst
@@ -10,7 +10,7 @@ TensorFlow layer for Vietoris-Rips persistence
Example of gradient computed from Vietoris-Rips persistence
-----------------------------------------------------------
-.. code-block:: python
+.. testcode::

    from gudhi.tensorflow import *
    import numpy as np
@@ -23,9 +23,15 @@ Example of gradient computed from Vietoris-Rips persistence
    with tf.GradientTape() as tape:
        dgm = rl.call(X)
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
    grads = tape.gradient(loss, [X])
    print(grads[0].numpy())

+.. testoutput::
+
+    [[-0.5 -0.5]
+     [ 0.5  0.5]]
+
Documentation for RipsLayer
---------------------------
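
Again, only part of the testcode block appears in the diff. A self-contained sketch consistent with the printed gradient; the two-point cloud ``Xinit`` and the ``RipsLayer(maximum_edge_length=2., dimension=0)`` arguments are assumptions not shown in this hunk.

.. code-block:: python

    import numpy as np
    import tensorflow as tf
    from gudhi.tensorflow import RipsLayer

    # Assumed point cloud: two points at distance sqrt(2)
    Xinit = np.array([[1., 1.], [2., 2.]], dtype=np.float32)
    X = tf.Variable(initial_value=Xinit, trainable=True)
    rl = RipsLayer(maximum_edge_length=2., dimension=0)

    with tf.GradientTape() as tape:
        dgm = rl.call(X)  # 0-dimensional Vietoris-Rips diagram
        loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))

    # The finite death time is the pairwise distance, so a gradient step
    # on this loss moves the two points toward each other
    grads = tape.gradient(loss, [X])
    print(grads[0].numpy())

The death time of the single finite pair is sqrt(2), and its gradient with respect to each point is the unit vector along the segment joining the points, scaled by .5*sqrt(2); this gives the (-0.5, -0.5) and (0.5, 0.5) rows in the testoutput.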