author     MathieuCarriere <mathieu.carriere3@gmail.com>  2021-11-05 19:21:54 +0100
committer  MathieuCarriere <mathieu.carriere3@gmail.com>  2021-11-05 19:21:54 +0100
commit     bd7134d71628958e4e281817f746b0ad7ad83d00 (patch)
tree       31d46d4a6c084b11c7502b75cac59316be326877 /src/python/test/test_diff.py
parent     734622d5a8816cfdaaed2aaa4b9b3212fb6a259c (diff)
modified API for multiple dimensions and finite + essential
Diffstat (limited to 'src/python/test/test_diff.py')
-rw-r--r--  src/python/test/test_diff.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py
index 73a03697..f49eff7b 100644
--- a/src/python/test/test_diff.py
+++ b/src/python/test/test_diff.py
@@ -7,10 +7,10 @@ def test_rips_diff():
     Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32)
     X = tf.Variable(initial_value=Xinit, trainable=True)
-    rl = RipsLayer(maximum_edge_length=2., dimension=0)
+    rl = RipsLayer(maximum_edge_length=2., dimensions=[0])
     with tf.GradientTape() as tape:
-        dgm = rl.call(X)
+        dgm = rl.call(X)[0][0]
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
     grads = tape.gradient(loss, [X])
     assert np.abs(grads[0].numpy()-np.array([[-.5,-.5],[.5,.5]])).sum() <= 1e-6
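
This hunk moves RipsLayer from a scalar dimension argument to a dimensions list, and call now returns one entry per requested dimension, each split into a finite and an essential diagram (hence the new [0][0] indexing). A minimal sketch of the new usage; the gudhi.tensorflow import path and the option of requesting several dimensions at once are assumptions drawn from the commit message, not shown in this diff:

import numpy as np
import tensorflow as tf
from gudhi.tensorflow import RipsLayer  # assumed import path

Xinit = np.array([[1., 1.], [2., 2.]], dtype=np.float32)
X = tf.Variable(initial_value=Xinit, trainable=True)

# Hypothetical: request two homology dimensions in one pass.
rl = RipsLayer(maximum_edge_length=2., dimensions=[0, 1])
with tf.GradientTape() as tape:
    dgms = rl.call(X)
    # dgms[i] pairs with dimensions[i]; the second index selects the
    # finite part ([0]) or the essential part ([1]), as in the hunk above.
    dgm0 = dgms[0][0]
    # Total squared persistence of the finite 0-dimensional diagram.
    loss = tf.math.reduce_sum(tf.square(.5 * (dgm0[:, 1] - dgm0[:, 0])))
grads = tape.gradient(loss, [X])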
@@ -20,10 +20,10 @@ def test_cubical_diff():
     Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32)
     X = tf.Variable(initial_value=Xinit, trainable=True)
-    cl = CubicalLayer(dimension=0)
+    cl = CubicalLayer(dimensions=[0])
     with tf.GradientTape() as tape:
-        dgm = cl.call(X)
+        dgm = cl.call(X)[0]
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
     grads = tape.gradient(loss, [X])
     assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6
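
CubicalLayer gets the same dimensions-list argument, but its call output is indexed by dimension only: the hunk shows a single [0], with no finite/essential split in the cubical case. A sketch under the same assumed import path:

import numpy as np
import tensorflow as tf
from gudhi.tensorflow import CubicalLayer  # assumed import path

Xinit = np.array([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]], dtype=np.float32)
X = tf.Variable(initial_value=Xinit, trainable=True)

cl = CubicalLayer(dimensions=[0])
with tf.GradientTape() as tape:
    # One finite diagram per entry of dimensions, per the hunk above.
    dgm = cl.call(X)[0]
    loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))
grads = tape.gradient(loss, [X])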
@@ -55,10 +55,10 @@ def test_st_diff():
     Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32)
     F = tf.Variable(initial_value=Finit, trainable=True)
-    sl = LowerStarSimplexTreeLayer(simplextree=st, dimension=0)
+    sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0])
     with tf.GradientTape() as tape:
-        dgm = sl.call(F)
+        dgm = sl.call(F)[0][0]
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
     grads = tape.gradient(loss, [F])
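
LowerStarSimplexTreeLayer follows the RipsLayer indexing ([dimension][finite/essential]). The simplex tree st used by the test sits outside this hunk, so the sketch below builds a small stand-in complex of its own; the complex, the filtration values, and the import paths are all illustrative assumptions:

import numpy as np
import tensorflow as tf
import gudhi
from gudhi.tensorflow import LowerStarSimplexTreeLayer  # assumed import path

# Hypothetical stand-in complex: a path on three vertices
# (inserting an edge also inserts its vertices).
st = gudhi.SimplexTree()
st.insert([0, 1])
st.insert([1, 2])

# One filtration value per vertex, defining the lower-star filtration.
F = tf.Variable(np.array([6., 4., 3.], dtype=np.float32), trainable=True)
sl = LowerStarSimplexTreeLayer(simplextree=st, dimensions=[0])
with tf.GradientTape() as tape:
    # [0][0] selects the finite diagram of the first requested dimension.
    dgm = sl.call(F)[0][0]
    loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))
grads = tape.gradient(loss, [F])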