From cc723a7a3735a44491bd1085b6bb6c47272b73ed Mon Sep 17 00:00:00 2001
From: MathieuCarriere
Date: Sat, 16 Apr 2022 11:21:09 +0200
Subject: fix test

---
 src/python/test/test_diff.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'src/python')

diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py
index bab0d10c..e0c99d07 100644
--- a/src/python/test/test_diff.py
+++ b/src/python/test/test_diff.py
@@ -22,7 +22,7 @@ def test_cubical_diff():
     cl = CubicalLayer(dimensions=[0])
 
     with tf.GradientTape() as tape:
-        dgm = cl.call(X)[0][0]
+        dgm = cl.call(X)[0]
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
     grads = tape.gradient(loss, [X])
     assert np.abs(grads[0].numpy()-np.array([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]])).sum() <= 1e-6
@@ -34,7 +34,7 @@ def test_nonsquare_cubical_diff():
     cl = CubicalLayer(dimensions=[0])
 
     with tf.GradientTape() as tape:
-        dgm = cl.call(X)[0][0]
+        dgm = cl.call(X)[0]
         loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
     grads = tape.gradient(loss, [X])
     assert np.abs(grads[0].numpy()-np.array([[0.,0.5,-0.5],[0.,0.,0.]])).sum() <= 1e-6
--
cgit v1.2.3
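
For context, below is a minimal, self-contained sketch of the differentiation pattern the fixed tests exercise. It assumes the gudhi.tensorflow import path for CubicalLayer and uses an arbitrary 3x3 input (neither appears in the diff); the constructor, the [0] indexing that this commit fixes, the loss, and the GradientTape usage are taken directly from the patch above.

import numpy as np
import tensorflow as tf

# Assumed import path; the diff only shows the CubicalLayer name, not its module.
from gudhi.tensorflow import CubicalLayer

# Arbitrary filtration values for a cubical complex, stored as a trainable
# variable so the gradient of a diagram-based loss can flow back into them.
X = tf.Variable(np.random.rand(3, 3).astype(np.float32), trainable=True)

# Track 0-dimensional homology only, as in both tests.
cl = CubicalLayer(dimensions=[0])

with tf.GradientTape() as tape:
    # call() returns one diagram tensor per requested dimension, so [0]
    # (rather than the old [0][0]) selects the 0-dimensional diagram,
    # a tensor of (birth, death) pairs with shape (n, 2).
    dgm = cl.call(X)[0]
    # Sum of squared half-persistences (death - birth) over the diagram.
    loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))

# Gradient of the loss with respect to the input filtration values; it has
# the same shape as X, which is what the asserts in the tests above check.
grads = tape.gradient(loss, [X])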