From d50d8145a5c0cf69d438b018cd5f1b914905e784 Mon Sep 17 00:00:00 2001
From: AdrienCorenflos
Date: Fri, 22 Oct 2021 15:05:14 +0300
Subject: Add set_gradients method for JAX backend. (#278)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Rémi Flamary
---
 test/test_backend.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

(limited to 'test/test_backend.py')

diff --git a/test/test_backend.py b/test/test_backend.py
index bc5b00c..cbfaf94 100644
--- a/test/test_backend.py
+++ b/test/test_backend.py
@@ -345,7 +345,8 @@ def test_gradients_backends():
 
     rnd = np.random.RandomState(0)
     v = rnd.randn(10)
-    c = rnd.randn(1)
+    c = rnd.randn()
+    e = rnd.randn()
 
     if torch:
 
@@ -362,3 +363,15 @@
 
         assert torch.equal(v2.grad, v2)
         assert torch.equal(c2.grad, c2)
+
+    if jax:
+        nx = ot.backend.JaxBackend()
+        with jax.checking_leaks():
+            def fun(a, b, d):
+                val = b * nx.sum(a ** 4) + d
+                return nx.set_gradients(val, (a, b, d), (a, b, 2 * d))
+            grad_val = jax.grad(fun, argnums=(0, 1, 2))(v, c, e)
+
+        np.testing.assert_almost_equal(fun(v, c, e), c * np.sum(v ** 4) + e, decimal=4)
+        np.testing.assert_allclose(grad_val[0], v, atol=1e-4)
+        np.testing.assert_allclose(grad_val[2], 2 * e, atol=1e-4)
--
cgit v1.2.3
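
For reference, below is a minimal sketch (not part of the patch) of the pattern the new test exercises: declaring custom gradients through the JAX backend's set_gradients and recovering them with jax.grad. The function name scaled_norm and the particular gradients passed to it are illustrative assumptions, not POT code; only the set_gradients call signature is taken from the test above.

# Illustrative sketch only: assumes POT's JaxBackend.set_gradients behaves as
# exercised by the test above (declared gradients are returned by jax.grad);
# scaled_norm is a made-up example function.
import jax
import numpy as np
import ot

nx = ot.backend.JaxBackend()

def scaled_norm(x, w):
    # Value is w * ||x||^2; gradients w.r.t. (x, w) are supplied explicitly.
    val = w * nx.sum(x ** 2)
    return nx.set_gradients(val, (x, w), (2 * w * x, nx.sum(x ** 2)))

x = np.random.RandomState(0).randn(5)
g_x, g_w = jax.grad(scaled_norm, argnums=(0, 1))(x, 1.5)
np.testing.assert_allclose(g_x, 2 * 1.5 * x, atol=1e-4)
np.testing.assert_allclose(g_w, np.sum(x ** 2), atol=1e-4)

As the test suggests, the point of set_gradients is to let backend-agnostic code return a value whose gradients are supplied analytically rather than traced through the computation that produced it.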