From 7d9c5e7ef81cfb1cd4725058c09a7f683ca03eef Mon Sep 17 00:00:00 2001
From: Rémi Flamary
Date: Mon, 24 Jul 2017 14:58:15 +0200
Subject: add test optim

---
 test/test_optim.py | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 65 insertions(+)
 create mode 100644 test/test_optim.py

diff --git a/test/test_optim.py b/test/test_optim.py
new file mode 100644
index 0000000..43cba7d
--- /dev/null
+++ b/test/test_optim.py
@@ -0,0 +1,65 @@
+
+
+import ot
+import numpy as np
+
+# import pytest
+
+
+def test_conditional_gradient():
+
+    n = 100  # nb bins
+
+    # bin positions
+    x = np.arange(n, dtype=np.float64)
+
+    # Gaussian distributions
+    a = ot.datasets.get_1D_gauss(n, m=20, s=5)  # m= mean, s= std
+    b = ot.datasets.get_1D_gauss(n, m=60, s=10)
+
+    # loss matrix
+    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
+    M /= M.max()
+
+    def f(G):
+        return 0.5 * np.sum(G**2)
+
+    def df(G):
+        return G
+
+    reg = 1e-1
+
+    G, log = ot.optim.cg(a, b, M, reg, f, df, verbose=True, log=True)
+
+    assert np.allclose(a, G.sum(1))
+    assert np.allclose(b, G.sum(0))
+
+
+def test_generalized_conditional_gradient():
+
+    n = 100  # nb bins
+
+    # bin positions
+    x = np.arange(n, dtype=np.float64)
+
+    # Gaussian distributions
+    a = ot.datasets.get_1D_gauss(n, m=20, s=5)  # m= mean, s= std
+    b = ot.datasets.get_1D_gauss(n, m=60, s=10)
+
+    # loss matrix
+    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
+    M /= M.max()
+
+    def f(G):
+        return 0.5 * np.sum(G**2)
+
+    def df(G):
+        return G
+
+    reg1 = 1e-3
+    reg2 = 1e-1
+
+    G, log = ot.optim.gcg(a, b, M, reg1, reg2, f, df, verbose=True, log=True)
+
+    assert np.allclose(a, G.sum(1), atol=1e-05)
+    assert np.allclose(b, G.sum(0), atol=1e-05)
--
cgit v1.2.3