author    Kilian <kilian.fatras@gmail.com>    2019-11-15 16:10:37 +0100
committer Kilian <kilian.fatras@gmail.com>    2019-11-15 16:10:37 +0100
commit    0280a3441b09c781035cda3b74213ec92026ff9e (patch)
tree      23d068d2485b919a67696088d603e22e32d76ff0 /test/test_optim.py
parent    2a32e2ea64d0d5096953a9b8259b0507fa58dca5 (diff)
fix bug numItermax emd in cg
Diffstat (limited to 'test/test_optim.py')
-rw-r--r--  test/test_optim.py  33
1 file changed, 33 insertions(+), 0 deletions(-)
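
The new test below exercises the numItermaxEmd keyword of ot.optim.cg, the parameter this commit's fix concerns. As a minimal standalone sketch of the same call pattern, assuming POT is importable as ot (sample size reduced and verbose disabled here for speed; everything else mirrors the test):

    import numpy as np
    import ot

    n = 200  # smaller than the test's 4000 samples, just to illustrate the call
    mu_s, cov_s = np.array([0, 0]), np.array([[1, 0], [0, 1]])
    mu_t, cov_t = np.array([4, 4]), np.array([[1, -.8], [-.8, 1]])
    xs = ot.datasets.make_2D_samples_gauss(n, mu_s, cov_s)
    xt = ot.datasets.make_2D_samples_gauss(n, mu_t, cov_t)

    a, b = np.ones((n,)) / n, np.ones((n,)) / n  # uniform marginals
    M = ot.dist(xs, xt)   # squared Euclidean cost matrix
    M /= M.max()

    def f(G):             # quadratic regularization term
        return 0.5 * np.sum(G**2)

    def df(G):            # and its gradient
        return G

    # numItermaxEmd caps the iterations of the inner emd solve at each CG step
    G, log = ot.optim.cg(a, b, M, 1e-1, f, df, numItermaxEmd=200000,
                         verbose=False, log=True)

The marginals of the returned plan G can then be checked against a and b, as the test does with np.testing.assert_allclose.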
diff --git a/test/test_optim.py b/test/test_optim.py
index ae31e1f..aade36e 100644
--- a/test/test_optim.py
+++ b/test/test_optim.py
@@ -37,6 +37,39 @@ def test_conditional_gradient():
     np.testing.assert_allclose(b, G.sum(0))
 
 
+def test_conditional_gradient2():
+    n = 4000  # nb samples
+
+    mu_s = np.array([0, 0])
+    cov_s = np.array([[1, 0], [0, 1]])
+
+    mu_t = np.array([4, 4])
+    cov_t = np.array([[1, -.8], [-.8, 1]])
+
+    xs = ot.datasets.make_2D_samples_gauss(n, mu_s, cov_s)
+    xt = ot.datasets.make_2D_samples_gauss(n, mu_t, cov_t)
+
+    a, b = np.ones((n,)) / n, np.ones((n,)) / n
+
+    # loss matrix
+    M = ot.dist(xs, xt)
+    M /= M.max()
+
+    def f(G):
+        return 0.5 * np.sum(G**2)
+
+    def df(G):
+        return G
+
+    reg = 1e-1
+
+    G, log = ot.optim.cg(a, b, M, reg, f, df, numItermaxEmd=200000,
+                         verbose=True, log=True)
+
+    np.testing.assert_allclose(a, G.sum(1))
+    np.testing.assert_allclose(b, G.sum(0))
+
+
 def test_generalized_conditional_gradient():
     n_bins = 100  # nb bins