path: root/docs/source/auto_examples/plot_optim_OTreg.rst
Diffstat (limited to 'docs/source/auto_examples/plot_optim_OTreg.rst')
-rw-r--r--  docs/source/auto_examples/plot_optim_OTreg.rst  663
1 files changed, 0 insertions, 663 deletions
diff --git a/docs/source/auto_examples/plot_optim_OTreg.rst b/docs/source/auto_examples/plot_optim_OTreg.rst
deleted file mode 100644
index 844cba0..0000000
--- a/docs/source/auto_examples/plot_optim_OTreg.rst
+++ /dev/null
@@ -1,663 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_optim_OTreg.py:
-
-
-==================================
-Regularized OT with generic solver
-==================================
-
-Illustrates the use of the generic solver for regularized OT with a
-user-designed regularization term. It uses the conditional gradient algorithm
-as in [6] and the generalized conditional gradient algorithm as proposed in
-[5, 7]. The generic problem both solvers address is sketched after the
-references below.
-
-
-[5] N. Courty, R. Flamary, D. Tuia, A. Rakotomamonjy, "Optimal Transport for
-Domain Adaptation," in IEEE Transactions on Pattern Analysis and Machine
-Intelligence, vol. PP, no. 99, pp. 1-1.
-
-[6] Ferradans, S., Papadakis, N., Peyré, G., & Aujol, J. F. (2014).
-Regularized discrete optimal transport. SIAM Journal on Imaging Sciences,
-7(3), 1853-1882.
-
-[7] Rakotomamonjy, A., Flamary, R., & Courty, N. (2015). Generalized
-conditional gradient: analysis of convergence and applications.
-arXiv preprint arXiv:1510.06567.
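-
-Both solvers address a linearly constrained problem of the following form
-(a sketch of the formulation, following the ``ot.optim.cg`` / ``ot.optim.gcg``
-documentation):
-
-.. math::
-    \gamma^* = \arg\min_\gamma \ \langle \gamma, M \rangle_F + \mathrm{reg} \cdot f(\gamma)
-
-    \text{s.t. } \gamma \mathbf{1} = a, \quad \gamma^T \mathbf{1} = b, \quad \gamma \geq 0
-
-where :math:`f` is the user-provided differentiable regularization term and
-``df`` its gradient; ``ot.optim.gcg`` additionally keeps a built-in entropic
-term, as used in the last example below.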
-
-
-
-
-
-
-.. code-block:: python
-
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
- import ot.plot
-
-
-
-
-
-
-
-Generate data
--------------
-
-
-
-.. code-block:: python
-
-
- #%% parameters
-
- n = 100 # nb bins
-
- # bin positions
- x = np.arange(n, dtype=np.float64)
-
- # Gaussian distributions
-    a = ot.datasets.make_1D_gauss(n, m=20, s=5)  # m = mean, s = std
- b = ot.datasets.make_1D_gauss(n, m=60, s=10)
-
- # loss matrix
- M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
- M /= M.max()
-
-
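-The two histograms can be plotted before solving; the short sketch below is
-not part of the original script and simply reuses ``x``, ``a`` and ``b``
-defined above.
-
-.. code-block:: python
-
-    # Optional visualization (hypothetical addition): source and target
-    # 1D distributions that the transport plans below will match.
-    pl.figure(1, figsize=(6.4, 3))
-    pl.plot(x, a, 'b', label='Source distribution')
-    pl.plot(x, b, 'r', label='Target distribution')
-    pl.legend()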
-
-
-
-
-
-Solve EMD
----------
-
-
-
-.. code-block:: python
-
-
- #%% EMD
-
- G0 = ot.emd(a, b, M)
-
- pl.figure(3, figsize=(5, 5))
- ot.plot.plot1D_mat(a, b, G0, 'OT matrix G0')
-
-
-
-
-.. image:: /auto_examples/images/sphx_glr_plot_optim_OTreg_003.png
- :align: center
-
-
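-The scalar transport cost can be recovered from the plan, or computed directly
-with ``ot.emd2``; a minimal sketch not present in the original example:
-
-.. code-block:: python
-
-    # Cost of the unregularized plan; both values should agree.
-    cost_from_plan = np.sum(G0 * M)
-    cost_direct = ot.emd2(a, b, M)  # EMD value computed directly
-    print(cost_from_plan, cost_direct)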
-
-
-Solve EMD with Frobenius norm regularization
---------------------------------------------
-
-
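-With ``f(G) = 0.5 * np.sum(G**2)`` and its gradient ``df(G) = G``, the problem
-solved below can be sketched as:
-
-.. math::
-    \gamma^* = \arg\min_\gamma \ \langle \gamma, M \rangle_F + \mathrm{reg} \cdot \frac{1}{2} \|\gamma\|_F^2
-
-    \text{s.t. } \gamma \mathbf{1} = a, \quad \gamma^T \mathbf{1} = b, \quad \gamma \geq 0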
-
-.. code-block:: python
-
-
- #%% Example with Frobenius norm regularization
-
-
- def f(G):
- return 0.5 * np.sum(G**2)
-
-
- def df(G):
- return G
-
-
- reg = 1e-1
-
- Gl2 = ot.optim.cg(a, b, M, reg, f, df, verbose=True)
-
- pl.figure(3)
- ot.plot.plot1D_mat(a, b, Gl2, 'OT matrix Frob. reg')
-
-
-
-
-.. image:: /auto_examples/images/sphx_glr_plot_optim_OTreg_004.png
- :align: center
-
-
-.. rst-class:: sphx-glr-script-out
-
- Out::
-
- It. |Loss |Delta loss
- --------------------------------
- 0|1.760578e-01|0.000000e+00
- 1|1.669467e-01|-5.457501e-02
- 2|1.665639e-01|-2.298130e-03
- 3|1.664378e-01|-7.572776e-04
- 4|1.664077e-01|-1.811855e-04
- 5|1.663912e-01|-9.936787e-05
- 6|1.663852e-01|-3.555826e-05
- 7|1.663814e-01|-2.305693e-05
- 8|1.663785e-01|-1.760450e-05
- 9|1.663767e-01|-1.078011e-05
- 10|1.663751e-01|-9.525192e-06
- 11|1.663737e-01|-8.396466e-06
- 12|1.663727e-01|-6.086938e-06
- 13|1.663720e-01|-4.042609e-06
- 14|1.663713e-01|-4.160914e-06
- 15|1.663707e-01|-3.823502e-06
- 16|1.663702e-01|-3.022440e-06
- 17|1.663697e-01|-3.181249e-06
- 18|1.663692e-01|-2.698532e-06
- 19|1.663687e-01|-3.258253e-06
- It. |Loss |Delta loss
- --------------------------------
- 20|1.663682e-01|-2.741118e-06
- 21|1.663678e-01|-2.624135e-06
- 22|1.663673e-01|-2.645179e-06
- 23|1.663670e-01|-1.957237e-06
- 24|1.663666e-01|-2.261541e-06
- 25|1.663663e-01|-1.851305e-06
- 26|1.663660e-01|-1.942296e-06
- 27|1.663657e-01|-2.092896e-06
- 28|1.663653e-01|-1.924361e-06
- 29|1.663651e-01|-1.625455e-06
- 30|1.663648e-01|-1.641123e-06
- 31|1.663645e-01|-1.566666e-06
- 32|1.663643e-01|-1.338514e-06
- 33|1.663641e-01|-1.222711e-06
- 34|1.663639e-01|-1.221805e-06
- 35|1.663637e-01|-1.440781e-06
- 36|1.663634e-01|-1.520091e-06
- 37|1.663632e-01|-1.288193e-06
- 38|1.663630e-01|-1.123055e-06
- 39|1.663628e-01|-1.024487e-06
- It. |Loss |Delta loss
- --------------------------------
- 40|1.663627e-01|-1.079606e-06
- 41|1.663625e-01|-1.172093e-06
- 42|1.663623e-01|-1.047880e-06
- 43|1.663621e-01|-1.010577e-06
- 44|1.663619e-01|-1.064438e-06
- 45|1.663618e-01|-9.882375e-07
- 46|1.663616e-01|-8.532647e-07
- 47|1.663615e-01|-9.930189e-07
- 48|1.663613e-01|-8.728955e-07
- 49|1.663612e-01|-9.524214e-07
- 50|1.663610e-01|-9.088418e-07
- 51|1.663609e-01|-7.639430e-07
- 52|1.663608e-01|-6.662611e-07
- 53|1.663607e-01|-7.133700e-07
- 54|1.663605e-01|-7.648141e-07
- 55|1.663604e-01|-6.557516e-07
- 56|1.663603e-01|-7.304213e-07
- 57|1.663602e-01|-6.353809e-07
- 58|1.663601e-01|-7.968279e-07
- 59|1.663600e-01|-6.367159e-07
- It. |Loss |Delta loss
- --------------------------------
- 60|1.663599e-01|-5.610790e-07
- 61|1.663598e-01|-5.787466e-07
- 62|1.663596e-01|-6.937777e-07
- 63|1.663596e-01|-5.599432e-07
- 64|1.663595e-01|-5.813048e-07
- 65|1.663594e-01|-5.724600e-07
- 66|1.663593e-01|-6.081892e-07
- 67|1.663592e-01|-5.948732e-07
- 68|1.663591e-01|-4.941833e-07
- 69|1.663590e-01|-5.213739e-07
- 70|1.663589e-01|-5.127355e-07
- 71|1.663588e-01|-4.349251e-07
- 72|1.663588e-01|-5.007084e-07
- 73|1.663587e-01|-4.880265e-07
- 74|1.663586e-01|-4.931950e-07
- 75|1.663585e-01|-4.981309e-07
- 76|1.663584e-01|-3.952959e-07
- 77|1.663584e-01|-4.544857e-07
- 78|1.663583e-01|-4.237579e-07
- 79|1.663582e-01|-4.382386e-07
- It. |Loss |Delta loss
- --------------------------------
- 80|1.663582e-01|-3.646051e-07
- 81|1.663581e-01|-4.197994e-07
- 82|1.663580e-01|-4.072764e-07
- 83|1.663580e-01|-3.994645e-07
- 84|1.663579e-01|-4.842721e-07
- 85|1.663578e-01|-3.276486e-07
- 86|1.663578e-01|-3.737346e-07
- 87|1.663577e-01|-4.282043e-07
- 88|1.663576e-01|-4.020937e-07
- 89|1.663576e-01|-3.431951e-07
- 90|1.663575e-01|-3.052335e-07
- 91|1.663575e-01|-3.500538e-07
- 92|1.663574e-01|-3.063176e-07
- 93|1.663573e-01|-3.576367e-07
- 94|1.663573e-01|-3.224681e-07
- 95|1.663572e-01|-3.673221e-07
- 96|1.663572e-01|-3.635561e-07
- 97|1.663571e-01|-3.527236e-07
- 98|1.663571e-01|-2.788548e-07
- 99|1.663570e-01|-2.727141e-07
- It. |Loss |Delta loss
- --------------------------------
- 100|1.663570e-01|-3.127278e-07
- 101|1.663569e-01|-2.637504e-07
- 102|1.663569e-01|-2.922750e-07
- 103|1.663568e-01|-3.076454e-07
- 104|1.663568e-01|-2.911509e-07
- 105|1.663567e-01|-2.403398e-07
- 106|1.663567e-01|-2.439790e-07
- 107|1.663567e-01|-2.634542e-07
- 108|1.663566e-01|-2.452203e-07
- 109|1.663566e-01|-2.852991e-07
- 110|1.663565e-01|-2.165490e-07
- 111|1.663565e-01|-2.450250e-07
- 112|1.663564e-01|-2.685294e-07
- 113|1.663564e-01|-2.821800e-07
- 114|1.663564e-01|-2.237390e-07
- 115|1.663563e-01|-1.992842e-07
- 116|1.663563e-01|-2.166739e-07
- 117|1.663563e-01|-2.086064e-07
- 118|1.663562e-01|-2.435945e-07
- 119|1.663562e-01|-2.292497e-07
- It. |Loss |Delta loss
- --------------------------------
- 120|1.663561e-01|-2.366209e-07
- 121|1.663561e-01|-2.138746e-07
- 122|1.663561e-01|-2.009637e-07
- 123|1.663560e-01|-2.386258e-07
- 124|1.663560e-01|-1.927442e-07
- 125|1.663560e-01|-2.081681e-07
- 126|1.663559e-01|-1.759123e-07
- 127|1.663559e-01|-1.890771e-07
- 128|1.663559e-01|-1.971315e-07
- 129|1.663558e-01|-2.101983e-07
- 130|1.663558e-01|-2.035645e-07
- 131|1.663558e-01|-1.984492e-07
- 132|1.663557e-01|-1.849064e-07
- 133|1.663557e-01|-1.795703e-07
- 134|1.663557e-01|-1.624087e-07
- 135|1.663557e-01|-1.689557e-07
- 136|1.663556e-01|-1.644308e-07
- 137|1.663556e-01|-1.618007e-07
- 138|1.663556e-01|-1.483013e-07
- 139|1.663555e-01|-1.708771e-07
- It. |Loss |Delta loss
- --------------------------------
- 140|1.663555e-01|-2.013847e-07
- 141|1.663555e-01|-1.721217e-07
- 142|1.663554e-01|-2.027911e-07
- 143|1.663554e-01|-1.764565e-07
- 144|1.663554e-01|-1.677151e-07
- 145|1.663554e-01|-1.351982e-07
- 146|1.663553e-01|-1.423360e-07
- 147|1.663553e-01|-1.541112e-07
- 148|1.663553e-01|-1.491601e-07
- 149|1.663553e-01|-1.466407e-07
- 150|1.663552e-01|-1.801524e-07
- 151|1.663552e-01|-1.714107e-07
- 152|1.663552e-01|-1.491257e-07
- 153|1.663552e-01|-1.513799e-07
- 154|1.663551e-01|-1.354539e-07
- 155|1.663551e-01|-1.233818e-07
- 156|1.663551e-01|-1.576219e-07
- 157|1.663551e-01|-1.452791e-07
- 158|1.663550e-01|-1.262867e-07
- 159|1.663550e-01|-1.316379e-07
- It. |Loss |Delta loss
- --------------------------------
- 160|1.663550e-01|-1.295447e-07
- 161|1.663550e-01|-1.283286e-07
- 162|1.663550e-01|-1.569222e-07
- 163|1.663549e-01|-1.172942e-07
- 164|1.663549e-01|-1.399809e-07
- 165|1.663549e-01|-1.229432e-07
- 166|1.663549e-01|-1.326191e-07
- 167|1.663548e-01|-1.209694e-07
- 168|1.663548e-01|-1.372136e-07
- 169|1.663548e-01|-1.338395e-07
- 170|1.663548e-01|-1.416497e-07
- 171|1.663548e-01|-1.298576e-07
- 172|1.663547e-01|-1.190590e-07
- 173|1.663547e-01|-1.167083e-07
- 174|1.663547e-01|-1.069425e-07
- 175|1.663547e-01|-1.217780e-07
- 176|1.663547e-01|-1.140754e-07
- 177|1.663546e-01|-1.160707e-07
- 178|1.663546e-01|-1.101798e-07
- 179|1.663546e-01|-1.114904e-07
- It. |Loss |Delta loss
- --------------------------------
- 180|1.663546e-01|-1.064022e-07
- 181|1.663546e-01|-9.258231e-08
- 182|1.663546e-01|-1.213120e-07
- 183|1.663545e-01|-1.164296e-07
- 184|1.663545e-01|-1.188762e-07
- 185|1.663545e-01|-9.394153e-08
- 186|1.663545e-01|-1.028656e-07
- 187|1.663545e-01|-1.115348e-07
- 188|1.663544e-01|-9.768310e-08
- 189|1.663544e-01|-1.021806e-07
- 190|1.663544e-01|-1.086303e-07
- 191|1.663544e-01|-9.879008e-08
- 192|1.663544e-01|-1.050210e-07
- 193|1.663544e-01|-1.002463e-07
- 194|1.663543e-01|-1.062747e-07
- 195|1.663543e-01|-9.348538e-08
- 196|1.663543e-01|-7.992512e-08
- 197|1.663543e-01|-9.558020e-08
- 198|1.663543e-01|-9.993772e-08
- 199|1.663543e-01|-8.588499e-08
- It. |Loss |Delta loss
- --------------------------------
- 200|1.663543e-01|-8.737134e-08
-
-
-Solve EMD with entropic regularization
---------------------------------------
-
-
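-Here ``f(G) = np.sum(G * np.log(G))`` with gradient ``df(G) = np.log(G) + 1``,
-so the regularized problem can be sketched as:
-
-.. math::
-    \gamma^* = \arg\min_\gamma \ \langle \gamma, M \rangle_F + \mathrm{reg} \sum_{i,j} \gamma_{i,j} \log \gamma_{i,j}
-
-    \text{s.t. } \gamma \mathbf{1} = a, \quad \gamma^T \mathbf{1} = b, \quad \gamma \geq 0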
-
-.. code-block:: python
-
-
- #%% Example with entropic regularization
-
-
- def f(G):
- return np.sum(G * np.log(G))
-
-
- def df(G):
- return np.log(G) + 1.
-
-
- reg = 1e-3
-
- Ge = ot.optim.cg(a, b, M, reg, f, df, verbose=True)
-
- pl.figure(4, figsize=(5, 5))
- ot.plot.plot1D_mat(a, b, Ge, 'OT matrix Entrop. reg')
-
-
-
-
-.. image:: /auto_examples/images/sphx_glr_plot_optim_OTreg_006.png
- :align: center
-
-
-.. rst-class:: sphx-glr-script-out
-
- Out::
-
- It. |Loss |Delta loss
- --------------------------------
- 0|1.692289e-01|0.000000e+00
- 1|1.617643e-01|-4.614437e-02
- 2|1.612639e-01|-3.102965e-03
- 3|1.611291e-01|-8.371098e-04
- 4|1.610468e-01|-5.110558e-04
- 5|1.610198e-01|-1.672927e-04
- 6|1.610130e-01|-4.232417e-05
- 7|1.610090e-01|-2.513455e-05
- 8|1.610002e-01|-5.443507e-05
- 9|1.609996e-01|-3.657071e-06
- 10|1.609948e-01|-2.998735e-05
- 11|1.609695e-01|-1.569217e-04
- 12|1.609533e-01|-1.010779e-04
- 13|1.609520e-01|-8.043897e-06
- 14|1.609465e-01|-3.415246e-05
- 15|1.609386e-01|-4.898605e-05
- 16|1.609324e-01|-3.837052e-05
- 17|1.609298e-01|-1.617826e-05
- 18|1.609184e-01|-7.080015e-05
- 19|1.609083e-01|-6.273206e-05
- It. |Loss |Delta loss
- --------------------------------
- 20|1.608988e-01|-5.940805e-05
- 21|1.608853e-01|-8.380030e-05
- 22|1.608844e-01|-5.185045e-06
- 23|1.608824e-01|-1.279113e-05
- 24|1.608819e-01|-3.156821e-06
- 25|1.608783e-01|-2.205746e-05
- 26|1.608764e-01|-1.189894e-05
- 27|1.608755e-01|-5.474607e-06
- 28|1.608737e-01|-1.144227e-05
- 29|1.608676e-01|-3.775335e-05
- 30|1.608638e-01|-2.348020e-05
- 31|1.608627e-01|-6.863136e-06
- 32|1.608529e-01|-6.110230e-05
- 33|1.608487e-01|-2.641106e-05
- 34|1.608409e-01|-4.823638e-05
- 35|1.608373e-01|-2.256641e-05
- 36|1.608338e-01|-2.132444e-05
- 37|1.608310e-01|-1.786649e-05
- 38|1.608260e-01|-3.103848e-05
- 39|1.608206e-01|-3.321265e-05
- It. |Loss |Delta loss
- --------------------------------
- 40|1.608201e-01|-3.054747e-06
- 41|1.608195e-01|-4.198335e-06
- 42|1.608193e-01|-8.458736e-07
- 43|1.608159e-01|-2.153759e-05
- 44|1.608115e-01|-2.738314e-05
- 45|1.608108e-01|-3.960032e-06
- 46|1.608081e-01|-1.675447e-05
- 47|1.608072e-01|-5.976340e-06
- 48|1.608046e-01|-1.604130e-05
- 49|1.608020e-01|-1.617036e-05
- 50|1.608014e-01|-3.957795e-06
- 51|1.608011e-01|-1.292411e-06
- 52|1.607998e-01|-8.431795e-06
- 53|1.607964e-01|-2.127054e-05
- 54|1.607947e-01|-1.021878e-05
- 55|1.607947e-01|-3.560621e-07
- 56|1.607900e-01|-2.929781e-05
- 57|1.607890e-01|-5.740229e-06
- 58|1.607858e-01|-2.039550e-05
- 59|1.607836e-01|-1.319545e-05
- It. |Loss |Delta loss
- --------------------------------
- 60|1.607826e-01|-6.378947e-06
- 61|1.607808e-01|-1.145102e-05
- 62|1.607776e-01|-1.941743e-05
- 63|1.607743e-01|-2.087422e-05
- 64|1.607741e-01|-1.310249e-06
- 65|1.607738e-01|-1.682752e-06
- 66|1.607691e-01|-2.913936e-05
- 67|1.607671e-01|-1.288855e-05
- 68|1.607654e-01|-1.002448e-05
- 69|1.607641e-01|-8.209492e-06
- 70|1.607632e-01|-5.588467e-06
- 71|1.607619e-01|-8.050388e-06
- 72|1.607618e-01|-9.417493e-07
- 73|1.607598e-01|-1.210509e-05
- 74|1.607591e-01|-4.392914e-06
- 75|1.607579e-01|-7.759587e-06
- 76|1.607574e-01|-2.760280e-06
- 77|1.607556e-01|-1.146469e-05
- 78|1.607550e-01|-3.689456e-06
- 79|1.607550e-01|-4.065631e-08
- It. |Loss |Delta loss
- --------------------------------
- 80|1.607539e-01|-6.555681e-06
- 81|1.607528e-01|-7.177470e-06
- 82|1.607527e-01|-5.306068e-07
- 83|1.607514e-01|-7.816045e-06
- 84|1.607511e-01|-2.301970e-06
- 85|1.607504e-01|-4.281072e-06
- 86|1.607503e-01|-7.821886e-07
- 87|1.607480e-01|-1.403013e-05
- 88|1.607480e-01|-1.169298e-08
- 89|1.607473e-01|-4.235982e-06
- 90|1.607470e-01|-1.717105e-06
- 91|1.607470e-01|-6.148402e-09
- 92|1.607462e-01|-5.396481e-06
- 93|1.607461e-01|-5.194954e-07
- 94|1.607450e-01|-6.525707e-06
- 95|1.607442e-01|-5.332060e-06
- 96|1.607439e-01|-1.682093e-06
- 97|1.607437e-01|-1.594796e-06
- 98|1.607435e-01|-7.923812e-07
- 99|1.607420e-01|-9.738552e-06
- It. |Loss |Delta loss
- --------------------------------
- 100|1.607419e-01|-1.022448e-07
- 101|1.607419e-01|-4.865999e-07
- 102|1.607418e-01|-7.092012e-07
- 103|1.607408e-01|-5.861815e-06
- 104|1.607402e-01|-3.953266e-06
- 105|1.607395e-01|-3.969572e-06
- 106|1.607390e-01|-3.612075e-06
- 107|1.607377e-01|-7.683735e-06
- 108|1.607365e-01|-7.777599e-06
- 109|1.607364e-01|-2.335096e-07
- 110|1.607364e-01|-4.562036e-07
- 111|1.607360e-01|-2.089538e-06
- 112|1.607356e-01|-2.755355e-06
- 113|1.607349e-01|-4.501960e-06
- 114|1.607347e-01|-1.160544e-06
- 115|1.607346e-01|-6.289450e-07
- 116|1.607345e-01|-2.092146e-07
- 117|1.607336e-01|-5.990866e-06
- 118|1.607330e-01|-3.348498e-06
- 119|1.607328e-01|-1.256222e-06
- It. |Loss |Delta loss
- --------------------------------
- 120|1.607320e-01|-5.418353e-06
- 121|1.607318e-01|-8.296189e-07
- 122|1.607311e-01|-4.381608e-06
- 123|1.607310e-01|-8.913901e-07
- 124|1.607309e-01|-3.808821e-07
- 125|1.607302e-01|-4.608994e-06
- 126|1.607294e-01|-5.063777e-06
- 127|1.607290e-01|-2.532835e-06
- 128|1.607285e-01|-2.870049e-06
- 129|1.607284e-01|-4.892812e-07
- 130|1.607281e-01|-1.760452e-06
- 131|1.607279e-01|-1.727139e-06
- 132|1.607275e-01|-2.220706e-06
- 133|1.607271e-01|-2.516930e-06
- 134|1.607269e-01|-1.201434e-06
- 135|1.607269e-01|-2.183459e-09
- 136|1.607262e-01|-4.223011e-06
- 137|1.607258e-01|-2.530202e-06
- 138|1.607258e-01|-1.857260e-07
- 139|1.607256e-01|-1.401957e-06
- It. |Loss |Delta loss
- --------------------------------
- 140|1.607250e-01|-3.242751e-06
- 141|1.607247e-01|-2.308071e-06
- 142|1.607247e-01|-4.730700e-08
- 143|1.607246e-01|-4.240229e-07
- 144|1.607242e-01|-2.484810e-06
- 145|1.607238e-01|-2.539206e-06
- 146|1.607234e-01|-2.535574e-06
- 147|1.607231e-01|-1.954802e-06
- 148|1.607228e-01|-1.765447e-06
- 149|1.607228e-01|-1.620007e-08
- 150|1.607222e-01|-3.615783e-06
- 151|1.607222e-01|-8.668516e-08
- 152|1.607215e-01|-4.000673e-06
- 153|1.607213e-01|-1.774103e-06
- 154|1.607213e-01|-6.328834e-09
- 155|1.607209e-01|-2.418783e-06
- 156|1.607208e-01|-2.848492e-07
- 157|1.607207e-01|-8.836043e-07
- 158|1.607205e-01|-1.192836e-06
- 159|1.607202e-01|-1.638022e-06
- It. |Loss |Delta loss
- --------------------------------
- 160|1.607202e-01|-3.670914e-08
- 161|1.607197e-01|-3.153709e-06
- 162|1.607197e-01|-2.419565e-09
- 163|1.607194e-01|-2.136882e-06
- 164|1.607194e-01|-1.173754e-09
- 165|1.607192e-01|-8.169238e-07
- 166|1.607191e-01|-9.218755e-07
- 167|1.607189e-01|-9.459255e-07
- 168|1.607187e-01|-1.294835e-06
- 169|1.607186e-01|-5.797668e-07
- 170|1.607186e-01|-4.706272e-08
- 171|1.607183e-01|-1.753383e-06
- 172|1.607183e-01|-1.681573e-07
- 173|1.607183e-01|-2.563971e-10
-
-
-Solve EMD with Frobenius norm + entropic regularization
--------------------------------------------------------
-
-
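-Following the POT documentation, ``ot.optim.gcg`` combines a built-in entropic
-term (weighted by ``reg1``) with the user-provided regularizer ``f`` (weighted
-by ``reg2``), so the problem solved below can be sketched as:
-
-.. math::
-    \gamma^* = \arg\min_\gamma \ \langle \gamma, M \rangle_F + \mathrm{reg}_1 \sum_{i,j} \gamma_{i,j} \log \gamma_{i,j} + \mathrm{reg}_2 \cdot \frac{1}{2} \|\gamma\|_F^2
-
-    \text{s.t. } \gamma \mathbf{1} = a, \quad \gamma^T \mathbf{1} = b, \quad \gamma \geq 0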
-
-.. code-block:: python
-
-
- #%% Example with Frobenius norm + entropic regularization with gcg
-
-
- def f(G):
- return 0.5 * np.sum(G**2)
-
-
- def df(G):
- return G
-
-
- reg1 = 1e-3
- reg2 = 1e-1
-
- Gel2 = ot.optim.gcg(a, b, M, reg1, reg2, f, df, verbose=True)
-
- pl.figure(5, figsize=(5, 5))
- ot.plot.plot1D_mat(a, b, Gel2, 'OT entropic + matrix Frob. reg')
- pl.show()
-
-
-
-.. image:: /auto_examples/images/sphx_glr_plot_optim_OTreg_008.png
- :align: center
-
-
-.. rst-class:: sphx-glr-script-out
-
- Out::
-
- It. |Loss |Delta loss
- --------------------------------
- 0|1.693084e-01|0.000000e+00
- 1|1.610121e-01|-5.152589e-02
- 2|1.609378e-01|-4.622297e-04
- 3|1.609284e-01|-5.830043e-05
- 4|1.609284e-01|-1.111407e-12
-
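-To compare the solutions quantitatively, the linear part of the cost
-:math:`\langle \gamma, M \rangle_F` of each plan can be printed; a minimal
-sketch reusing the plans computed above (not part of the original script):
-
-.. code-block:: python
-
-    # Linear transport cost of each computed plan.
-    for name, G in [('EMD', G0), ('Frobenius reg.', Gl2),
-                    ('Entropic reg.', Ge), ('Entropic + Frobenius (gcg)', Gel2)]:
-        print('{:28s} {:.6f}'.format(name, np.sum(G * M)))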
-
-**Total running time of the script:** ( 0 minutes 1.990 seconds)
-
-
-
-.. only:: html
-
- .. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_optim_OTreg.py <plot_optim_OTreg.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_optim_OTreg.ipynb <plot_optim_OTreg.ipynb>`
-
-
-.. only:: html
-
- .. rst-class:: sphx-glr-signature
-
- `Gallery generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_