diff options
author | ncassereau-idris <84033440+ncassereau-idris@users.noreply.github.com> | 2021-11-02 13:42:02 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2021-11-02 13:42:02 +0100 |
commit | a335324d008e8982be61d7ace937815a2bfa98f9 (patch) | |
tree | 83c7f637597f10f6f3d20b15532e53fc65b51f22 /ot/optim.py | |
parent | 0cb2b2efe901ed74c614046d250518769f870313 (diff) |
[MRG] Backend for gromov (#294)
* bregman: small correction
* gromov backend first draft
* Removing decorators
* Reworked casting method
* Bug solve
* Removing casting
* Bug solve
* toarray renamed todense ; expand_dims removed
* Warning (jax not supporting sparse matrix) moved
* Mistake corrected
* test backend
* Sparsity test for older versions of pytorch
* Trying pytorch/1.10
* Attempt to correct torch sparse bug
* Backend version of gromov tests
* Random state introduced for remaining gromov functions
* review changes
* code coverage
* Docs (first draft, to be continued)
* Gromov docs
* Prettified docs
* mistake corrected in the docs
* little change
Co-authored-by: Rémi Flamary <remi.flamary@gmail.com>
Diffstat (limited to 'ot/optim.py')
-rw-r--r-- | ot/optim.py | 22 |
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/ot/optim.py b/ot/optim.py index 34cbb17..6456c03 100644 --- a/ot/optim.py +++ b/ot/optim.py @@ -23,7 +23,7 @@ def line_search_armijo(f, xk, pk, gfk, old_fval, r""" Armijo linesearch function that works with matrices - Find an approximate minimum of :math:`f(x_k + \\alpha \cdot p_k)` that satisfies the + Find an approximate minimum of :math:`f(x_k + \alpha \cdot p_k)` that satisfies the armijo conditions. Parameters @@ -129,7 +129,7 @@ def solve_linesearch(cost, G, deltaG, Mi, f_val, .. _references-solve-linesearch: References ---------- - .. [24] Vayer Titouan, Chapel Laetitia, Flamary R{\'e}mi, Tavenard Romain and Courty Nicolas + .. [24] Vayer Titouan, Chapel Laetitia, Flamary Rémi, Tavenard Romain and Courty Nicolas "Optimal Transport for structured data with application on graphs" International Conference on Machine Learning (ICML). 2019. """ @@ -162,13 +162,13 @@ def cg(a, b, M, reg, f, df, G0=None, numItermax=200, numItermaxEmd=100000, The function solves the following optimization problem: .. math:: - \gamma = arg\min_\gamma <\gamma,M>_F + \mathrm{reg} \cdot f(\gamma) + \gamma = \mathop{\arg \min}_\gamma <\gamma, \mathbf{M}>_F + \mathrm{reg} \cdot f(\gamma) - s.t. \ \gamma 1 = a + s.t. \ \gamma \mathbf{1} &= \mathbf{a} - \gamma^T 1= b + \gamma^T \mathbf{1} &= \mathbf{b} - \gamma\geq 0 + \gamma &\geq 0 where : - :math:`\mathbf{M}` is the (`ns`, `nt`) metric cost matrix @@ -309,13 +309,13 @@ def gcg(a, b, M, reg1, reg2, f, df, G0=None, numItermax=10, The function solves the following optimization problem: .. math:: - \gamma = arg\min_\gamma <\gamma,M>_F + \mathrm{reg_1}\cdot\Omega(\gamma) + \mathrm{reg_2}\cdot f(\gamma) + \gamma = \mathop{\arg \min}_\gamma <\gamma, \mathbf{M}>_F + \mathrm{reg_1}\cdot\Omega(\gamma) + \mathrm{reg_2}\cdot f(\gamma) - s.t. \ \gamma 1 = a + s.t. \ \gamma \mathbf{1} &= \mathbf{a} - \gamma^T 1= b + \gamma^T \mathbf{1} &= \mathbf{b} - \gamma\geq 0 + \gamma &\geq 0 where : - :math:`\mathbf{M}` is the (`ns`, `nt`) metric cost matrix @@ -452,7 +452,7 @@ def solve_1d_linesearch_quad(a, b, c): .. math:: - arg\min_{0 \leq x \leq 1} f(x) = ax^{2} + bx + c + \mathop{\arg \min}_{0 \leq x \leq 1} f(x) = ax^{2} + bx + c Parameters ---------- |