From 80e3c23bc968f866fd20344ddc443a3c7fcb3b0d Mon Sep 17 00:00:00 2001
From: Clément Bonet <32179275+clbonet@users.noreply.github.com>
Date: Thu, 23 Feb 2023 08:31:01 +0100
Subject: [WIP] Wasserstein distance on the circle and Spherical Sliced-Wasserstein (#434)

* W circle + SSW
* Tests + Example SSW_1
* Example Wasserstein Circle + Tests
* Wasserstein on the circle wrt Unif
* Example SSW unif
* pep8
* np.linalg.qr for numpy < 1.22 by batch + add python3.11 to tests
* np qr
* rm test python 3.11
* update names, tests, backend transpose
* Comment error batchs
* semidiscrete_wasserstein2_unif_circle example
* torch permute method instead of torch.permute for previous versions
* update comments and doc
* doc wasserstein circle model as [0,1[
* Added ot.utils.get_coordinate_circle to get coordinates on the circle in turn
---
 examples/backends/plot_ssw_unif_torch.py | 153 +++++++++++++++++++++++++++++++
 1 file changed, 153 insertions(+)
 create mode 100644 examples/backends/plot_ssw_unif_torch.py

diff --git a/examples/backends/plot_ssw_unif_torch.py b/examples/backends/plot_ssw_unif_torch.py
new file mode 100644
index 0000000..d1de5a9
--- /dev/null
+++ b/examples/backends/plot_ssw_unif_torch.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+r"""
+================================================
+Spherical Sliced-Wasserstein Embedding on Sphere
+================================================
+
+Here, we aim at transforming samples into a uniform
+distribution on the sphere by minimizing SSW:
+
+.. math::
+    \min_{x} SSW_2(\nu, \frac{1}{n}\sum_{i=1}^n \delta_{x_i})
+
+where :math:`\nu=\mathrm{Unif}(S^1)`.
+
+"""
+
+# Author: Clément Bonet
+#
+# License: MIT License
+
+# sphinx_gallery_thumbnail_number = 3
+
+import numpy as np
+import matplotlib.pyplot as pl
+import matplotlib.animation as animation
+import torch
+import torch.nn.functional as F
+
+import ot
+
+
+# %%
+# Data generation
+# ---------------
+
+torch.manual_seed(1)
+
+N = 1000
+x0 = torch.rand(N, 3)
+x0 = F.normalize(x0, dim=-1)
+
+
+# %%
+# Plot data
+# ---------
+
+def plot_sphere(ax):
+    xlist = np.linspace(-1.0, 1.0, 50)
+    ylist = np.linspace(-1.0, 1.0, 50)
+    r = np.linspace(1.0, 1.0, 50)
+    X, Y = np.meshgrid(xlist, ylist)
+
+    Z = np.sqrt(r**2 - X**2 - Y**2)
+
+    ax.plot_wireframe(X, Y, Z, color="gray", alpha=.3)
+    ax.plot_wireframe(X, Y, -Z, color="gray", alpha=.3)  # Now plot the bottom half
+
+
+# plot the distributions
+pl.figure(1)
+ax = pl.axes(projection='3d')
+plot_sphere(ax)
+ax.scatter(x0[:, 0], x0[:, 1], x0[:, 2], label='Data samples', alpha=0.5)
+ax.set_title('Data distribution')
+ax.legend()
+
+
+# %%
+# Gradient descent
+# ----------------
+
+x = x0.clone()
+x.requires_grad_(True)
+
+n_iter = 500
+lr = 100
+
+losses = []
+xvisu = torch.zeros(n_iter, N, 3)
+
+for i in range(n_iter):
+    sw = ot.sliced_wasserstein_sphere_unif(x, n_projections=500)
+    grad_x = torch.autograd.grad(sw, x)[0]
+
+    x = x - lr * grad_x
+    x = F.normalize(x, p=2, dim=1)
+
+    losses.append(sw.item())
+    xvisu[i, :, :] = x.detach().clone()
+
+    if i % 100 == 0:
+        print("Iter: {:3d}, loss={}".format(i, losses[-1]))
+
+pl.figure(1)
+pl.semilogy(losses)
+pl.grid()
+pl.title('SSW')
+pl.xlabel("Iterations")
+
+
+# %%
+# Plot trajectories of generated samples along iterations
+# -------------------------------------------------------
+
+ivisu = [0, 25, 50, 75, 100, 150, 200, 350, 499]
+
+fig = pl.figure(3, (10, 10))
+for i in range(9):
+    # pl.subplot(3, 3, i + 1)
+    # ax = pl.axes(projection='3d')
+    ax = fig.add_subplot(3, 3, i + 1, projection='3d')
+    plot_sphere(ax)
+    ax.scatter(xvisu[ivisu[i], :, 0], xvisu[ivisu[i], :, 1], xvisu[ivisu[i], :, 2], label='Data samples', alpha=0.5)
+    ax.set_title('Iter. {}'.format(ivisu[i]))
+    #ax.axis("off")
+    if i == 0:
+        ax.legend()
+
+
+# %%
+# Animate trajectories of generated samples along iteration
+# -------------------------------------------------------
+
+pl.figure(4, (8, 8))
+
+
+def _update_plot(i):
+    i = 3 * i
+    pl.clf()
+    ax = pl.axes(projection='3d')
+    plot_sphere(ax)
+    ax.scatter(xvisu[i, :, 0], xvisu[i, :, 1], xvisu[i, :, 2], label='Data samples$', alpha=0.5)
+    ax.axis("off")
+    ax.set_xlim((-1.5, 1.5))
+    ax.set_ylim((-1.5, 1.5))
+    ax.set_title('Iter. {}'.format(i))
+    return 1
+
+
+print(xvisu.shape)
+
+i = 0
+ax = pl.axes(projection='3d')
+plot_sphere(ax)
+ax.scatter(xvisu[i, :, 0], xvisu[i, :, 1], xvisu[i, :, 2], label='Data samples from $G\#\mu_n$', alpha=0.5)
+ax.axis("off")
+ax.set_xlim((-1.5, 1.5))
+ax.set_ylim((-1.5, 1.5))
+ax.set_title('Iter. {}'.format(ivisu[i]))
+
+
+ani = animation.FuncAnimation(pl.gcf(), _update_plot, n_iter // 5, interval=100, repeat_delay=2000)
+# %%
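
The example above only exercises ot.sliced_wasserstein_sphere_unif. The commit message also announces circle tools (Wasserstein on the circle, semidiscrete_wasserstein2_unif_circle, ot.utils.get_coordinate_circle) and a two-sample spherical sliced distance. The NumPy sketch below shows how those are meant to be called; sliced_wasserstein_sphere_unif's signature is taken from the example, but the names wasserstein_circle and sliced_wasserstein_sphere, and all signatures and defaults shown here, are assumptions rather than the definitive API of this PR.

# Usage sketch (assumed signatures) for the circle/sphere tools advertised
# in the commit message above.
import numpy as np
import ot

rng = np.random.RandomState(0)

# Samples on the circle are encoded as coordinates in [0, 1[ (see the
# "doc wasserstein circle model as [0,1[" bullet above).
u = rng.rand(100)
v = rng.rand(100)

# Wasserstein distance on the circle between two empirical distributions
# (assumed function name and keyword).
w2_circle = ot.wasserstein_circle(u, v, p=2)

# Closed-form squared W2 between an empirical sample and Unif(S^1).
w2_unif = ot.semidiscrete_wasserstein2_unif_circle(u)

# 2D points on the unit circle can first be mapped back to [0, 1[ coordinates.
xy = np.stack([np.cos(2 * np.pi * u), np.sin(2 * np.pi * u)], axis=-1)
u_back = ot.utils.get_coordinate_circle(xy)

# Spherical Sliced-Wasserstein between two samples on S^2 (assumed two-sample
# counterpart of sliced_wasserstein_sphere_unif used in the example).
xs = rng.randn(50, 3)
xt = rng.randn(50, 3)
xs /= np.linalg.norm(xs, axis=-1, keepdims=True)
xt /= np.linalg.norm(xt, axis=-1, keepdims=True)
ssw = ot.sliced_wasserstein_sphere(xs, xt, n_projections=200)
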
From 981fbe3873d7c1c121499bf83557f6d72425bf69 Mon Sep 17 00:00:00 2001
From: Rémi Flamary
Date: Fri, 24 Mar 2023 10:13:59 +0100
Subject: [WIP] Build donc in GH Action and report warnings + move contributing and code of conduct in documentation (#441)

* use action for doc with wraning visible
* remove space
* remove space again
* test pre commands
* install pot properly
* install compiler...
* try composite action
* remoe warning in sliced exmaple
* pep8
* move contributing and code of conduct
* cleanup
* underline too short
* update quickstart
* replace version selector by static list to avoid jsQuery bug
---
 .github/CONTRIBUTING.md                  |  1 -
 .github/workflows/build_doc.yml          | 44 +++++++++++++++
 README.md                                |  2 +-
 docs/source/.github/CODE_OF_CONDUCT.rst  |  6 ---
 docs/source/.github/CONTRIBUTING.rst     |  6 ---
 docs/source/_templates/versions.html     | 69 ++++++++++++------------
 docs/source/all.rst                      |  2 +-
 docs/source/code_of_conduct.rst          |  6 +++
 docs/source/contributing.rst             |  6 +++
 docs/source/index.rst                    |  4 +-
 docs/source/quickstart.rst               | 92 ++++++++++++++++----------------
 examples/backends/plot_ssw_unif_torch.py |  2 +-
 12 files changed, 142 insertions(+), 98 deletions(-)
 create mode 100644 .github/workflows/build_doc.yml
 delete mode 100644 docs/source/.github/CODE_OF_CONDUCT.rst
 delete mode 100644 docs/source/.github/CONTRIBUTING.rst
 create mode 100644 docs/source/code_of_conduct.rst
 create mode 100644 docs/source/contributing.rst

diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 9bc8e87..168ffb3 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -1,4 +1,3 @@
-
 Contributing to POT
 ===================
 
diff --git a/.github/workflows/build_doc.yml b/.github/workflows/build_doc.yml
new file mode 100644
index 0000000..93bd113
--- /dev/null
+++ b/.github/workflows/build_doc.yml
@@ -0,0 +1,44 @@
+name: Build doc
+
+on:
+  workflow_dispatch:
+  pull_request:
+  push:
+    branches:
+      - 'master'
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    # Standard drop-in approach that should work for most people.
+
+    - name: Set up Python 3.8
+      uses: actions/setup-python@v1
+      with:
+        python-version: 3.8
+
+    - name: Get Python running
+      run: |
+        python -m pip install --user --upgrade --progress-bar off pip
+        python -m pip install --user --upgrade --progress-bar off -r requirements.txt
+        python -m pip install --user --upgrade --progress-bar off -r docs/requirements.txt
+        python -m pip install --user --upgrade --progress-bar off ipython "https://api.github.com/repos/sphinx-gallery/sphinx-gallery/zipball/master" memory_profiler
+        python -m pip install --user -e .
+    # Look at what we have and fail early if there is some library conflict
+    - name: Check installation
+      run: |
+        which python
+        python -c "import ot"
+    # Build docs
+    - name: Generate HTML docs
+      uses: rickstaa/sphinx-action@master
+      with:
+        docs-folder: "docs/"
+    - uses: actions/upload-artifact@v1
+      with:
+        name: Documentation
+        path: docs/build/html/
\ No newline at end of file
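
The workflow's "Check installation" step above only verifies that :code:`import ot` succeeds before the Sphinx build starts. For a local dry run before pushing documentation changes, a slightly more talkative check could look like the hypothetical script below; it is not part of this PR, and the small ot.emd call is just one confident way to confirm the compiled solvers are usable.

# Hypothetical local pre-flight check mirroring the "Check installation"
# CI step above; not part of this PR.
import sys
import numpy as np
import ot

print("python:", sys.executable)
print("POT version:", ot.__version__)

# Tiny exact-OT call to make sure the compiled extension is importable
# and returns a valid transport plan.
a = b = np.array([0.5, 0.5])
M = np.array([[0.0, 1.0], [1.0, 0.0]])
G = ot.emd(a, b, M)
print("ot.emd OK, plan:")
print(G)
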
diff --git a/README.md b/README.md
index 9c5e07e..2a81e95 100644
--- a/README.md
+++ b/README.md
@@ -192,7 +192,7 @@ POT has benefited from the financing or manpower from the following partners:
 
 ## Contributions and code of conduct
 
-Every contribution is welcome and should respect the [contribution guidelines](.github/CONTRIBUTING.md). Each member of the project is expected to follow the [code of conduct](.github/CODE_OF_CONDUCT.md).
+Every contribution is welcome and should respect the [contribution guidelines](https://pythonot.github.io/master/contributing.html). Each member of the project is expected to follow the [code of conduct](https://pythonot.github.io/master/code_of_conduct.html).
 
 ## Support
 
diff --git a/docs/source/.github/CODE_OF_CONDUCT.rst b/docs/source/.github/CODE_OF_CONDUCT.rst
deleted file mode 100644
index d4c5cec..0000000
--- a/docs/source/.github/CODE_OF_CONDUCT.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Code of Conduct
-===============
-
-.. include:: ../../../.github/CODE_OF_CONDUCT.md
-    :parser: myst_parser.sphinx_
-    :start-line: 2
diff --git a/docs/source/.github/CONTRIBUTING.rst b/docs/source/.github/CONTRIBUTING.rst
deleted file mode 100644
index aef24e9..0000000
--- a/docs/source/.github/CONTRIBUTING.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Contributing to POT
-===================
-
-.. include:: ../../../.github/CONTRIBUTING.md
-    :parser: myst_parser.sphinx_
-    :start-line: 3
diff --git a/docs/source/_templates/versions.html b/docs/source/_templates/versions.html
index f48ab86..5b1021a 100644
--- a/docs/source/_templates/versions.html
+++ b/docs/source/_templates/versions.html
@@ -1,47 +1,50 @@
 [The HTML markup of this hunk did not survive; only its text content remains.
 The template titled "Python Optimal Transport - versions" now renders a static
 "Versions" menu with Release, Development and Code links, replacing the
 JavaScript version selector.]
diff --git a/docs/source/all.rst b/docs/source/all.rst
index 1b8d13c..a9d7fe2 100644
--- a/docs/source/all.rst
+++ b/docs/source/all.rst
@@ -37,7 +37,7 @@ API and modules
 
 Main :py:mod:`ot` functions
---------------
+---------------------------
 
 .. automodule:: ot
    :members:
diff --git a/docs/source/code_of_conduct.rst b/docs/source/code_of_conduct.rst
new file mode 100644
index 0000000..40b432e
--- /dev/null
+++ b/docs/source/code_of_conduct.rst
@@ -0,0 +1,6 @@
+Code of conduct
+===============
+
+.. include:: ../../.github/CODE_OF_CONDUCT.md
+    :parser: myst_parser.sphinx_
+    :start-line: 2
diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst
new file mode 100644
index 0000000..8dec19a
--- /dev/null
+++ b/docs/source/contributing.rst
@@ -0,0 +1,6 @@
+Contributing to POT
+===================
+
+.. include:: ../../.github/CONTRIBUTING.md
+    :parser: myst_parser.sphinx_
+    :start-line: 2
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 3d53ef4..0f04738 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -21,9 +21,9 @@ Contents
    all
    auto_examples/index
    releases
-   .github/CONTRIBUTING
    contributors
-   .github/CODE_OF_CONDUCT
+   contributing
+   code_of_conduct
 
 
 .. include:: ../../README.md
diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst
index c8eac30..1dc9f71 100644
--- a/docs/source/quickstart.rst
+++ b/docs/source/quickstart.rst
@@ -127,14 +127,6 @@ been used to solve both graph Laplacian regularization OT and Gromov
 Wasserstein [30]_.
 
 
-.. note::
-
-    POT is originally designed to solve OT problems with Numpy interface and
-    is not yet compatible with Pytorch API. We are currently working on a torch
-    submodule that will provide OT solvers and losses for the most common deep
-    learning configurations.
-
-
 When not to use POT
 """""""""""""""""""
 
@@ -692,42 +684,8 @@ A list of the provided implementation is given in the following note.
     :heading-level: "
 
 
-Other applications
-------------------
-
-We discuss in the following several OT related problems and tools that has been
-proposed in the OT and machine learning community.
-
-Wasserstein Discriminant Analysis
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Wasserstein Discriminant Analysis [11]_ is a generalization of `Fisher Linear Discriminant
-Analysis <https://en.wikipedia.org/wiki/Linear_discriminant_analysis>`__ that
-allows discrimination between classes that are not linearly separable. It
-consists in finding a linear projector optimizing the following criterion
-
-.. math::
-    P = \text{arg}\min_P \frac{\sum_i OT_e(\mu_i\#P,\mu_i\#P)}{\sum_{i,j\neq i}
-    OT_e(\mu_i\#P,\mu_j\#P)}
-
-where :math:`\#` is the push-forward operator, :math:`OT_e` is the entropic OT
-loss and :math:`\mu_i` is the
-distribution of samples from class :math:`i`. :math:`P` is also constrained to
-be in the Stiefel manifold. WDA can be solved in POT using function
-:any:`ot.dr.wda`. It requires to have installed :code:`pymanopt` and
-:code:`autograd` for manifold optimization and automatic differentiation
-respectively. Note that we also provide the Fisher discriminant estimator in
-:any:`ot.dr.fda` for easy comparison.
-
-.. warning::
-
-    Note that due to the hard dependency on :code:`pymanopt` and
-    :code:`autograd`, :any:`ot.dr` is not imported by default. If you want to
-    use it you have to specifically import it with :code:`import ot.dr` .
-
-.. minigallery:: ot.dr.wda
-    :add-heading: Examples of the use of WDA
-    :heading-level: "
+Unbalanced and partial OT
+-------------------------
 
 
@@ -845,10 +803,11 @@ regularization of the problem.
     :heading-level: "
 
 
+Gromov Wasserstein and extensions
+---------------------------------
 
-
-Gromov-Wasserstein
-^^^^^^^^^^^^^^^^^^
+Gromov Wasserstein(GW)
+^^^^^^^^^^^^^^^^^^^^^^
 
 Gromov Wasserstein (GW) is a generalization of OT to distributions that do not lie in
 the same space [13]_. In this case one cannot compute distance between samples
@@ -877,6 +836,8 @@ There also exists an entropic regularized variant of GW that has been proposed i
     :add-heading: Examples of computation of GW, regularized G and FGW
     :heading-level: "
 
+Gromov Wasserstein barycenters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Note that similarly to Wasserstein distance GW allows for the definition of GW
 barycenters that can be expressed as
@@ -905,6 +866,43 @@ The implementations of FGW and FGW barycenter is provided in functions
     :heading-level: "
 
 
+Other applications
+------------------
+
+We discuss in the following several OT related problems and tools that has been
+proposed in the OT and machine learning community.
+
+Wasserstein Discriminant Analysis
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Wasserstein Discriminant Analysis [11]_ is a generalization of `Fisher Linear Discriminant
+Analysis <https://en.wikipedia.org/wiki/Linear_discriminant_analysis>`__ that
+allows discrimination between classes that are not linearly separable. It
+consists in finding a linear projector optimizing the following criterion
+
+.. math::
+    P = \text{arg}\min_P \frac{\sum_i OT_e(\mu_i\#P,\mu_i\#P)}{\sum_{i,j\neq i}
+    OT_e(\mu_i\#P,\mu_j\#P)}
+
+where :math:`\#` is the push-forward operator, :math:`OT_e` is the entropic OT
+loss and :math:`\mu_i` is the
+distribution of samples from class :math:`i`. :math:`P` is also constrained to
+be in the Stiefel manifold. WDA can be solved in POT using function
+:any:`ot.dr.wda`. It requires to have installed :code:`pymanopt` and
+:code:`autograd` for manifold optimization and automatic differentiation
+respectively. Note that we also provide the Fisher discriminant estimator in
+:any:`ot.dr.fda` for easy comparison.
+
+.. warning::
+
+    Note that due to the hard dependency on :code:`pymanopt` and
+    :code:`autograd`, :any:`ot.dr` is not imported by default. If you want to
+    use it you have to specifically import it with :code:`import ot.dr` .
+
+.. minigallery:: ot.dr.wda
+    :add-heading: Examples of the use of WDA
+    :heading-level: "
+
 Solving OT with Multiple backends on CPU/GPU
 --------------------------------------------
 
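
The reorganized Gromov Wasserstein section of the quickstart above describes GW between distributions living in different spaces and its entropic variant, but the shown hunks do not include a call. The sketch below uses ot.gromov.gromov_wasserstein and ot.gromov.entropic_gromov_wasserstein, which exist in POT; the toy data and parameter values are illustrative only and are not taken from the documentation.

# Sketch of the GW solvers referenced in the quickstart text above;
# data and parameter values are illustrative.
import numpy as np
import ot

rng = np.random.RandomState(42)
xs = rng.randn(30, 2)   # source samples in R^2
xt = rng.randn(40, 3)   # target samples in R^3 (a different space)

C1 = ot.dist(xs, xs)    # intra-domain cost matrices
C2 = ot.dist(xt, xt)
C1 /= C1.max()
C2 /= C2.max()

p = ot.unif(30)         # uniform weights on each sample
q = ot.unif(40)

# exact (conditional-gradient) GW coupling
T = ot.gromov.gromov_wasserstein(C1, C2, p, q, 'square_loss')

# entropic-regularized GW coupling
T_ent = ot.gromov.entropic_gromov_wasserstein(C1, C2, p, q, 'square_loss', epsilon=5e-3)
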
diff --git a/examples/backends/plot_ssw_unif_torch.py b/examples/backends/plot_ssw_unif_torch.py
index d1de5a9..7ccc2af 100644
--- a/examples/backends/plot_ssw_unif_torch.py
+++ b/examples/backends/plot_ssw_unif_torch.py
@@ -50,7 +50,7 @@ def plot_sphere(ax):
     r = np.linspace(1.0, 1.0, 50)
     X, Y = np.meshgrid(xlist, ylist)
 
-    Z = np.sqrt(r**2 - X**2 - Y**2)
+    Z = np.sqrt(np.maximum(r**2 - X**2 - Y**2, 0))
 
     ax.plot_wireframe(X, Y, Z, color="gray", alpha=.3)
     ax.plot_wireframe(X, Y, -Z, color="gray", alpha=.3)  # Now plot the bottom half
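
The Wasserstein Discriminant Analysis section moved in the quickstart diff above points to ot.dr.wda, notes the pymanopt and autograd dependencies, and mentions ot.dr.fda for comparison. A minimal sketch of that call follows; the toy data, keyword names and return values shown here are assumptions for illustration, not the documented interface.

# Sketch of the ot.dr.wda usage described in the quickstart text above.
# ot.dr is not imported with `import ot`; it needs pymanopt and autograd.
# Keyword names and return values below are assumptions.
import numpy as np
import ot.dr

rng = np.random.RandomState(0)
n, d, pdim = 100, 10, 2                  # samples, input dim, projection dim

# two noisy classes that differ only in their first two coordinates
y = rng.randint(0, 2, n)
X = rng.randn(n, d)
X[:, :2] += 2.0 * y[:, None]

Pwda, projwda = ot.dr.wda(X, y, p=pdim, reg=1.0, k=10, maxiter=100)
Xp = projwda(X)                          # samples projected on the subspace

# Fisher discriminant estimator provided for easy comparison
Pfda, projfda = ot.dr.fda(X, y, p=pdim)
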