author     Rémi Flamary <remi.flamary@gmail.com>    2017-09-01 15:31:44 +0200
committer  Rémi Flamary <remi.flamary@gmail.com>    2017-09-01 15:31:44 +0200
commit     062071b20d1d40c64bb619931bd11bd28e780485 (patch)
tree       74bfcd48bb65304c2a5be74c24cdff29bd82ba4b
parent     212f3889b1114026765cda0134e02766daa82af2 (diff)
update example with rst titles
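
Note: sphinx-gallery builds each gallery page from the example's module docstring and expects that docstring to open with a reStructuredText section title; that is what "rst titles" refers to here. Below is a minimal sketch of the header pattern the retitled examples follow (illustrative only, not a file taken verbatim from this commit; it assumes the POT API of that era, e.g. ot.datasets.get_1D_gauss and ot.emd):

.. code-block:: python

    # -*- coding: utf-8 -*-
    """
    ====================
    1D optimal transport
    ====================

    Gallery example with an RST title: compute an exact (EMD) transport plan
    between two 1D Gaussian histograms.
    """

    import numpy as np
    import ot
    from ot.datasets import get_1D_gauss as gauss

    n = 100                                 # number of bins
    x = np.arange(n, dtype=np.float64)      # bin positions
    a = gauss(n, m=20, s=5)                 # source histogram
    b = gauss(n, m=60, s=10)                # target histogram

    # squared Euclidean ground cost, normalized for readability
    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()

    G0 = ot.emd(a, b, M)                    # exact optimal transport plan
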
-rw-r--r--  .gitignore  |  2
-rw-r--r--  docs/source/auto_examples/auto_examples_jupyter.zip  |  bin 91095 -> 70410 bytes
-rw-r--r--  docs/source/auto_examples/auto_examples_python.zip  |  bin 62950 -> 46653 bytes
-rw-r--r--  docs/source/auto_examples/demo_OT_1D_test.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/demo_OT_1D_test.py  |  71
-rw-r--r--  docs/source/auto_examples/demo_OT_1D_test.rst  |  99
-rw-r--r--  docs/source/auto_examples/demo_OT_2D_sampleslarge.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/demo_OT_2D_sampleslarge.py  |  78
-rw-r--r--  docs/source/auto_examples/demo_OT_2D_sampleslarge.rst  |  106
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_001.png  |  bin 52753 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_002.png  |  bin 87798 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_003.png  |  bin 167396 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_004.png  |  bin 82929 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_001.png  |  bin 53561 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_004.png  |  bin 193523 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_001.png  |  bin 237854 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_002.png  |  bin 472911 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_001.png  |  bin 44168 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_002.png  |  bin 111565 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_001.png  |  bin 237854 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_002.png  |  bin 429859 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_1D_003.png  |  bin 16995 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_1D_004.png  |  bin 18923 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png  |  bin 21092 -> 20707 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png  |  bin 21310 -> 21335 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_003.png  |  bin 9625 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_004.png  |  bin 82376 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png  |  bin 13913 -> 9613 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png  |  bin 102963 -> 83657 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_001.png  |  bin 14117 -> 11710 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_002.png  |  bin 18696 -> 17184 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_003.png  |  bin 21300 -> 38780 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png  |  bin 17184 -> 38780 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_WDA_001.png  |  bin 56060 -> 55483 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_WDA_002.png  |  bin 90982 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_barycenter_1D_003.png  |  bin 108687 -> 41555 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_compute_emd_002.png  |  bin 38746 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_optim_OTreg_005.png  |  bin 20440 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_classes_001.png  |  bin 50114 -> 49949 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_classes_003.png  |  bin 194170 -> 189153 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_001.png  |  bin 144957 -> 144945 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_003.png  |  bin 50401 -> 50403 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_005.png  |  bin 234337 -> 234386 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_d2_001.png  |  bin 130439 -> 131873 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_d2_003.png  |  bin 224757 -> 240262 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_d2_006.png  |  bin 99742 -> 104502 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_001.png  |  bin 35810 -> 37940 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_003.png  |  bin 71391 -> 76017 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_001.png  |  bin 165592 -> 165589 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_003.png  |  bin 80722 -> 80727 bytes
-rw-r--r--  docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_004.png  |  bin 541483 -> 541463 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_2D_thumb.png  |  bin 34799 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_classes_thumb.png  |  bin 34581 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_color_images_thumb.png  |  bin 52919 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_color_images_thumb.png  |  bin 52919 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_thumb.png  |  bin 26370 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_1D_thumb.png  |  bin 18227 -> 18222 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_2D_samples_thumb.png  |  bin 23844 -> 22370 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png  |  bin 16407 -> 10935 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_conv_thumb.png  |  bin 2894 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_WDA_thumb.png  |  bin 87834 -> 88848 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_barycenter_1D_thumb.png  |  bin 16522 -> 16522 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_compute_emd_thumb.png  |  bin 80805 -> 80806 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_optim_OTreg_thumb.png  |  bin 21750 -> 3101 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png  |  bin 30152 -> 29948 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_color_images_thumb.png  |  bin 51085 -> 51088 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png  |  bin 52925 -> 54746 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png  |  bin 58315 -> 58321 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png  |  bin 18620 -> 19281 bytes
-rw-r--r--  docs/source/auto_examples/images/thumb/sphx_glr_test_OT_2D_samples_stabilized_thumb.png  |  bin 3101 -> 0 bytes
-rw-r--r--  docs/source/auto_examples/index.rst  |  64
-rw-r--r--  docs/source/auto_examples/plot_OTDA_2D.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/plot_OTDA_2D.py  |  120
-rw-r--r--  docs/source/auto_examples/plot_OTDA_2D.rst  |  175
-rw-r--r--  docs/source/auto_examples/plot_OTDA_classes.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/plot_OTDA_classes.py  |  112
-rw-r--r--  docs/source/auto_examples/plot_OTDA_classes.rst  |  190
-rw-r--r--  docs/source/auto_examples/plot_OTDA_color_images.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/plot_OTDA_color_images.py  |  145
-rw-r--r--  docs/source/auto_examples/plot_OTDA_color_images.rst  |  191
-rw-r--r--  docs/source/auto_examples/plot_OTDA_mapping.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/plot_OTDA_mapping.py  |  110
-rw-r--r--  docs/source/auto_examples/plot_OTDA_mapping.rst  |  186
-rw-r--r--  docs/source/auto_examples/plot_OTDA_mapping_color_images.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/plot_OTDA_mapping_color_images.py  |  158
-rw-r--r--  docs/source/auto_examples/plot_OTDA_mapping_color_images.rst  |  246
-rw-r--r--  docs/source/auto_examples/plot_OT_1D.ipynb  |  4
-rw-r--r--  docs/source/auto_examples/plot_OT_1D.py  |  5
-rw-r--r--  docs/source/auto_examples/plot_OT_1D.rst  |  9
-rw-r--r--  docs/source/auto_examples/plot_OT_2D_samples.ipynb  |  76
-rw-r--r--  docs/source/auto_examples/plot_OT_2D_samples.py  |  18
-rw-r--r--  docs/source/auto_examples/plot_OT_2D_samples.rst  |  132
-rw-r--r--  docs/source/auto_examples/plot_OT_L1_vs_L2.ipynb  |  76
-rw-r--r--  docs/source/auto_examples/plot_OT_L1_vs_L2.py  |  280
-rw-r--r--  docs/source/auto_examples/plot_OT_L1_vs_L2.rst  |  351
-rw-r--r--  docs/source/auto_examples/plot_OT_conv.ipynb  |  54
-rw-r--r--  docs/source/auto_examples/plot_OT_conv.py  |  200
-rw-r--r--  docs/source/auto_examples/plot_OT_conv.rst  |  241
-rw-r--r--  docs/source/auto_examples/plot_WDA.ipynb  |  94
-rw-r--r--  docs/source/auto_examples/plot_WDA.py  |  27
-rw-r--r--  docs/source/auto_examples/plot_WDA.rst  |  172
-rw-r--r--  docs/source/auto_examples/plot_barycenter_1D.ipynb  |  76
-rw-r--r--  docs/source/auto_examples/plot_barycenter_1D.py  |  23
-rw-r--r--  docs/source/auto_examples/plot_barycenter_1D.rst  |  113
-rw-r--r--  docs/source/auto_examples/plot_compute_emd.ipynb  |  76
-rw-r--r--  docs/source/auto_examples/plot_compute_emd.py  |  30
-rw-r--r--  docs/source/auto_examples/plot_compute_emd.rst  |  100
-rw-r--r--  docs/source/auto_examples/plot_optim_OTreg.ipynb  |  8
-rw-r--r--  docs/source/auto_examples/plot_optim_OTreg.py  |  23
-rw-r--r--  docs/source/auto_examples/plot_optim_OTreg.rst  |  29
-rw-r--r--  docs/source/auto_examples/plot_otda_classes.rst  |  46
-rw-r--r--  docs/source/auto_examples/plot_otda_color_images.ipynb  |  18
-rw-r--r--  docs/source/auto_examples/plot_otda_color_images.py  |  66
-rw-r--r--  docs/source/auto_examples/plot_otda_color_images.rst  |  90
-rw-r--r--  docs/source/auto_examples/plot_otda_d2.rst  |  4
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping.ipynb  |  14
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping.py  |  35
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping.rst  |  105
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping_colors_images.ipynb  |  8
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping_colors_images.py  |  15
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping_colors_images.rst  |  85
-rw-r--r--  docs/source/conf.py  |  4
-rw-r--r--  docs/source/examples.rst  |  39
-rw-r--r--  examples/README.txt  |  2
-rw-r--r--  examples/plot_OT_1D.py  |  2
-rw-r--r--  examples/plot_OT_2D_samples.py  |  3
-rw-r--r--  examples/plot_OT_L1_vs_L2.py  |  280
-rw-r--r--  examples/plot_barycenter_1D.py  |  17
-rw-r--r--  examples/plot_compute_emd.py  |  4
-rw-r--r--  examples/plot_optim_OTreg.py  |  4
-rw-r--r--  examples/plot_otda_mapping.py  |  10
-rw-r--r--  examples/plot_otda_mapping_colors_images.py  |  6
132 files changed, 1849 insertions, 3656 deletions
diff --git a/.gitignore b/.gitignore
index 42a9aad..887a164 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,8 +5,6 @@ __pycache__/
.spyproject
# sphinx-gallery temp files
-docs/source/auto_examples/*.pickle
-docs/source/auto_examples/*.md5
docs/auto_examples/
docs/modules/
diff --git a/docs/source/auto_examples/auto_examples_jupyter.zip b/docs/source/auto_examples/auto_examples_jupyter.zip
index 92aa027..96bc0bc 100644
--- a/docs/source/auto_examples/auto_examples_jupyter.zip
+++ b/docs/source/auto_examples/auto_examples_jupyter.zip
Binary files differ
diff --git a/docs/source/auto_examples/auto_examples_python.zip b/docs/source/auto_examples/auto_examples_python.zip
index bc41a8c..6241b92 100644
--- a/docs/source/auto_examples/auto_examples_python.zip
+++ b/docs/source/auto_examples/auto_examples_python.zip
Binary files differ
diff --git a/docs/source/auto_examples/demo_OT_1D_test.ipynb b/docs/source/auto_examples/demo_OT_1D_test.ipynb
deleted file mode 100644
index 87317ea..0000000
--- a/docs/source/auto_examples/demo_OT_1D_test.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\nDemo for 1D optimal transport\n\n@author: rflamary\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport matplotlib.pylab as pl\nimport ot\nfrom ot.datasets import get_1D_gauss as gauss\n\n\n#%% parameters\n\nn=100 # nb bins\n\n# bin positions\nx=np.arange(n,dtype=np.float64)\n\n# Gaussian distributions\na=gauss(n,m=n*.2,s=5) # m= mean, s= std\nb=gauss(n,m=n*.6,s=10)\n\n# loss matrix\nM=ot.dist(x.reshape((n,1)),x.reshape((n,1)))\nM/=M.max()\n\n#%% plot the distributions\n\npl.figure(1)\npl.plot(x,a,'b',label='Source distribution')\npl.plot(x,b,'r',label='Target distribution')\npl.legend()\n\n#%% plot distributions and loss matrix\n\npl.figure(2)\not.plot.plot1D_mat(a,b,M,'Cost matrix M')\n\n#%% EMD\n\nG0=ot.emd(a,b,M)\n\npl.figure(3)\not.plot.plot1D_mat(a,b,G0,'OT matrix G0')\n\n#%% Sinkhorn\n\nlambd=1e-3\nGs=ot.sinkhorn(a,b,M,lambd,verbose=True)\n\npl.figure(4)\not.plot.plot1D_mat(a,b,Gs,'OT matrix Sinkhorn')\n\n#%% Sinkhorn\n\nlambd=1e-4\nGss,log=ot.bregman.sinkhorn_stabilized(a,b,M,lambd,verbose=True,log=True)\nGss2,log2=ot.bregman.sinkhorn_stabilized(a,b,M,lambd,verbose=True,log=True,warmstart=log['warmstart'])\n\npl.figure(5)\not.plot.plot1D_mat(a,b,Gss,'OT matrix Sinkhorn stabilized')\n\n#%% Sinkhorn\n\nlambd=1e-11\nGss=ot.bregman.sinkhorn_epsilon_scaling(a,b,M,lambd,verbose=True)\n\npl.figure(5)\not.plot.plot1D_mat(a,b,Gss,'OT matrix Sinkhorn stabilized')"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/docs/source/auto_examples/demo_OT_1D_test.py b/docs/source/auto_examples/demo_OT_1D_test.py
deleted file mode 100644
index 9edc377..0000000
--- a/docs/source/auto_examples/demo_OT_1D_test.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Demo for 1D optimal transport
-
-@author: rflamary
-"""
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-from ot.datasets import get_1D_gauss as gauss
-
-
-#%% parameters
-
-n=100 # nb bins
-
-# bin positions
-x=np.arange(n,dtype=np.float64)
-
-# Gaussian distributions
-a=gauss(n,m=n*.2,s=5) # m= mean, s= std
-b=gauss(n,m=n*.6,s=10)
-
-# loss matrix
-M=ot.dist(x.reshape((n,1)),x.reshape((n,1)))
-M/=M.max()
-
-#%% plot the distributions
-
-pl.figure(1)
-pl.plot(x,a,'b',label='Source distribution')
-pl.plot(x,b,'r',label='Target distribution')
-pl.legend()
-
-#%% plot distributions and loss matrix
-
-pl.figure(2)
-ot.plot.plot1D_mat(a,b,M,'Cost matrix M')
-
-#%% EMD
-
-G0=ot.emd(a,b,M)
-
-pl.figure(3)
-ot.plot.plot1D_mat(a,b,G0,'OT matrix G0')
-
-#%% Sinkhorn
-
-lambd=1e-3
-Gs=ot.sinkhorn(a,b,M,lambd,verbose=True)
-
-pl.figure(4)
-ot.plot.plot1D_mat(a,b,Gs,'OT matrix Sinkhorn')
-
-#%% Sinkhorn
-
-lambd=1e-4
-Gss,log=ot.bregman.sinkhorn_stabilized(a,b,M,lambd,verbose=True,log=True)
-Gss2,log2=ot.bregman.sinkhorn_stabilized(a,b,M,lambd,verbose=True,log=True,warmstart=log['warmstart'])
-
-pl.figure(5)
-ot.plot.plot1D_mat(a,b,Gss,'OT matrix Sinkhorn stabilized')
-
-#%% Sinkhorn
-
-lambd=1e-11
-Gss=ot.bregman.sinkhorn_epsilon_scaling(a,b,M,lambd,verbose=True)
-
-pl.figure(5)
-ot.plot.plot1D_mat(a,b,Gss,'OT matrix Sinkhorn stabilized')
diff --git a/docs/source/auto_examples/demo_OT_1D_test.rst b/docs/source/auto_examples/demo_OT_1D_test.rst
deleted file mode 100644
index aebeb1d..0000000
--- a/docs/source/auto_examples/demo_OT_1D_test.rst
+++ /dev/null
@@ -1,99 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_demo_OT_1D_test.py:
-
-
-Demo for 1D optimal transport
-
-@author: rflamary
-
-
-
-.. code-block:: python
-
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
- from ot.datasets import get_1D_gauss as gauss
-
-
- #%% parameters
-
- n=100 # nb bins
-
- # bin positions
- x=np.arange(n,dtype=np.float64)
-
- # Gaussian distributions
- a=gauss(n,m=n*.2,s=5) # m= mean, s= std
- b=gauss(n,m=n*.6,s=10)
-
- # loss matrix
- M=ot.dist(x.reshape((n,1)),x.reshape((n,1)))
- M/=M.max()
-
- #%% plot the distributions
-
- pl.figure(1)
- pl.plot(x,a,'b',label='Source distribution')
- pl.plot(x,b,'r',label='Target distribution')
- pl.legend()
-
- #%% plot distributions and loss matrix
-
- pl.figure(2)
- ot.plot.plot1D_mat(a,b,M,'Cost matrix M')
-
- #%% EMD
-
- G0=ot.emd(a,b,M)
-
- pl.figure(3)
- ot.plot.plot1D_mat(a,b,G0,'OT matrix G0')
-
- #%% Sinkhorn
-
- lambd=1e-3
- Gs=ot.sinkhorn(a,b,M,lambd,verbose=True)
-
- pl.figure(4)
- ot.plot.plot1D_mat(a,b,Gs,'OT matrix Sinkhorn')
-
- #%% Sinkhorn
-
- lambd=1e-4
- Gss,log=ot.bregman.sinkhorn_stabilized(a,b,M,lambd,verbose=True,log=True)
- Gss2,log2=ot.bregman.sinkhorn_stabilized(a,b,M,lambd,verbose=True,log=True,warmstart=log['warmstart'])
-
- pl.figure(5)
- ot.plot.plot1D_mat(a,b,Gss,'OT matrix Sinkhorn stabilized')
-
- #%% Sinkhorn
-
- lambd=1e-11
- Gss=ot.bregman.sinkhorn_epsilon_scaling(a,b,M,lambd,verbose=True)
-
- pl.figure(5)
- ot.plot.plot1D_mat(a,b,Gss,'OT matrix Sinkhorn stabilized')
-
-**Total running time of the script:** ( 0 minutes 0.000 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: demo_OT_1D_test.py <demo_OT_1D_test.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: demo_OT_1D_test.ipynb <demo_OT_1D_test.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
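
Note: the deleted demo_OT_1D_test files above exercised the entropic and log-stabilized Sinkhorn solvers in addition to exact EMD. For reference, a condensed, PEP8-style sketch of that part of the removed demo (same solver calls as in the deleted file; the regularization values are the demo's own and are illustrative rather than recommended):

.. code-block:: python

    import numpy as np
    import ot
    from ot.datasets import get_1D_gauss as gauss

    # same 1D setup as in the deleted demo
    n = 100
    x = np.arange(n, dtype=np.float64)
    a, b = gauss(n, m=20, s=5), gauss(n, m=60, s=10)
    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()

    # entropic OT (Sinkhorn) with a small regularization term
    Gs = ot.sinkhorn(a, b, M, reg=1e-3, verbose=True)

    # log-stabilized Sinkhorn, useful when the regularization is very small
    Gss, log = ot.bregman.sinkhorn_stabilized(a, b, M, reg=1e-4,
                                              verbose=True, log=True)
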
diff --git a/docs/source/auto_examples/demo_OT_2D_sampleslarge.ipynb b/docs/source/auto_examples/demo_OT_2D_sampleslarge.ipynb
deleted file mode 100644
index 584a936..0000000
--- a/docs/source/auto_examples/demo_OT_2D_sampleslarge.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\nDemo for 2D Optimal transport between empirical distributions\n\n@author: rflamary\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport matplotlib.pylab as pl\nimport ot\n\n#%% parameters and data generation\n\nn=5000 # nb samples\n\nmu_s=np.array([0,0])\ncov_s=np.array([[1,0],[0,1]])\n\nmu_t=np.array([4,4])\ncov_t=np.array([[1,-.8],[-.8,1]])\n\nxs=ot.datasets.get_2D_samples_gauss(n,mu_s,cov_s)\nxt=ot.datasets.get_2D_samples_gauss(n,mu_t,cov_t)\n\na,b = ot.unif(n),ot.unif(n) # uniform distribution on samples\n\n# loss matrix\nM=ot.dist(xs,xt)\nM/=M.max()\n\n#%% plot samples\n\n#pl.figure(1)\n#pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')\n#pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')\n#pl.legend(loc=0)\n#pl.title('Source and traget distributions')\n#\n#pl.figure(2)\n#pl.imshow(M,interpolation='nearest')\n#pl.title('Cost matrix M')\n#\n\n#%% EMD\n\nG0=ot.emd(a,b,M)\n\n#pl.figure(3)\n#pl.imshow(G0,interpolation='nearest')\n#pl.title('OT matrix G0')\n#\n#pl.figure(4)\n#ot.plot.plot2D_samples_mat(xs,xt,G0,c=[.5,.5,1])\n#pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')\n#pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')\n#pl.legend(loc=0)\n#pl.title('OT matrix with samples')\n\n\n#%% sinkhorn\n\n# reg term\nlambd=5e-3\n\nGs=ot.sinkhorn(a,b,M,lambd)\n\n#pl.figure(5)\n#pl.imshow(Gs,interpolation='nearest')\n#pl.title('OT matrix sinkhorn')\n#\n#pl.figure(6)\n#ot.plot.plot2D_samples_mat(xs,xt,Gs,color=[.5,.5,1])\n#pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')\n#pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')\n#pl.legend(loc=0)\n#pl.title('OT matrix Sinkhorn with samples')\n#"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/docs/source/auto_examples/demo_OT_2D_sampleslarge.py b/docs/source/auto_examples/demo_OT_2D_sampleslarge.py
deleted file mode 100644
index ee3e8f7..0000000
--- a/docs/source/auto_examples/demo_OT_2D_sampleslarge.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Demo for 2D Optimal transport between empirical distributions
-
-@author: rflamary
-"""
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-
-#%% parameters and data generation
-
-n=5000 # nb samples
-
-mu_s=np.array([0,0])
-cov_s=np.array([[1,0],[0,1]])
-
-mu_t=np.array([4,4])
-cov_t=np.array([[1,-.8],[-.8,1]])
-
-xs=ot.datasets.get_2D_samples_gauss(n,mu_s,cov_s)
-xt=ot.datasets.get_2D_samples_gauss(n,mu_t,cov_t)
-
-a,b = ot.unif(n),ot.unif(n) # uniform distribution on samples
-
-# loss matrix
-M=ot.dist(xs,xt)
-M/=M.max()
-
-#%% plot samples
-
-#pl.figure(1)
-#pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')
-#pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')
-#pl.legend(loc=0)
-#pl.title('Source and traget distributions')
-#
-#pl.figure(2)
-#pl.imshow(M,interpolation='nearest')
-#pl.title('Cost matrix M')
-#
-
-#%% EMD
-
-G0=ot.emd(a,b,M)
-
-#pl.figure(3)
-#pl.imshow(G0,interpolation='nearest')
-#pl.title('OT matrix G0')
-#
-#pl.figure(4)
-#ot.plot.plot2D_samples_mat(xs,xt,G0,c=[.5,.5,1])
-#pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')
-#pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')
-#pl.legend(loc=0)
-#pl.title('OT matrix with samples')
-
-
-#%% sinkhorn
-
-# reg term
-lambd=5e-3
-
-Gs=ot.sinkhorn(a,b,M,lambd)
-
-#pl.figure(5)
-#pl.imshow(Gs,interpolation='nearest')
-#pl.title('OT matrix sinkhorn')
-#
-#pl.figure(6)
-#ot.plot.plot2D_samples_mat(xs,xt,Gs,color=[.5,.5,1])
-#pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')
-#pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')
-#pl.legend(loc=0)
-#pl.title('OT matrix Sinkhorn with samples')
-#
-
diff --git a/docs/source/auto_examples/demo_OT_2D_sampleslarge.rst b/docs/source/auto_examples/demo_OT_2D_sampleslarge.rst
deleted file mode 100644
index f5dbb0d..0000000
--- a/docs/source/auto_examples/demo_OT_2D_sampleslarge.rst
+++ /dev/null
@@ -1,106 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_demo_OT_2D_sampleslarge.py:
-
-
-Demo for 2D Optimal transport between empirical distributions
-
-@author: rflamary
-
-
-
-.. code-block:: python
-
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
-
- #%% parameters and data generation
-
- n=5000 # nb samples
-
- mu_s=np.array([0,0])
- cov_s=np.array([[1,0],[0,1]])
-
- mu_t=np.array([4,4])
- cov_t=np.array([[1,-.8],[-.8,1]])
-
- xs=ot.datasets.get_2D_samples_gauss(n,mu_s,cov_s)
- xt=ot.datasets.get_2D_samples_gauss(n,mu_t,cov_t)
-
- a,b = ot.unif(n),ot.unif(n) # uniform distribution on samples
-
- # loss matrix
- M=ot.dist(xs,xt)
- M/=M.max()
-
- #%% plot samples
-
- #pl.figure(1)
- #pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')
- #pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')
- #pl.legend(loc=0)
- #pl.title('Source and traget distributions')
- #
- #pl.figure(2)
- #pl.imshow(M,interpolation='nearest')
- #pl.title('Cost matrix M')
- #
-
- #%% EMD
-
- G0=ot.emd(a,b,M)
-
- #pl.figure(3)
- #pl.imshow(G0,interpolation='nearest')
- #pl.title('OT matrix G0')
- #
- #pl.figure(4)
- #ot.plot.plot2D_samples_mat(xs,xt,G0,c=[.5,.5,1])
- #pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')
- #pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')
- #pl.legend(loc=0)
- #pl.title('OT matrix with samples')
-
-
- #%% sinkhorn
-
- # reg term
- lambd=5e-3
-
- Gs=ot.sinkhorn(a,b,M,lambd)
-
- #pl.figure(5)
- #pl.imshow(Gs,interpolation='nearest')
- #pl.title('OT matrix sinkhorn')
- #
- #pl.figure(6)
- #ot.plot.plot2D_samples_mat(xs,xt,Gs,color=[.5,.5,1])
- #pl.plot(xs[:,0],xs[:,1],'+b',label='Source samples')
- #pl.plot(xt[:,0],xt[:,1],'xr',label='Target samples')
- #pl.legend(loc=0)
- #pl.title('OT matrix Sinkhorn with samples')
- #
-
-
-**Total running time of the script:** ( 0 minutes 0.000 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: demo_OT_2D_sampleslarge.py <demo_OT_2D_sampleslarge.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: demo_OT_2D_sampleslarge.ipynb <demo_OT_2D_sampleslarge.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_001.png
deleted file mode 100644
index 7de2b45..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_001.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_002.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_002.png
deleted file mode 100644
index dc34efd..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_002.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_003.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_003.png
deleted file mode 100644
index fbd72d5..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_003.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_004.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_004.png
deleted file mode 100644
index 227812d..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_2D_004.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_001.png
deleted file mode 100644
index 2bf4015..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_001.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_004.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_004.png
deleted file mode 100644
index c1fbf57..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_classes_004.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_001.png
deleted file mode 100644
index 36bc769..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_001.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_002.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_002.png
deleted file mode 100644
index 307e384..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_color_images_002.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_001.png
deleted file mode 100644
index 8c700ee..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_001.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_002.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_002.png
deleted file mode 100644
index 792b404..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_002.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_001.png
deleted file mode 100644
index 36bc769..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_001.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_002.png b/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_002.png
deleted file mode 100644
index 008bf15..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_002.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_1D_003.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_1D_003.png
deleted file mode 100644
index a75e649..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_1D_003.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_1D_004.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_1D_004.png
deleted file mode 100644
index 96b42cd..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_1D_004.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png
index e675cd8..172d736 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png
index 8c29b7b..3043a72 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_003.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_003.png
deleted file mode 100644
index 1308674..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_003.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_004.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_004.png
deleted file mode 100644
index 95d947e..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_004.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png
index 82ab78c..5565d75 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png
index f1d2bfe..06d1020 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_001.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_001.png
index 22dba2b..6a21f35 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_002.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_002.png
index 5dbf96b..79e4710 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_002.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_002.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_003.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_003.png
index e1e9ba8..4860d96 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png
index 79e4710..4860d96 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_WDA_001.png b/docs/source/auto_examples/images/sphx_glr_plot_WDA_001.png
index 41ec230..a9fff75 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_WDA_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_WDA_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_WDA_002.png b/docs/source/auto_examples/images/sphx_glr_plot_WDA_002.png
deleted file mode 100644
index 95ee7ca..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_WDA_002.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_barycenter_1D_003.png b/docs/source/auto_examples/images/sphx_glr_plot_barycenter_1D_003.png
index eac9230..3b23af5 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_barycenter_1D_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_barycenter_1D_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_compute_emd_002.png b/docs/source/auto_examples/images/sphx_glr_plot_compute_emd_002.png
deleted file mode 100644
index 7c06255..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_compute_emd_002.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_optim_OTreg_005.png b/docs/source/auto_examples/images/sphx_glr_plot_optim_OTreg_005.png
deleted file mode 100644
index 8a4882a..0000000
--- a/docs/source/auto_examples/images/sphx_glr_plot_optim_OTreg_005.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_001.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_001.png
index 45a823d..a28f245 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_003.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_003.png
index 2798f3b..4d0b12d 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_classes_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_001.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_001.png
index 95f882a..2d851c7 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_003.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_003.png
index aa1a5d3..a1d99ab 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_005.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_005.png
index b43c0cb..f76619b 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_005.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_color_images_005.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_001.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_001.png
index bc583a8..9e78aed 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_003.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_003.png
index 7d85e76..d37359b 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_006.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_006.png
index fba820a..c71284a 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_006.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_d2_006.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_001.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_001.png
index 03a3130..d2ee139 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_003.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_003.png
index 9c9be23..fa1ab81 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_001.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_001.png
index 33134fc..1182082 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_001.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_001.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_003.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_003.png
index 42197e3..cc2e4cd 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_003.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_003.png
Binary files differ
diff --git a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_004.png b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_004.png
index ebf268b..7a68343 100644
--- a/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_004.png
+++ b/docs/source/auto_examples/images/sphx_glr_plot_otda_mapping_colors_images_004.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_2D_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_2D_thumb.png
deleted file mode 100644
index d15269d..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_2D_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_classes_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_classes_thumb.png
deleted file mode 100644
index 5863d02..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_classes_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_color_images_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_color_images_thumb.png
deleted file mode 100644
index 5bb43c4..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_color_images_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_color_images_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_color_images_thumb.png
deleted file mode 100644
index 5bb43c4..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_color_images_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_thumb.png
deleted file mode 100644
index c3d9a65..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OTDA_mapping_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_1D_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_1D_thumb.png
index 63ff40c..a3b7039 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_1D_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_1D_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_2D_samples_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_2D_samples_thumb.png
index 48e1449..1f42900 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_2D_samples_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_2D_samples_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png
index 9deebf0..95588f5 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_conv_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_conv_thumb.png
deleted file mode 100644
index 3015582..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_OT_conv_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_WDA_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_WDA_thumb.png
index 4b409a0..8db2b9a 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_WDA_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_WDA_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_barycenter_1D_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_barycenter_1D_thumb.png
index 5c17671..d8cdccb 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_barycenter_1D_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_barycenter_1D_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_compute_emd_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_compute_emd_thumb.png
index 68cbdf7..898cd72 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_compute_emd_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_compute_emd_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_optim_OTreg_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_optim_OTreg_thumb.png
index 2a72060..cbc8e0f 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_optim_OTreg_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_optim_OTreg_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png
index 3dfc6ca..a2571a5 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_color_images_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_color_images_thumb.png
index a919055..16b7572 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_color_images_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_color_images_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png
index d9d673c..6c8f37f 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png
index f7fd217..9666955 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png
index 4ab5023..a042411 100644
--- a/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png
+++ b/docs/source/auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png
Binary files differ
diff --git a/docs/source/auto_examples/images/thumb/sphx_glr_test_OT_2D_samples_stabilized_thumb.png b/docs/source/auto_examples/images/thumb/sphx_glr_test_OT_2D_samples_stabilized_thumb.png
deleted file mode 100644
index cbc8e0f..0000000
--- a/docs/source/auto_examples/images/thumb/sphx_glr_test_OT_2D_samples_stabilized_thumb.png
+++ /dev/null
Binary files differ
diff --git a/docs/source/auto_examples/index.rst b/docs/source/auto_examples/index.rst
index b932907..ebcca85 100644
--- a/docs/source/auto_examples/index.rst
+++ b/docs/source/auto_examples/index.rst
@@ -1,9 +1,15 @@
+:orphan:
+
POT Examples
============
+This is a gallery of all the POT example files.
+
+
+
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="">
+ <div class="sphx-glr-thumbcontainer" tooltip="This example illustrates the computation of EMD and Sinkhorn transport plans and their visuali...">
.. only:: html
@@ -23,7 +29,7 @@ POT Examples
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip=" ">
+ <div class="sphx-glr-thumbcontainer" tooltip="Illustrates the use of the generic solver for regularized OT with user-designed regularization ...">
.. only:: html
@@ -43,7 +49,7 @@ POT Examples
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="">
+ <div class="sphx-glr-thumbcontainer" tooltip="Illustration of 2D optimal transport between discributions that are weighted sum of diracs. The...">
.. only:: html
@@ -63,7 +69,7 @@ POT Examples
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="">
+ <div class="sphx-glr-thumbcontainer" tooltip="Shows how to compute multiple EMD and Sinkhorn with two differnt ground metrics and plot their...">
.. only:: html
@@ -83,7 +89,7 @@ POT Examples
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="">
+ <div class="sphx-glr-thumbcontainer" tooltip="This example illustrate the use of WDA as proposed in [11].">
.. only:: html
@@ -123,7 +129,7 @@ POT Examples
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="">
+ <div class="sphx-glr-thumbcontainer" tooltip="This example illustrates the computation of regularized Wassersyein Barycenter as proposed in ...">
.. only:: html
@@ -143,13 +149,13 @@ POT Examples
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="Stole the figure idea from Fig. 1 and 2 in https://arxiv.org/pdf/1706.07650.pdf">
+ <div class="sphx-glr-thumbcontainer" tooltip="OT for domain adaptation with image color adaptation [6] with mapping estimation [8].">
.. only:: html
- .. figure:: /auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png
- :ref:`sphx_glr_auto_examples_plot_OT_L1_vs_L2.py`
+ :ref:`sphx_glr_auto_examples_plot_otda_mapping_colors_images.py`
.. raw:: html
@@ -159,17 +165,17 @@ POT Examples
.. toctree::
:hidden:
- /auto_examples/plot_OT_L1_vs_L2
+ /auto_examples/plot_otda_mapping_colors_images
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized discrete op...">
+ <div class="sphx-glr-thumbcontainer" tooltip="This example presents how to use MappingTransport to estimate at the same time both the couplin...">
.. only:: html
- .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_mapping_colors_images_thumb.png
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png
- :ref:`sphx_glr_auto_examples_plot_otda_mapping_colors_images.py`
+ :ref:`sphx_glr_auto_examples_plot_otda_mapping.py`
.. raw:: html
@@ -179,17 +185,17 @@ POT Examples
.. toctree::
:hidden:
- /auto_examples/plot_otda_mapping_colors_images
+ /auto_examples/plot_otda_mapping
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="This example presents how to use MappingTransport to estimate at the same time both the couplin...">
+ <div class="sphx-glr-thumbcontainer" tooltip="This example introduces a domain adaptation in a 2D setting and the 4 OTDA approaches currently...">
.. only:: html
- .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_mapping_thumb.png
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png
- :ref:`sphx_glr_auto_examples_plot_otda_mapping.py`
+ :ref:`sphx_glr_auto_examples_plot_otda_classes.py`
.. raw:: html
@@ -199,17 +205,17 @@ POT Examples
.. toctree::
:hidden:
- /auto_examples/plot_otda_mapping
+ /auto_examples/plot_otda_classes
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="This example introduces a domain adaptation in a 2D setting and the 4 OTDA approaches currently...">
+ <div class="sphx-glr-thumbcontainer" tooltip="This example introduces a domain adaptation in a 2D setting. It explicits the problem of domain...">
.. only:: html
- .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_classes_thumb.png
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png
- :ref:`sphx_glr_auto_examples_plot_otda_classes.py`
+ :ref:`sphx_glr_auto_examples_plot_otda_d2.py`
.. raw:: html
@@ -219,17 +225,17 @@ POT Examples
.. toctree::
:hidden:
- /auto_examples/plot_otda_classes
+ /auto_examples/plot_otda_d2
.. raw:: html
- <div class="sphx-glr-thumbcontainer" tooltip="This example introduces a domain adaptation in a 2D setting. It explicits the problem of domain...">
+ <div class="sphx-glr-thumbcontainer" tooltip="2D OT on empirical distributio with different gound metric.">
.. only:: html
- .. figure:: /auto_examples/images/thumb/sphx_glr_plot_otda_d2_thumb.png
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_OT_L1_vs_L2_thumb.png
- :ref:`sphx_glr_auto_examples_plot_otda_d2.py`
+ :ref:`sphx_glr_auto_examples_plot_OT_L1_vs_L2.py`
.. raw:: html
@@ -239,7 +245,7 @@ POT Examples
.. toctree::
:hidden:
- /auto_examples/plot_otda_d2
+ /auto_examples/plot_OT_L1_vs_L2
.. raw:: html
<div style='clear:both'></div>
@@ -251,14 +257,14 @@ POT Examples
.. container:: sphx-glr-download
- :download:`Download all examples in Python source code: auto_examples_python.zip </auto_examples/auto_examples_python.zip>`
+ :download:`Download all examples in Python source code: auto_examples_python.zip <//home/rflamary/PYTHON/POT/docs/source/auto_examples/auto_examples_python.zip>`
.. container:: sphx-glr-download
- :download:`Download all examples in Jupyter notebooks: auto_examples_jupyter.zip </auto_examples/auto_examples_jupyter.zip>`
+ :download:`Download all examples in Jupyter notebooks: auto_examples_jupyter.zip <//home/rflamary/PYTHON/POT/docs/source/auto_examples/auto_examples_jupyter.zip>`
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_OTDA_2D.ipynb b/docs/source/auto_examples/plot_OTDA_2D.ipynb
deleted file mode 100644
index 2ffb256..0000000
--- a/docs/source/auto_examples/plot_OTDA_2D.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n# OT for empirical distributions\n\n\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport matplotlib.pylab as pl\nimport ot\n\n\n\n#%% parameters\n\nn=150 # nb bins\n\nxs,ys=ot.datasets.get_data_classif('3gauss',n)\nxt,yt=ot.datasets.get_data_classif('3gauss2',n)\n\na,b = ot.unif(n),ot.unif(n)\n# loss matrix\nM=ot.dist(xs,xt)\n#M/=M.max()\n\n#%% plot samples\n\npl.figure(1)\n\npl.subplot(2,2,1)\npl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')\npl.legend(loc=0)\npl.title('Source distributions')\n\npl.subplot(2,2,2)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')\npl.legend(loc=0)\npl.title('target distributions')\n\npl.figure(2)\npl.imshow(M,interpolation='nearest')\npl.title('Cost matrix M')\n\n\n#%% OT estimation\n\n# EMD\nG0=ot.emd(a,b,M)\n\n# sinkhorn\nlambd=1e-1\nGs=ot.sinkhorn(a,b,M,lambd)\n\n\n# Group lasso regularization\nreg=1e-1\neta=1e0\nGg=ot.da.sinkhorn_lpl1_mm(a,ys.astype(np.int),b,M,reg,eta)\n\n\n#%% visu matrices\n\npl.figure(3)\n\npl.subplot(2,3,1)\npl.imshow(G0,interpolation='nearest')\npl.title('OT matrix ')\n\npl.subplot(2,3,2)\npl.imshow(Gs,interpolation='nearest')\npl.title('OT matrix Sinkhorn')\n\npl.subplot(2,3,3)\npl.imshow(Gg,interpolation='nearest')\npl.title('OT matrix Group lasso')\n\npl.subplot(2,3,4)\not.plot.plot2D_samples_mat(xs,xt,G0,c=[.5,.5,1])\npl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')\n\n\npl.subplot(2,3,5)\not.plot.plot2D_samples_mat(xs,xt,Gs,c=[.5,.5,1])\npl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')\n\npl.subplot(2,3,6)\not.plot.plot2D_samples_mat(xs,xt,Gg,c=[.5,.5,1])\npl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')\n\n#%% sample interpolation\n\nxst0=n*G0.dot(xt)\nxsts=n*Gs.dot(xt)\nxstg=n*Gg.dot(xt)\n\npl.figure(4)\npl.subplot(2,3,1)\n\n\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)\npl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples')\npl.legend(loc=0)\n\npl.subplot(2,3,2)\n\n\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)\npl.scatter(xsts[:,0],xsts[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples Sinkhorn')\n\npl.subplot(2,3,3)\n\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)\npl.scatter(xstg[:,0],xstg[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples Grouplasso')"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/docs/source/auto_examples/plot_OTDA_2D.py b/docs/source/auto_examples/plot_OTDA_2D.py
deleted file mode 100644
index a1fb804..0000000
--- a/docs/source/auto_examples/plot_OTDA_2D.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-==============================
-OT for empirical distributions
-==============================
-
-"""
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-
-
-
-#%% parameters
-
-n=150 # nb bins
-
-xs,ys=ot.datasets.get_data_classif('3gauss',n)
-xt,yt=ot.datasets.get_data_classif('3gauss2',n)
-
-a,b = ot.unif(n),ot.unif(n)
-# loss matrix
-M=ot.dist(xs,xt)
-#M/=M.max()
-
-#%% plot samples
-
-pl.figure(1)
-
-pl.subplot(2,2,1)
-pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
-pl.legend(loc=0)
-pl.title('Source distributions')
-
-pl.subplot(2,2,2)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-pl.legend(loc=0)
-pl.title('target distributions')
-
-pl.figure(2)
-pl.imshow(M,interpolation='nearest')
-pl.title('Cost matrix M')
-
-
-#%% OT estimation
-
-# EMD
-G0=ot.emd(a,b,M)
-
-# sinkhorn
-lambd=1e-1
-Gs=ot.sinkhorn(a,b,M,lambd)
-
-
-# Group lasso regularization
-reg=1e-1
-eta=1e0
-Gg=ot.da.sinkhorn_lpl1_mm(a,ys.astype(np.int),b,M,reg,eta)
-
-
-#%% visu matrices
-
-pl.figure(3)
-
-pl.subplot(2,3,1)
-pl.imshow(G0,interpolation='nearest')
-pl.title('OT matrix ')
-
-pl.subplot(2,3,2)
-pl.imshow(Gs,interpolation='nearest')
-pl.title('OT matrix Sinkhorn')
-
-pl.subplot(2,3,3)
-pl.imshow(Gg,interpolation='nearest')
-pl.title('OT matrix Group lasso')
-
-pl.subplot(2,3,4)
-ot.plot.plot2D_samples_mat(xs,xt,G0,c=[.5,.5,1])
-pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
-
-pl.subplot(2,3,5)
-ot.plot.plot2D_samples_mat(xs,xt,Gs,c=[.5,.5,1])
-pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
-pl.subplot(2,3,6)
-ot.plot.plot2D_samples_mat(xs,xt,Gg,c=[.5,.5,1])
-pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
-#%% sample interpolation
-
-xst0=n*G0.dot(xt)
-xsts=n*Gs.dot(xt)
-xstg=n*Gg.dot(xt)
-
-pl.figure(4)
-pl.subplot(2,3,1)
-
-
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)
-pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples')
-pl.legend(loc=0)
-
-pl.subplot(2,3,2)
-
-
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)
-pl.scatter(xsts[:,0],xsts[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples Sinkhorn')
-
-pl.subplot(2,3,3)
-
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)
-pl.scatter(xstg[:,0],xstg[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples Grouplasso') \ No newline at end of file
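
The interpolation step just above, xst0=n*G0.dot(xt), is the barycentric mapping: each transported source point is the average of the target points weighted by its row of the OT plan, and because the source weights are uniform (a_i = 1/n) every row of the plan sums to 1/n, so multiplying by n is exactly the row normalization. A minimal self-contained sketch of that equivalence (random Gaussian clouds are used here instead of ot.datasets.get_data_classif, whose name has changed in later POT releases; note also that ys.astype(np.int) above relies on the np.int alias removed in recent NumPy, where a plain int cast suffices):

import numpy as np
import ot

rng = np.random.RandomState(0)
n = 50
xs = rng.randn(n, 2)              # source samples
xt = rng.randn(n, 2) + 4          # target samples

a, b = ot.unif(n), ot.unif(n)     # uniform weights
M = ot.dist(xs, xt)               # squared Euclidean cost
G0 = ot.emd(a, b, M)              # exact OT plan

# barycentric mapping: normalize each row of the plan, then average the targets
xst_generic = (G0 / G0.sum(axis=1, keepdims=True)).dot(xt)
xst_uniform = n * G0.dot(xt)      # shortcut valid only because a is uniform

assert np.allclose(xst_generic, xst_uniform)
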
diff --git a/docs/source/auto_examples/plot_OTDA_2D.rst b/docs/source/auto_examples/plot_OTDA_2D.rst
deleted file mode 100644
index b535bb0..0000000
--- a/docs/source/auto_examples/plot_OTDA_2D.rst
+++ /dev/null
@@ -1,175 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_OTDA_2D.py:
-
-
-==============================
-OT for empirical distributions
-==============================
-
-
-
-
-
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_2D_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_2D_002.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_2D_003.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_2D_004.png
- :scale: 47
-
-
-
-
-
-.. code-block:: python
-
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
-
-
-
- #%% parameters
-
- n=150 # nb samples in source and target datasets
-
- xs,ys=ot.datasets.get_data_classif('3gauss',n)
- xt,yt=ot.datasets.get_data_classif('3gauss2',n)
-
- a,b = ot.unif(n),ot.unif(n)
- # loss matrix
- M=ot.dist(xs,xt)
- #M/=M.max()
-
- #%% plot samples
-
- pl.figure(1)
-
- pl.subplot(2,2,1)
- pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
- pl.legend(loc=0)
- pl.title('Source distributions')
-
- pl.subplot(2,2,2)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
- pl.legend(loc=0)
- pl.title('target distributions')
-
- pl.figure(2)
- pl.imshow(M,interpolation='nearest')
- pl.title('Cost matrix M')
-
-
- #%% OT estimation
-
- # EMD
- G0=ot.emd(a,b,M)
-
- # sinkhorn
- lambd=1e-1
- Gs=ot.sinkhorn(a,b,M,lambd)
-
-
- # Group lasso regularization
- reg=1e-1
- eta=1e0
- Gg=ot.da.sinkhorn_lpl1_mm(a,ys.astype(np.int),b,M,reg,eta)
-
-
- #%% visu matrices
-
- pl.figure(3)
-
- pl.subplot(2,3,1)
- pl.imshow(G0,interpolation='nearest')
- pl.title('OT matrix ')
-
- pl.subplot(2,3,2)
- pl.imshow(Gs,interpolation='nearest')
- pl.title('OT matrix Sinkhorn')
-
- pl.subplot(2,3,3)
- pl.imshow(Gg,interpolation='nearest')
- pl.title('OT matrix Group lasso')
-
- pl.subplot(2,3,4)
- ot.plot.plot2D_samples_mat(xs,xt,G0,c=[.5,.5,1])
- pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
-
- pl.subplot(2,3,5)
- ot.plot.plot2D_samples_mat(xs,xt,Gs,c=[.5,.5,1])
- pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
- pl.subplot(2,3,6)
- ot.plot.plot2D_samples_mat(xs,xt,Gg,c=[.5,.5,1])
- pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
- #%% sample interpolation
-
- xst0=n*G0.dot(xt)
- xsts=n*Gs.dot(xt)
- xstg=n*Gg.dot(xt)
-
- pl.figure(4)
- pl.subplot(2,3,1)
-
-
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)
- pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples')
- pl.legend(loc=0)
-
- pl.subplot(2,3,2)
-
-
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)
- pl.scatter(xsts[:,0],xsts[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples Sinkhorn')
-
- pl.subplot(2,3,3)
-
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.5)
- pl.scatter(xstg[:,0],xstg[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples Grouplasso')
-**Total running time of the script:** ( 0 minutes 17.372 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_OTDA_2D.py <plot_OTDA_2D.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_OTDA_2D.ipynb <plot_OTDA_2D.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_OTDA_classes.ipynb b/docs/source/auto_examples/plot_OTDA_classes.ipynb
deleted file mode 100644
index d9fcb87..0000000
--- a/docs/source/auto_examples/plot_OTDA_classes.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n# OT for domain adaptation\n\n\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import matplotlib.pylab as pl\nimport ot\n\n\n\n\n#%% parameters\n\nn=150 # nb samples in source and target datasets\n\nxs,ys=ot.datasets.get_data_classif('3gauss',n)\nxt,yt=ot.datasets.get_data_classif('3gauss2',n)\n\n\n\n\n#%% plot samples\n\npl.figure(1)\n\npl.subplot(2,2,1)\npl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')\npl.legend(loc=0)\npl.title('Source distributions')\n\npl.subplot(2,2,2)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')\npl.legend(loc=0)\npl.title('target distributions')\n\n\n#%% OT estimation\n\n# LP problem\nda_emd=ot.da.OTDA() # init class\nda_emd.fit(xs,xt) # fit distributions\nxst0=da_emd.interp() # interpolation of source samples\n\n\n# sinkhorn regularization\nlambd=1e-1\nda_entrop=ot.da.OTDA_sinkhorn()\nda_entrop.fit(xs,xt,reg=lambd)\nxsts=da_entrop.interp()\n\n# non-convex Group lasso regularization\nreg=1e-1\neta=1e0\nda_lpl1=ot.da.OTDA_lpl1()\nda_lpl1.fit(xs,ys,xt,reg=reg,eta=eta)\nxstg=da_lpl1.interp()\n\n\n# True Group lasso regularization\nreg=1e-1\neta=2e0\nda_l1l2=ot.da.OTDA_l1l2()\nda_l1l2.fit(xs,ys,xt,reg=reg,eta=eta,numItermax=20,verbose=True)\nxstgl=da_l1l2.interp()\n\n\n#%% plot interpolated source samples\npl.figure(4,(15,8))\n\nparam_img={'interpolation':'nearest','cmap':'jet'}\n\npl.subplot(2,4,1)\npl.imshow(da_emd.G,**param_img)\npl.title('OT matrix')\n\n\npl.subplot(2,4,2)\npl.imshow(da_entrop.G,**param_img)\npl.title('OT matrix sinkhorn')\n\npl.subplot(2,4,3)\npl.imshow(da_lpl1.G,**param_img)\npl.title('OT matrix non-convex Group Lasso')\n\npl.subplot(2,4,4)\npl.imshow(da_l1l2.G,**param_img)\npl.title('OT matrix Group Lasso')\n\n\npl.subplot(2,4,5)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)\npl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples')\npl.legend(loc=0)\n\npl.subplot(2,4,6)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)\npl.scatter(xsts[:,0],xsts[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples Sinkhorn')\n\npl.subplot(2,4,7)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)\npl.scatter(xstg[:,0],xstg[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples non-convex Group Lasso')\n\npl.subplot(2,4,8)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)\npl.scatter(xstgl[:,0],xstgl[:,1],c=ys,marker='+',label='Transp samples',s=30)\npl.title('Interp samples Group Lasso')"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-} \ No newline at end of file
diff --git a/docs/source/auto_examples/plot_OTDA_classes.py b/docs/source/auto_examples/plot_OTDA_classes.py
deleted file mode 100644
index 089b45b..0000000
--- a/docs/source/auto_examples/plot_OTDA_classes.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-========================
-OT for domain adaptation
-========================
-
-"""
-
-import matplotlib.pylab as pl
-import ot
-
-
-
-
-#%% parameters
-
-n=150 # nb samples in source and target datasets
-
-xs,ys=ot.datasets.get_data_classif('3gauss',n)
-xt,yt=ot.datasets.get_data_classif('3gauss2',n)
-
-
-
-
-#%% plot samples
-
-pl.figure(1)
-
-pl.subplot(2,2,1)
-pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
-pl.legend(loc=0)
-pl.title('Source distributions')
-
-pl.subplot(2,2,2)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-pl.legend(loc=0)
-pl.title('target distributions')
-
-
-#%% OT estimation
-
-# LP problem
-da_emd=ot.da.OTDA() # init class
-da_emd.fit(xs,xt) # fit distributions
-xst0=da_emd.interp() # interpolation of source samples
-
-
-# sinkhorn regularization
-lambd=1e-1
-da_entrop=ot.da.OTDA_sinkhorn()
-da_entrop.fit(xs,xt,reg=lambd)
-xsts=da_entrop.interp()
-
-# non-convex Group lasso regularization
-reg=1e-1
-eta=1e0
-da_lpl1=ot.da.OTDA_lpl1()
-da_lpl1.fit(xs,ys,xt,reg=reg,eta=eta)
-xstg=da_lpl1.interp()
-
-
-# True Group lasso regularization
-reg=1e-1
-eta=2e0
-da_l1l2=ot.da.OTDA_l1l2()
-da_l1l2.fit(xs,ys,xt,reg=reg,eta=eta,numItermax=20,verbose=True)
-xstgl=da_l1l2.interp()
-
-
-#%% plot interpolated source samples
-pl.figure(4,(15,8))
-
-param_img={'interpolation':'nearest','cmap':'jet'}
-
-pl.subplot(2,4,1)
-pl.imshow(da_emd.G,**param_img)
-pl.title('OT matrix')
-
-
-pl.subplot(2,4,2)
-pl.imshow(da_entrop.G,**param_img)
-pl.title('OT matrix sinkhorn')
-
-pl.subplot(2,4,3)
-pl.imshow(da_lpl1.G,**param_img)
-pl.title('OT matrix non-convex Group Lasso')
-
-pl.subplot(2,4,4)
-pl.imshow(da_l1l2.G,**param_img)
-pl.title('OT matrix Group Lasso')
-
-
-pl.subplot(2,4,5)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
-pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples')
-pl.legend(loc=0)
-
-pl.subplot(2,4,6)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
-pl.scatter(xsts[:,0],xsts[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples Sinkhorn')
-
-pl.subplot(2,4,7)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
-pl.scatter(xstg[:,0],xstg[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples non-convex Group Lasso')
-
-pl.subplot(2,4,8)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
-pl.scatter(xstgl[:,0],xstgl[:,1],c=ys,marker='+',label='Transp samples',s=30)
-pl.title('Interp samples Group Lasso') \ No newline at end of file
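
The OTDA, OTDA_sinkhorn, OTDA_lpl1 and OTDA_l1l2 classes deleted above are thin wrappers around solvers that are also exposed as plain functions; sinkhorn_lpl1_mm is already called directly in plot_OTDA_2D.py above. A hedged sketch of the same four couplings computed with the functional API (ot.da.sinkhorn_l1l2_gl is assumed here to be the group-lasso counterpart of the lpl1 solver; check the ot.da module of your POT version):

import numpy as np
import ot

rng = np.random.RandomState(42)
n = 150
xs = rng.randn(n, 2)                       # source samples
ys = rng.randint(1, 4, n)                  # source labels in {1, 2, 3}
xt = rng.randn(n, 2) + np.array([3., 3.])  # shifted target samples

a, b = ot.unif(n), ot.unif(n)
M = ot.dist(xs, xt)

G_emd = ot.emd(a, b, M)                                       # exact LP coupling
G_sink = ot.sinkhorn(a, b, M, 1e-1)                           # entropic regularization
G_lpl1 = ot.da.sinkhorn_lpl1_mm(a, ys, b, M, 1e-1, eta=1.0)   # non-convex group lasso
G_l1l2 = ot.da.sinkhorn_l1l2_gl(a, ys, b, M, 1e-1, eta=2.0)   # assumed l1l2 solver name

# barycentric interpolation of the source samples, as in the deleted classes' interp()
xst = n * G_emd.dot(xt)
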
diff --git a/docs/source/auto_examples/plot_OTDA_classes.rst b/docs/source/auto_examples/plot_OTDA_classes.rst
deleted file mode 100644
index 097e9fc..0000000
--- a/docs/source/auto_examples/plot_OTDA_classes.rst
+++ /dev/null
@@ -1,190 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_OTDA_classes.py:
-
-
-========================
-OT for domain adaptation
-========================
-
-
-
-
-
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_classes_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_classes_004.png
- :scale: 47
-
-
-.. rst-class:: sphx-glr-script-out
-
- Out::
-
- It. |Loss |Delta loss
- --------------------------------
- 0|9.171271e+00|0.000000e+00
- 1|2.133783e+00|-3.298127e+00
- 2|1.895941e+00|-1.254484e-01
- 3|1.844628e+00|-2.781709e-02
- 4|1.824983e+00|-1.076467e-02
- 5|1.815453e+00|-5.249337e-03
- 6|1.808104e+00|-4.064733e-03
- 7|1.803558e+00|-2.520475e-03
- 8|1.801061e+00|-1.386155e-03
- 9|1.799391e+00|-9.279565e-04
- 10|1.797176e+00|-1.232778e-03
- 11|1.795465e+00|-9.529479e-04
- 12|1.795316e+00|-8.322362e-05
- 13|1.794523e+00|-4.418932e-04
- 14|1.794444e+00|-4.390599e-05
- 15|1.794395e+00|-2.710318e-05
- 16|1.793713e+00|-3.804028e-04
- 17|1.793110e+00|-3.359479e-04
- 18|1.792829e+00|-1.569563e-04
- 19|1.792621e+00|-1.159469e-04
- It. |Loss |Delta loss
- --------------------------------
- 20|1.791334e+00|-7.187689e-04
-
-
-
-
-|
-
-
-.. code-block:: python
-
-
- import matplotlib.pylab as pl
- import ot
-
-
-
-
- #%% parameters
-
- n=150 # nb samples in source and target datasets
-
- xs,ys=ot.datasets.get_data_classif('3gauss',n)
- xt,yt=ot.datasets.get_data_classif('3gauss2',n)
-
-
-
-
- #%% plot samples
-
- pl.figure(1)
-
- pl.subplot(2,2,1)
- pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
- pl.legend(loc=0)
- pl.title('Source distributions')
-
- pl.subplot(2,2,2)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
- pl.legend(loc=0)
- pl.title('target distributions')
-
-
- #%% OT estimation
-
- # LP problem
- da_emd=ot.da.OTDA() # init class
- da_emd.fit(xs,xt) # fit distributions
- xst0=da_emd.interp() # interpolation of source samples
-
-
- # sinkhorn regularization
- lambd=1e-1
- da_entrop=ot.da.OTDA_sinkhorn()
- da_entrop.fit(xs,xt,reg=lambd)
- xsts=da_entrop.interp()
-
- # non-convex Group lasso regularization
- reg=1e-1
- eta=1e0
- da_lpl1=ot.da.OTDA_lpl1()
- da_lpl1.fit(xs,ys,xt,reg=reg,eta=eta)
- xstg=da_lpl1.interp()
-
-
- # True Group lasso regularization
- reg=1e-1
- eta=2e0
- da_l1l2=ot.da.OTDA_l1l2()
- da_l1l2.fit(xs,ys,xt,reg=reg,eta=eta,numItermax=20,verbose=True)
- xstgl=da_l1l2.interp()
-
-
- #%% plot interpolated source samples
- pl.figure(4,(15,8))
-
- param_img={'interpolation':'nearest','cmap':'jet'}
-
- pl.subplot(2,4,1)
- pl.imshow(da_emd.G,**param_img)
- pl.title('OT matrix')
-
-
- pl.subplot(2,4,2)
- pl.imshow(da_entrop.G,**param_img)
- pl.title('OT matrix sinkhorn')
-
- pl.subplot(2,4,3)
- pl.imshow(da_lpl1.G,**param_img)
- pl.title('OT matrix non-convex Group Lasso')
-
- pl.subplot(2,4,4)
- pl.imshow(da_l1l2.G,**param_img)
- pl.title('OT matrix Group Lasso')
-
-
- pl.subplot(2,4,5)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
- pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples')
- pl.legend(loc=0)
-
- pl.subplot(2,4,6)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
- pl.scatter(xsts[:,0],xsts[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples Sinkhorn')
-
- pl.subplot(2,4,7)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
- pl.scatter(xstg[:,0],xstg[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples non-convex Group Lasso')
-
- pl.subplot(2,4,8)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=0.3)
- pl.scatter(xstgl[:,0],xstgl[:,1],c=ys,marker='+',label='Transp samples',s=30)
- pl.title('Interp samples Group Lasso')
-**Total running time of the script:** ( 0 minutes 2.225 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_OTDA_classes.py <plot_OTDA_classes.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_OTDA_classes.ipynb <plot_OTDA_classes.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
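
A note on reading the verbose output above: the Delta loss column is consistent with a relative decrease, (loss_k - loss_{k-1}) / loss_k, rather than an absolute difference. This is inferred from the printed numbers, not from the solver source, so here is a quick check against the first two printed iterations:

loss_prev, loss_curr = 9.171271, 2.133783
print((loss_curr - loss_prev) / loss_curr)   # ~ -3.298127, matching the printed delta
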
diff --git a/docs/source/auto_examples/plot_OTDA_color_images.ipynb b/docs/source/auto_examples/plot_OTDA_color_images.ipynb
deleted file mode 100644
index d174828..0000000
--- a/docs/source/auto_examples/plot_OTDA_color_images.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n========================================================\nOT for domain adaptation with image color adaptation [6]\n========================================================\n\n[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport scipy.ndimage as spi\nimport matplotlib.pylab as pl\nimport ot\n\n\n#%% Loading images\n\nI1=spi.imread('../data/ocean_day.jpg').astype(np.float64)/256\nI2=spi.imread('../data/ocean_sunset.jpg').astype(np.float64)/256\n\n#%% Plot images\n\npl.figure(1)\n\npl.subplot(1,2,1)\npl.imshow(I1)\npl.title('Image 1')\n\npl.subplot(1,2,2)\npl.imshow(I2)\npl.title('Image 2')\n\npl.show()\n\n#%% Image conversion and dataset generation\n\ndef im2mat(I):\n \"\"\"Converts and image to matrix (one pixel per line)\"\"\"\n return I.reshape((I.shape[0]*I.shape[1],I.shape[2]))\n\ndef mat2im(X,shape):\n \"\"\"Converts back a matrix to an image\"\"\"\n return X.reshape(shape)\n\nX1=im2mat(I1)\nX2=im2mat(I2)\n\n# training samples\nnb=1000\nidx1=np.random.randint(X1.shape[0],size=(nb,))\nidx2=np.random.randint(X2.shape[0],size=(nb,))\n\nxs=X1[idx1,:]\nxt=X2[idx2,:]\n\n#%% Plot image distributions\n\n\npl.figure(2,(10,5))\n\npl.subplot(1,2,1)\npl.scatter(xs[:,0],xs[:,2],c=xs)\npl.axis([0,1,0,1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 1')\n\npl.subplot(1,2,2)\n#pl.imshow(I2)\npl.scatter(xt[:,0],xt[:,2],c=xt)\npl.axis([0,1,0,1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 2')\n\npl.show()\n\n\n\n#%% domain adaptation between images\n\n# LP problem\nda_emd=ot.da.OTDA() # init class\nda_emd.fit(xs,xt) # fit distributions\n\n\n# sinkhorn regularization\nlambd=1e-1\nda_entrop=ot.da.OTDA_sinkhorn()\nda_entrop.fit(xs,xt,reg=lambd)\n\n\n\n#%% prediction between images (using out of sample prediction as in [6])\n\nX1t=da_emd.predict(X1)\nX2t=da_emd.predict(X2,-1)\n\n\nX1te=da_entrop.predict(X1)\nX2te=da_entrop.predict(X2,-1)\n\n\ndef minmax(I):\n return np.minimum(np.maximum(I,0),1)\n\nI1t=minmax(mat2im(X1t,I1.shape))\nI2t=minmax(mat2im(X2t,I2.shape))\n\nI1te=minmax(mat2im(X1te,I1.shape))\nI2te=minmax(mat2im(X2te,I2.shape))\n\n#%% plot all images\n\npl.figure(2,(10,8))\n\npl.subplot(2,3,1)\n\npl.imshow(I1)\npl.title('Image 1')\n\npl.subplot(2,3,2)\npl.imshow(I1t)\npl.title('Image 1 Adapt')\n\n\npl.subplot(2,3,3)\npl.imshow(I1te)\npl.title('Image 1 Adapt (reg)')\n\npl.subplot(2,3,4)\n\npl.imshow(I2)\npl.title('Image 2')\n\npl.subplot(2,3,5)\npl.imshow(I2t)\npl.title('Image 2 Adapt')\n\n\npl.subplot(2,3,6)\npl.imshow(I2te)\npl.title('Image 2 Adapt (reg)')\n\npl.show()"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-} \ No newline at end of file
diff --git a/docs/source/auto_examples/plot_OTDA_color_images.py b/docs/source/auto_examples/plot_OTDA_color_images.py
deleted file mode 100644
index 68eee44..0000000
--- a/docs/source/auto_examples/plot_OTDA_color_images.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-========================================================
-OT for domain adaptation with image color adaptation [6]
-========================================================
-
-[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-"""
-
-import numpy as np
-import scipy.ndimage as spi
-import matplotlib.pylab as pl
-import ot
-
-
-#%% Loading images
-
-I1=spi.imread('../data/ocean_day.jpg').astype(np.float64)/256
-I2=spi.imread('../data/ocean_sunset.jpg').astype(np.float64)/256
-
-#%% Plot images
-
-pl.figure(1)
-
-pl.subplot(1,2,1)
-pl.imshow(I1)
-pl.title('Image 1')
-
-pl.subplot(1,2,2)
-pl.imshow(I2)
-pl.title('Image 2')
-
-pl.show()
-
-#%% Image conversion and dataset generation
-
-def im2mat(I):
- """Converts and image to matrix (one pixel per line)"""
- return I.reshape((I.shape[0]*I.shape[1],I.shape[2]))
-
-def mat2im(X,shape):
- """Converts back a matrix to an image"""
- return X.reshape(shape)
-
-X1=im2mat(I1)
-X2=im2mat(I2)
-
-# training samples
-nb=1000
-idx1=np.random.randint(X1.shape[0],size=(nb,))
-idx2=np.random.randint(X2.shape[0],size=(nb,))
-
-xs=X1[idx1,:]
-xt=X2[idx2,:]
-
-#%% Plot image distributions
-
-
-pl.figure(2,(10,5))
-
-pl.subplot(1,2,1)
-pl.scatter(xs[:,0],xs[:,2],c=xs)
-pl.axis([0,1,0,1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 1')
-
-pl.subplot(1,2,2)
-#pl.imshow(I2)
-pl.scatter(xt[:,0],xt[:,2],c=xt)
-pl.axis([0,1,0,1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 2')
-
-pl.show()
-
-
-
-#%% domain adaptation between images
-
-# LP problem
-da_emd=ot.da.OTDA() # init class
-da_emd.fit(xs,xt) # fit distributions
-
-
-# sinkhorn regularization
-lambd=1e-1
-da_entrop=ot.da.OTDA_sinkhorn()
-da_entrop.fit(xs,xt,reg=lambd)
-
-
-
-#%% prediction between images (using out of sample prediction as in [6])
-
-X1t=da_emd.predict(X1)
-X2t=da_emd.predict(X2,-1)
-
-
-X1te=da_entrop.predict(X1)
-X2te=da_entrop.predict(X2,-1)
-
-
-def minmax(I):
- return np.minimum(np.maximum(I,0),1)
-
-I1t=minmax(mat2im(X1t,I1.shape))
-I2t=minmax(mat2im(X2t,I2.shape))
-
-I1te=minmax(mat2im(X1te,I1.shape))
-I2te=minmax(mat2im(X2te,I2.shape))
-
-#%% plot all images
-
-pl.figure(2,(10,8))
-
-pl.subplot(2,3,1)
-
-pl.imshow(I1)
-pl.title('Image 1')
-
-pl.subplot(2,3,2)
-pl.imshow(I1t)
-pl.title('Image 1 Adapt')
-
-
-pl.subplot(2,3,3)
-pl.imshow(I1te)
-pl.title('Image 1 Adapt (reg)')
-
-pl.subplot(2,3,4)
-
-pl.imshow(I2)
-pl.title('Image 2')
-
-pl.subplot(2,3,5)
-pl.imshow(I2t)
-pl.title('Image 2 Adapt')
-
-
-pl.subplot(2,3,6)
-pl.imshow(I2te)
-pl.title('Image 2 Adapt (reg)')
-
-pl.show()
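
Two practical details in the color-adaptation script above are worth spelling out: only nb=1000 pixels per image are sampled because a dense cost matrix over all pixels would not fit in memory, and minmax() clips the adapted colors because the out-of-sample predictions are not constrained to stay in [0, 1]. A small NumPy-only sketch of both points (the 600x900 image size is illustrative, not read from the actual JPEGs):

import numpy as np

# memory needed for a dense float64 cost matrix on full images vs. 1000 sampled pixels
n_pixels = 600 * 900
print(n_pixels ** 2 * 8 / 1e9, "GB")   # ~2333 GB: subsampling is mandatory
print(1000 ** 2 * 8 / 1e6, "MB")       # 8 MB for the 1000 x 1000 matrix

# clipping adapted colors back into the valid range, same effect as minmax()
X1t = np.array([[1.03, 0.20, -0.01]])  # a mapped pixel slightly out of range
print(np.clip(X1t, 0, 1))              # -> [[1.   0.2  0.  ]]
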
diff --git a/docs/source/auto_examples/plot_OTDA_color_images.rst b/docs/source/auto_examples/plot_OTDA_color_images.rst
deleted file mode 100644
index a982a90..0000000
--- a/docs/source/auto_examples/plot_OTDA_color_images.rst
+++ /dev/null
@@ -1,191 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_OTDA_color_images.py:
-
-
-========================================================
-OT for domain adaptation with image color adaptation [6]
-========================================================
-
-[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-
-
-
-
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_color_images_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_color_images_002.png
- :scale: 47
-
-
-
-
-
-.. code-block:: python
-
-
- import numpy as np
- import scipy.ndimage as spi
- import matplotlib.pylab as pl
- import ot
-
-
- #%% Loading images
-
- I1=spi.imread('../data/ocean_day.jpg').astype(np.float64)/256
- I2=spi.imread('../data/ocean_sunset.jpg').astype(np.float64)/256
-
- #%% Plot images
-
- pl.figure(1)
-
- pl.subplot(1,2,1)
- pl.imshow(I1)
- pl.title('Image 1')
-
- pl.subplot(1,2,2)
- pl.imshow(I2)
- pl.title('Image 2')
-
- pl.show()
-
- #%% Image conversion and dataset generation
-
- def im2mat(I):
- """Converts and image to matrix (one pixel per line)"""
- return I.reshape((I.shape[0]*I.shape[1],I.shape[2]))
-
- def mat2im(X,shape):
- """Converts back a matrix to an image"""
- return X.reshape(shape)
-
- X1=im2mat(I1)
- X2=im2mat(I2)
-
- # training samples
- nb=1000
- idx1=np.random.randint(X1.shape[0],size=(nb,))
- idx2=np.random.randint(X2.shape[0],size=(nb,))
-
- xs=X1[idx1,:]
- xt=X2[idx2,:]
-
- #%% Plot image distributions
-
-
- pl.figure(2,(10,5))
-
- pl.subplot(1,2,1)
- pl.scatter(xs[:,0],xs[:,2],c=xs)
- pl.axis([0,1,0,1])
- pl.xlabel('Red')
- pl.ylabel('Blue')
- pl.title('Image 1')
-
- pl.subplot(1,2,2)
- #pl.imshow(I2)
- pl.scatter(xt[:,0],xt[:,2],c=xt)
- pl.axis([0,1,0,1])
- pl.xlabel('Red')
- pl.ylabel('Blue')
- pl.title('Image 2')
-
- pl.show()
-
-
-
- #%% domain adaptation between images
-
- # LP problem
- da_emd=ot.da.OTDA() # init class
- da_emd.fit(xs,xt) # fit distributions
-
-
- # sinkhorn regularization
- lambd=1e-1
- da_entrop=ot.da.OTDA_sinkhorn()
- da_entrop.fit(xs,xt,reg=lambd)
-
-
-
- #%% prediction between images (using out of sample prediction as in [6])
-
- X1t=da_emd.predict(X1)
- X2t=da_emd.predict(X2,-1)
-
-
- X1te=da_entrop.predict(X1)
- X2te=da_entrop.predict(X2,-1)
-
-
- def minmax(I):
- return np.minimum(np.maximum(I,0),1)
-
- I1t=minmax(mat2im(X1t,I1.shape))
- I2t=minmax(mat2im(X2t,I2.shape))
-
- I1te=minmax(mat2im(X1te,I1.shape))
- I2te=minmax(mat2im(X2te,I2.shape))
-
- #%% plot all images
-
- pl.figure(2,(10,8))
-
- pl.subplot(2,3,1)
-
- pl.imshow(I1)
- pl.title('Image 1')
-
- pl.subplot(2,3,2)
- pl.imshow(I1t)
- pl.title('Image 1 Adapt')
-
-
- pl.subplot(2,3,3)
- pl.imshow(I1te)
- pl.title('Image 1 Adapt (reg)')
-
- pl.subplot(2,3,4)
-
- pl.imshow(I2)
- pl.title('Image 2')
-
- pl.subplot(2,3,5)
- pl.imshow(I2t)
- pl.title('Image 2 Adapt')
-
-
- pl.subplot(2,3,6)
- pl.imshow(I2te)
- pl.title('Image 2 Adapt (reg)')
-
- pl.show()
-
-**Total running time of the script:** ( 0 minutes 24.815 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_OTDA_color_images.py <plot_OTDA_color_images.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_OTDA_color_images.ipynb <plot_OTDA_color_images.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_OTDA_mapping.ipynb b/docs/source/auto_examples/plot_OTDA_mapping.ipynb
deleted file mode 100644
index ec405af..0000000
--- a/docs/source/auto_examples/plot_OTDA_mapping.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n===============================================\nOT mapping estimation for domain adaptation [8]\n===============================================\n\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, \"Mapping estimation for\n discrete optimal transport\", Neural Information Processing Systems (NIPS), 2016.\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport matplotlib.pylab as pl\nimport ot\n\n\n\n#%% dataset generation\n\nnp.random.seed(0) # makes example reproducible\n\nn=100 # nb samples in source and target datasets\ntheta=2*np.pi/20\nnz=0.1\nxs,ys=ot.datasets.get_data_classif('gaussrot',n,nz=nz)\nxt,yt=ot.datasets.get_data_classif('gaussrot',n,theta=theta,nz=nz)\n\n# one of the target mode changes its variance (no linear mapping)\nxt[yt==2]*=3\nxt=xt+4\n\n\n#%% plot samples\n\npl.figure(1,(8,5))\npl.clf()\n\npl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')\n\npl.legend(loc=0)\npl.title('Source and target distributions')\n\n\n\n#%% OT linear mapping estimation\n\neta=1e-8 # quadratic regularization for regression\nmu=1e0 # weight of the OT linear term\nbias=True # estimate a bias\n\not_mapping=ot.da.OTDA_mapping_linear()\not_mapping.fit(xs,xt,mu=mu,eta=eta,bias=bias,numItermax = 20,verbose=True)\n\nxst=ot_mapping.predict(xs) # use the estimated mapping\nxst0=ot_mapping.interp() # use barycentric mapping\n\n\npl.figure(2,(10,7))\npl.clf()\npl.subplot(2,2,1)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.3)\npl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='barycentric mapping')\npl.title(\"barycentric mapping\")\n\npl.subplot(2,2,2)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.3)\npl.scatter(xst[:,0],xst[:,1],c=ys,marker='+',label='Learned mapping')\npl.title(\"Learned mapping\")\n\n\n\n#%% Kernel mapping estimation\n\neta=1e-5 # quadratic regularization for regression\nmu=1e-1 # weight of the OT linear term\nbias=True # estimate a bias\nsigma=1 # sigma bandwidth fot gaussian kernel\n\n\not_mapping_kernel=ot.da.OTDA_mapping_kernel()\not_mapping_kernel.fit(xs,xt,mu=mu,eta=eta,sigma=sigma,bias=bias,numItermax = 10,verbose=True)\n\nxst_kernel=ot_mapping_kernel.predict(xs) # use the estimated mapping\nxst0_kernel=ot_mapping_kernel.interp() # use barycentric mapping\n\n\n#%% Plotting the mapped samples\n\npl.figure(2,(10,7))\npl.clf()\npl.subplot(2,2,1)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)\npl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Mapped source samples')\npl.title(\"Bary. mapping (linear)\")\npl.legend(loc=0)\n\npl.subplot(2,2,2)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)\npl.scatter(xst[:,0],xst[:,1],c=ys,marker='+',label='Learned mapping')\npl.title(\"Estim. mapping (linear)\")\n\npl.subplot(2,2,3)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)\npl.scatter(xst0_kernel[:,0],xst0_kernel[:,1],c=ys,marker='+',label='barycentric mapping')\npl.title(\"Bary. mapping (kernel)\")\n\npl.subplot(2,2,4)\npl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)\npl.scatter(xst_kernel[:,0],xst_kernel[:,1],c=ys,marker='+',label='Learned mapping')\npl.title(\"Estim. mapping (kernel)\")"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-} \ No newline at end of file
diff --git a/docs/source/auto_examples/plot_OTDA_mapping.py b/docs/source/auto_examples/plot_OTDA_mapping.py
deleted file mode 100644
index 78b57e7..0000000
--- a/docs/source/auto_examples/plot_OTDA_mapping.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-===============================================
-OT mapping estimation for domain adaptation [8]
-===============================================
-
-[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, "Mapping estimation for
- discrete optimal transport", Neural Information Processing Systems (NIPS), 2016.
-"""
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-
-
-
-#%% dataset generation
-
-np.random.seed(0) # makes example reproducible
-
-n=100 # nb samples in source and target datasets
-theta=2*np.pi/20
-nz=0.1
-xs,ys=ot.datasets.get_data_classif('gaussrot',n,nz=nz)
-xt,yt=ot.datasets.get_data_classif('gaussrot',n,theta=theta,nz=nz)
-
-# one of the target modes changes its variance (so no exact linear mapping exists)
-xt[yt==2]*=3
-xt=xt+4
-
-
-#%% plot samples
-
-pl.figure(1,(8,5))
-pl.clf()
-
-pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
-pl.legend(loc=0)
-pl.title('Source and target distributions')
-
-
-
-#%% OT linear mapping estimation
-
-eta=1e-8 # quadratic regularization for regression
-mu=1e0 # weight of the OT linear term
-bias=True # estimate a bias
-
-ot_mapping=ot.da.OTDA_mapping_linear()
-ot_mapping.fit(xs,xt,mu=mu,eta=eta,bias=bias,numItermax = 20,verbose=True)
-
-xst=ot_mapping.predict(xs) # use the estimated mapping
-xst0=ot_mapping.interp() # use barycentric mapping
-
-
-pl.figure(2,(10,7))
-pl.clf()
-pl.subplot(2,2,1)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.3)
-pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='barycentric mapping')
-pl.title("barycentric mapping")
-
-pl.subplot(2,2,2)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.3)
-pl.scatter(xst[:,0],xst[:,1],c=ys,marker='+',label='Learned mapping')
-pl.title("Learned mapping")
-
-
-
-#%% Kernel mapping estimation
-
-eta=1e-5 # quadratic regularization for regression
-mu=1e-1 # weight of the OT linear term
-bias=True # estimate a bias
-sigma=1 # sigma bandwidth for the Gaussian kernel
-
-
-ot_mapping_kernel=ot.da.OTDA_mapping_kernel()
-ot_mapping_kernel.fit(xs,xt,mu=mu,eta=eta,sigma=sigma,bias=bias,numItermax = 10,verbose=True)
-
-xst_kernel=ot_mapping_kernel.predict(xs) # use the estimated mapping
-xst0_kernel=ot_mapping_kernel.interp() # use barycentric mapping
-
-
-#%% Plotting the mapped samples
-
-pl.figure(2,(10,7))
-pl.clf()
-pl.subplot(2,2,1)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
-pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Mapped source samples')
-pl.title("Bary. mapping (linear)")
-pl.legend(loc=0)
-
-pl.subplot(2,2,2)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
-pl.scatter(xst[:,0],xst[:,1],c=ys,marker='+',label='Learned mapping')
-pl.title("Estim. mapping (linear)")
-
-pl.subplot(2,2,3)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
-pl.scatter(xst0_kernel[:,0],xst0_kernel[:,1],c=ys,marker='+',label='barycentric mapping')
-pl.title("Bary. mapping (kernel)")
-
-pl.subplot(2,2,4)
-pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
-pl.scatter(xst_kernel[:,0],xst_kernel[:,1],c=ys,marker='+',label='Learned mapping')
-pl.title("Estim. mapping (kernel)")
diff --git a/docs/source/auto_examples/plot_OTDA_mapping.rst b/docs/source/auto_examples/plot_OTDA_mapping.rst
deleted file mode 100644
index 18da90d..0000000
--- a/docs/source/auto_examples/plot_OTDA_mapping.rst
+++ /dev/null
@@ -1,186 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_OTDA_mapping.py:
-
-
-===============================================
-OT mapping estimation for domain adaptation [8]
-===============================================
-
-[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, "Mapping estimation for
- discrete optimal transport", Neural Information Processing Systems (NIPS), 2016.
-
-
-
-
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_mapping_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_mapping_002.png
- :scale: 47
-
-
-.. rst-class:: sphx-glr-script-out
-
- Out::
-
- It. |Loss |Delta loss
- --------------------------------
- 0|4.009366e+03|0.000000e+00
- 1|3.999933e+03|-2.352753e-03
- 2|3.999520e+03|-1.031984e-04
- 3|3.999362e+03|-3.936391e-05
- 4|3.999281e+03|-2.032868e-05
- 5|3.999238e+03|-1.083083e-05
- 6|3.999229e+03|-2.125291e-06
- It. |Loss |Delta loss
- --------------------------------
- 0|4.026841e+02|0.000000e+00
- 1|3.990791e+02|-8.952439e-03
- 2|3.987954e+02|-7.107124e-04
- 3|3.986554e+02|-3.512453e-04
- 4|3.985721e+02|-2.087997e-04
- 5|3.985141e+02|-1.456184e-04
- 6|3.984729e+02|-1.034624e-04
- 7|3.984435e+02|-7.366943e-05
- 8|3.984199e+02|-5.922497e-05
- 9|3.984016e+02|-4.593063e-05
- 10|3.983867e+02|-3.733061e-05
-
-
-
-
-|
-
-
-.. code-block:: python
-
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
-
-
-
- #%% dataset generation
-
- np.random.seed(0) # makes example reproducible
-
- n=100 # nb samples in source and target datasets
- theta=2*np.pi/20
- nz=0.1
- xs,ys=ot.datasets.get_data_classif('gaussrot',n,nz=nz)
- xt,yt=ot.datasets.get_data_classif('gaussrot',n,theta=theta,nz=nz)
-
- # one of the target modes changes its variance (so no exact linear mapping exists)
- xt[yt==2]*=3
- xt=xt+4
-
-
- #%% plot samples
-
- pl.figure(1,(8,5))
- pl.clf()
-
- pl.scatter(xs[:,0],xs[:,1],c=ys,marker='+',label='Source samples')
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples')
-
- pl.legend(loc=0)
- pl.title('Source and target distributions')
-
-
-
- #%% OT linear mapping estimation
-
- eta=1e-8 # quadratic regularization for regression
- mu=1e0 # weight of the OT linear term
- bias=True # estimate a bias
-
- ot_mapping=ot.da.OTDA_mapping_linear()
- ot_mapping.fit(xs,xt,mu=mu,eta=eta,bias=bias,numItermax = 20,verbose=True)
-
- xst=ot_mapping.predict(xs) # use the estimated mapping
- xst0=ot_mapping.interp() # use barycentric mapping
-
-
- pl.figure(2,(10,7))
- pl.clf()
- pl.subplot(2,2,1)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.3)
- pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='barycentric mapping')
- pl.title("barycentric mapping")
-
- pl.subplot(2,2,2)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.3)
- pl.scatter(xst[:,0],xst[:,1],c=ys,marker='+',label='Learned mapping')
- pl.title("Learned mapping")
-
-
-
- #%% Kernel mapping estimation
-
- eta=1e-5 # quadratic regularization for regression
- mu=1e-1 # weight of the OT linear term
- bias=True # estimate a bias
- sigma=1 # sigma bandwidth for the Gaussian kernel
-
-
- ot_mapping_kernel=ot.da.OTDA_mapping_kernel()
- ot_mapping_kernel.fit(xs,xt,mu=mu,eta=eta,sigma=sigma,bias=bias,numItermax = 10,verbose=True)
-
- xst_kernel=ot_mapping_kernel.predict(xs) # use the estimated mapping
- xst0_kernel=ot_mapping_kernel.interp() # use barycentric mapping
-
-
- #%% Plotting the mapped samples
-
- pl.figure(2,(10,7))
- pl.clf()
- pl.subplot(2,2,1)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
- pl.scatter(xst0[:,0],xst0[:,1],c=ys,marker='+',label='Mapped source samples')
- pl.title("Bary. mapping (linear)")
- pl.legend(loc=0)
-
- pl.subplot(2,2,2)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
- pl.scatter(xst[:,0],xst[:,1],c=ys,marker='+',label='Learned mapping')
- pl.title("Estim. mapping (linear)")
-
- pl.subplot(2,2,3)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
- pl.scatter(xst0_kernel[:,0],xst0_kernel[:,1],c=ys,marker='+',label='barycentric mapping')
- pl.title("Bary. mapping (kernel)")
-
- pl.subplot(2,2,4)
- pl.scatter(xt[:,0],xt[:,1],c=yt,marker='o',label='Target samples',alpha=.2)
- pl.scatter(xst_kernel[:,0],xst_kernel[:,1],c=ys,marker='+',label='Learned mapping')
- pl.title("Estim. mapping (kernel)")
-
-**Total running time of the script:** ( 0 minutes 0.882 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_OTDA_mapping.py <plot_OTDA_mapping.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_OTDA_mapping.ipynb <plot_OTDA_mapping.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_OTDA_mapping_color_images.ipynb b/docs/source/auto_examples/plot_OTDA_mapping_color_images.ipynb
deleted file mode 100644
index 1136cc3..0000000
--- a/docs/source/auto_examples/plot_OTDA_mapping_color_images.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n====================================================================================\nOT for domain adaptation with image color adaptation [6] with mapping estimation [8]\n====================================================================================\n\n[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized\n discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, \"Mapping estimation for\n discrete optimal transport\", Neural Information Processing Systems (NIPS), 2016.\n\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport scipy.ndimage as spi\nimport matplotlib.pylab as pl\nimport ot\n\n\n#%% Loading images\n\nI1=spi.imread('../data/ocean_day.jpg').astype(np.float64)/256\nI2=spi.imread('../data/ocean_sunset.jpg').astype(np.float64)/256\n\n#%% Plot images\n\npl.figure(1)\n\npl.subplot(1,2,1)\npl.imshow(I1)\npl.title('Image 1')\n\npl.subplot(1,2,2)\npl.imshow(I2)\npl.title('Image 2')\n\npl.show()\n\n#%% Image conversion and dataset generation\n\ndef im2mat(I):\n \"\"\"Converts and image to matrix (one pixel per line)\"\"\"\n return I.reshape((I.shape[0]*I.shape[1],I.shape[2]))\n\ndef mat2im(X,shape):\n \"\"\"Converts back a matrix to an image\"\"\"\n return X.reshape(shape)\n\nX1=im2mat(I1)\nX2=im2mat(I2)\n\n# training samples\nnb=1000\nidx1=np.random.randint(X1.shape[0],size=(nb,))\nidx2=np.random.randint(X2.shape[0],size=(nb,))\n\nxs=X1[idx1,:]\nxt=X2[idx2,:]\n\n#%% Plot image distributions\n\n\npl.figure(2,(10,5))\n\npl.subplot(1,2,1)\npl.scatter(xs[:,0],xs[:,2],c=xs)\npl.axis([0,1,0,1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 1')\n\npl.subplot(1,2,2)\n#pl.imshow(I2)\npl.scatter(xt[:,0],xt[:,2],c=xt)\npl.axis([0,1,0,1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 2')\n\npl.show()\n\n\n\n#%% domain adaptation between images\ndef minmax(I):\n return np.minimum(np.maximum(I,0),1)\n# LP problem\nda_emd=ot.da.OTDA() # init class\nda_emd.fit(xs,xt) # fit distributions\n\nX1t=da_emd.predict(X1) # out of sample\nI1t=minmax(mat2im(X1t,I1.shape))\n\n# sinkhorn regularization\nlambd=1e-1\nda_entrop=ot.da.OTDA_sinkhorn()\nda_entrop.fit(xs,xt,reg=lambd)\n\nX1te=da_entrop.predict(X1)\nI1te=minmax(mat2im(X1te,I1.shape))\n\n# linear mapping estimation\neta=1e-8 # quadratic regularization for regression\nmu=1e0 # weight of the OT linear term\nbias=True # estimate a bias\n\not_mapping=ot.da.OTDA_mapping_linear()\not_mapping.fit(xs,xt,mu=mu,eta=eta,bias=bias,numItermax = 20,verbose=True)\n\nX1tl=ot_mapping.predict(X1) # use the estimated mapping\nI1tl=minmax(mat2im(X1tl,I1.shape))\n\n# nonlinear mapping estimation\neta=1e-2 # quadratic regularization for regression\nmu=1e0 # weight of the OT linear term\nbias=False # estimate a bias\nsigma=1 # sigma bandwidth fot gaussian kernel\n\n\not_mapping_kernel=ot.da.OTDA_mapping_kernel()\not_mapping_kernel.fit(xs,xt,mu=mu,eta=eta,sigma=sigma,bias=bias,numItermax = 10,verbose=True)\n\nX1tn=ot_mapping_kernel.predict(X1) # use the estimated mapping\nI1tn=minmax(mat2im(X1tn,I1.shape))\n#%% plot images\n\n\npl.figure(2,(10,8))\n\npl.subplot(2,3,1)\n\npl.imshow(I1)\npl.title('Im. 1')\n\npl.subplot(2,3,2)\n\npl.imshow(I2)\npl.title('Im. 2')\n\n\npl.subplot(2,3,3)\npl.imshow(I1t)\npl.title('Im. 1 Interp LP')\n\npl.subplot(2,3,4)\npl.imshow(I1te)\npl.title('Im. 1 Interp Entrop')\n\n\npl.subplot(2,3,5)\npl.imshow(I1tl)\npl.title('Im. 1 Linear mapping')\n\npl.subplot(2,3,6)\npl.imshow(I1tn)\npl.title('Im. 1 nonlinear mapping')\n\npl.show()"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-} \ No newline at end of file
diff --git a/docs/source/auto_examples/plot_OTDA_mapping_color_images.py b/docs/source/auto_examples/plot_OTDA_mapping_color_images.py
deleted file mode 100644
index f07dc6c..0000000
--- a/docs/source/auto_examples/plot_OTDA_mapping_color_images.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-====================================================================================
-OT for domain adaptation with image color adaptation [6] with mapping estimation [8]
-====================================================================================
-
-[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized
- discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, "Mapping estimation for
- discrete optimal transport", Neural Information Processing Systems (NIPS), 2016.
-
-"""
-
-import numpy as np
-import scipy.ndimage as spi
-import matplotlib.pylab as pl
-import ot
-
-
-#%% Loading images
-
-I1=spi.imread('../data/ocean_day.jpg').astype(np.float64)/256
-I2=spi.imread('../data/ocean_sunset.jpg').astype(np.float64)/256
-
-#%% Plot images
-
-pl.figure(1)
-
-pl.subplot(1,2,1)
-pl.imshow(I1)
-pl.title('Image 1')
-
-pl.subplot(1,2,2)
-pl.imshow(I2)
-pl.title('Image 2')
-
-pl.show()
-
-#%% Image conversion and dataset generation
-
-def im2mat(I):
- """Converts and image to matrix (one pixel per line)"""
- return I.reshape((I.shape[0]*I.shape[1],I.shape[2]))
-
-def mat2im(X,shape):
- """Converts back a matrix to an image"""
- return X.reshape(shape)
-
-X1=im2mat(I1)
-X2=im2mat(I2)
-
-# training samples
-nb=1000
-idx1=np.random.randint(X1.shape[0],size=(nb,))
-idx2=np.random.randint(X2.shape[0],size=(nb,))
-
-xs=X1[idx1,:]
-xt=X2[idx2,:]
-
-#%% Plot image distributions
-
-
-pl.figure(2,(10,5))
-
-pl.subplot(1,2,1)
-pl.scatter(xs[:,0],xs[:,2],c=xs)
-pl.axis([0,1,0,1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 1')
-
-pl.subplot(1,2,2)
-#pl.imshow(I2)
-pl.scatter(xt[:,0],xt[:,2],c=xt)
-pl.axis([0,1,0,1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 2')
-
-pl.show()
-
-
-
-#%% domain adaptation between images
-def minmax(I):
- return np.minimum(np.maximum(I,0),1)
-# LP problem
-da_emd=ot.da.OTDA() # init class
-da_emd.fit(xs,xt) # fit distributions
-
-X1t=da_emd.predict(X1) # out of sample
-I1t=minmax(mat2im(X1t,I1.shape))
-
-# sinkhorn regularization
-lambd=1e-1
-da_entrop=ot.da.OTDA_sinkhorn()
-da_entrop.fit(xs,xt,reg=lambd)
-
-X1te=da_entrop.predict(X1)
-I1te=minmax(mat2im(X1te,I1.shape))
-
-# linear mapping estimation
-eta=1e-8 # quadratic regularization for regression
-mu=1e0 # weight of the OT linear term
-bias=True # estimate a bias
-
-ot_mapping=ot.da.OTDA_mapping_linear()
-ot_mapping.fit(xs,xt,mu=mu,eta=eta,bias=bias,numItermax = 20,verbose=True)
-
-X1tl=ot_mapping.predict(X1) # use the estimated mapping
-I1tl=minmax(mat2im(X1tl,I1.shape))
-
-# nonlinear mapping estimation
-eta=1e-2 # quadratic regularization for regression
-mu=1e0 # weight of the OT linear term
-bias=False # estimate a bias
-sigma=1 # sigma bandwidth for the Gaussian kernel
-
-
-ot_mapping_kernel=ot.da.OTDA_mapping_kernel()
-ot_mapping_kernel.fit(xs,xt,mu=mu,eta=eta,sigma=sigma,bias=bias,numItermax = 10,verbose=True)
-
-X1tn=ot_mapping_kernel.predict(X1) # use the estimated mapping
-I1tn=minmax(mat2im(X1tn,I1.shape))
-#%% plot images
-
-
-pl.figure(2,(10,8))
-
-pl.subplot(2,3,1)
-
-pl.imshow(I1)
-pl.title('Im. 1')
-
-pl.subplot(2,3,2)
-
-pl.imshow(I2)
-pl.title('Im. 2')
-
-
-pl.subplot(2,3,3)
-pl.imshow(I1t)
-pl.title('Im. 1 Interp LP')
-
-pl.subplot(2,3,4)
-pl.imshow(I1te)
-pl.title('Im. 1 Interp Entrop')
-
-
-pl.subplot(2,3,5)
-pl.imshow(I1tl)
-pl.title('Im. 1 Linear mapping')
-
-pl.subplot(2,3,6)
-pl.imshow(I1tn)
-pl.title('Im. 1 nonlinear mapping')
-
-pl.show()
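
What the mapping estimation adds for color transfer is out-of-sample prediction: the coupling itself only relates the nb sampled pixels, while a fitted map can be applied to every pixel of the full image, which is what predict(X1) does above. A NumPy-only sketch of applying a linear color map to a whole image at once (the map W and bias below are illustrative values, not ones estimated from the ocean images):

import numpy as np

def im2mat(img):
    """Reshape an image to a (n_pixels, n_channels) matrix."""
    return img.reshape((-1, img.shape[2]))

def mat2im(X, shape):
    """Reshape a pixel matrix back to an image."""
    return X.reshape(shape)

rng = np.random.RandomState(0)
I1 = rng.rand(32, 48, 3)                     # stand-in for the day image
W = np.array([[0.9, 0.05, 0.0],              # illustrative linear color map
              [0.0, 0.8, 0.1],
              [0.1, 0.0, 0.9]])
bias = np.array([0.05, 0.0, -0.02])

X1 = im2mat(I1)
X1_mapped = np.clip(X1.dot(W) + bias, 0, 1)  # map every pixel, then clip to [0, 1]
I1_mapped = mat2im(X1_mapped, I1.shape)
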
diff --git a/docs/source/auto_examples/plot_OTDA_mapping_color_images.rst b/docs/source/auto_examples/plot_OTDA_mapping_color_images.rst
deleted file mode 100644
index 60be3a4..0000000
--- a/docs/source/auto_examples/plot_OTDA_mapping_color_images.rst
+++ /dev/null
@@ -1,246 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_OTDA_mapping_color_images.py:
-
-
-====================================================================================
-OT for domain adaptation with image color adaptation [6] with mapping estimation [8]
-====================================================================================
-
-[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized
- discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, "Mapping estimation for
- discrete optimal transport", Neural Information Processing Systems (NIPS), 2016.
-
-
-
-
-
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OTDA_mapping_color_images_002.png
- :scale: 47
-
-
-.. rst-class:: sphx-glr-script-out
-
- Out::
-
- It. |Loss |Delta loss
- --------------------------------
- 0|3.624802e+02|0.000000e+00
- 1|3.547180e+02|-2.141395e-02
- 2|3.545494e+02|-4.753955e-04
- 3|3.544646e+02|-2.391784e-04
- 4|3.544126e+02|-1.466280e-04
- 5|3.543775e+02|-9.921805e-05
- 6|3.543518e+02|-7.245828e-05
- 7|3.543323e+02|-5.491924e-05
- 8|3.543170e+02|-4.342401e-05
- 9|3.543046e+02|-3.472174e-05
- 10|3.542945e+02|-2.878681e-05
- 11|3.542859e+02|-2.417065e-05
- 12|3.542786e+02|-2.058131e-05
- 13|3.542723e+02|-1.768262e-05
- 14|3.542668e+02|-1.551616e-05
- 15|3.542620e+02|-1.371909e-05
- 16|3.542577e+02|-1.213326e-05
- 17|3.542538e+02|-1.085481e-05
- 18|3.542531e+02|-1.996006e-06
- It. |Loss |Delta loss
- --------------------------------
- 0|3.555768e+02|0.000000e+00
- 1|3.510071e+02|-1.285164e-02
- 2|3.509110e+02|-2.736701e-04
- 3|3.508748e+02|-1.031476e-04
- 4|3.508506e+02|-6.910585e-05
- 5|3.508330e+02|-5.014608e-05
- 6|3.508195e+02|-3.839166e-05
- 7|3.508090e+02|-3.004218e-05
- 8|3.508005e+02|-2.417627e-05
- 9|3.507935e+02|-2.004621e-05
- 10|3.507876e+02|-1.681731e-05
-
-
-
-
-|
-
-
-.. code-block:: python
-
-
- import numpy as np
- import scipy.ndimage as spi
- import matplotlib.pylab as pl
- import ot
-
-
- #%% Loading images
-
- I1=spi.imread('../data/ocean_day.jpg').astype(np.float64)/256
- I2=spi.imread('../data/ocean_sunset.jpg').astype(np.float64)/256
-
- #%% Plot images
-
- pl.figure(1)
-
- pl.subplot(1,2,1)
- pl.imshow(I1)
- pl.title('Image 1')
-
- pl.subplot(1,2,2)
- pl.imshow(I2)
- pl.title('Image 2')
-
- pl.show()
-
- #%% Image conversion and dataset generation
-
- def im2mat(I):
- """Converts and image to matrix (one pixel per line)"""
- return I.reshape((I.shape[0]*I.shape[1],I.shape[2]))
-
- def mat2im(X,shape):
- """Converts back a matrix to an image"""
- return X.reshape(shape)
-
- X1=im2mat(I1)
- X2=im2mat(I2)
-
- # training samples
- nb=1000
- idx1=np.random.randint(X1.shape[0],size=(nb,))
- idx2=np.random.randint(X2.shape[0],size=(nb,))
-
- xs=X1[idx1,:]
- xt=X2[idx2,:]
-
- #%% Plot image distributions
-
-
- pl.figure(2,(10,5))
-
- pl.subplot(1,2,1)
- pl.scatter(xs[:,0],xs[:,2],c=xs)
- pl.axis([0,1,0,1])
- pl.xlabel('Red')
- pl.ylabel('Blue')
- pl.title('Image 1')
-
- pl.subplot(1,2,2)
- #pl.imshow(I2)
- pl.scatter(xt[:,0],xt[:,2],c=xt)
- pl.axis([0,1,0,1])
- pl.xlabel('Red')
- pl.ylabel('Blue')
- pl.title('Image 2')
-
- pl.show()
-
-
-
- #%% domain adaptation between images
- def minmax(I):
- return np.minimum(np.maximum(I,0),1)
- # LP problem
- da_emd=ot.da.OTDA() # init class
- da_emd.fit(xs,xt) # fit distributions
-
- X1t=da_emd.predict(X1) # out of sample
- I1t=minmax(mat2im(X1t,I1.shape))
-
- # sinkhorn regularization
- lambd=1e-1
- da_entrop=ot.da.OTDA_sinkhorn()
- da_entrop.fit(xs,xt,reg=lambd)
-
- X1te=da_entrop.predict(X1)
- I1te=minmax(mat2im(X1te,I1.shape))
-
- # linear mapping estimation
- eta=1e-8 # quadratic regularization for regression
- mu=1e0 # weight of the OT linear term
- bias=True # estimate a bias
-
- ot_mapping=ot.da.OTDA_mapping_linear()
- ot_mapping.fit(xs,xt,mu=mu,eta=eta,bias=bias,numItermax = 20,verbose=True)
-
- X1tl=ot_mapping.predict(X1) # use the estimated mapping
- I1tl=minmax(mat2im(X1tl,I1.shape))
-
- # nonlinear mapping estimation
- eta=1e-2 # quadratic regularization for regression
- mu=1e0 # weight of the OT linear term
- bias=False # estimate a bias
- sigma=1 # sigma bandwidth for the Gaussian kernel
-
-
- ot_mapping_kernel=ot.da.OTDA_mapping_kernel()
- ot_mapping_kernel.fit(xs,xt,mu=mu,eta=eta,sigma=sigma,bias=bias,numItermax = 10,verbose=True)
-
- X1tn=ot_mapping_kernel.predict(X1) # use the estimated mapping
- I1tn=minmax(mat2im(X1tn,I1.shape))
- #%% plot images
-
-
- pl.figure(2,(10,8))
-
- pl.subplot(2,3,1)
-
- pl.imshow(I1)
- pl.title('Im. 1')
-
- pl.subplot(2,3,2)
-
- pl.imshow(I2)
- pl.title('Im. 2')
-
-
- pl.subplot(2,3,3)
- pl.imshow(I1t)
- pl.title('Im. 1 Interp LP')
-
- pl.subplot(2,3,4)
- pl.imshow(I1te)
- pl.title('Im. 1 Interp Entrop')
-
-
- pl.subplot(2,3,5)
- pl.imshow(I1tl)
- pl.title('Im. 1 Linear mapping')
-
- pl.subplot(2,3,6)
- pl.imshow(I1tn)
- pl.title('Im. 1 nonlinear mapping')
-
- pl.show()
-
-**Total running time of the script:** ( 1 minutes 59.537 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_OTDA_mapping_color_images.py <plot_OTDA_mapping_color_images.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_OTDA_mapping_color_images.ipynb <plot_OTDA_mapping_color_images.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_OT_1D.ipynb b/docs/source/auto_examples/plot_OT_1D.ipynb
index 97c593e..3126b6f 100644
--- a/docs/source/auto_examples/plot_OT_1D.ipynb
+++ b/docs/source/auto_examples/plot_OT_1D.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# 1D optimal transport\n\n\n\n"
+ "\n# 1D optimal transport\n\n\nThis example illustrates the computation of EMD and Sinkhorn transport plans \nand their visualization.\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -69,7 +69,7 @@
},
{
"source": [
- "Solve EMD \n#############################################################################\n\n"
+ "Solve EMD\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
diff --git a/docs/source/auto_examples/plot_OT_1D.py b/docs/source/auto_examples/plot_OT_1D.py
index be6f5b3..a63f29a 100644
--- a/docs/source/auto_examples/plot_OT_1D.py
+++ b/docs/source/auto_examples/plot_OT_1D.py
@@ -4,6 +4,9 @@
1D optimal transport
====================
+This example illustrates the computation of EMD and Sinkhorn transport plans
+and their visualization.
+
"""
# Author: Remi Flamary <remi.flamary@unice.fr>
@@ -52,7 +55,7 @@ pl.figure(2, figsize=(5, 5))
ot.plot.plot1D_mat(a, b, M, 'Cost matrix M')
##############################################################################
-# Solve EMD
+# Solve EMD
##############################################################################
#%% EMD
diff --git a/docs/source/auto_examples/plot_OT_1D.rst b/docs/source/auto_examples/plot_OT_1D.rst
index 252d387..ff02180 100644
--- a/docs/source/auto_examples/plot_OT_1D.rst
+++ b/docs/source/auto_examples/plot_OT_1D.rst
@@ -7,6 +7,9 @@
1D optimal transport
====================
+This example illustrates the computation of EMD and Sinkhorn transport plans
+and their visualization.
+
@@ -97,7 +100,7 @@ Plot distributions and loss matrix
-Solve EMD
+Solve EMD
#############################################################################
@@ -165,7 +168,7 @@ Solve Sinkhorn
110|1.527180e-10|
-**Total running time of the script:** ( 0 minutes 1.065 seconds)
+**Total running time of the script:** ( 0 minutes 0.770 seconds)
@@ -184,4 +187,4 @@ Solve Sinkhorn
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
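
The new description for this example, computing EMD and Sinkhorn transport plans and visualizing them, reduces to the following minimal sketch; the histograms are built directly with numpy to keep it self-contained, and the regularization value is only an illustrative choice:

.. code-block:: python

    import numpy as np
    import ot

    n = 100                                  # number of histogram bins
    x = np.arange(n, dtype=np.float64)

    # two 1D Gaussian-shaped histograms, normalized to sum to 1
    a = np.exp(-0.5 * ((x - 20.) / 5.) ** 2)
    b = np.exp(-0.5 * ((x - 60.) / 10.) ** 2)
    a /= a.sum()
    b /= b.sum()

    # ground cost between bin positions (squared Euclidean by default)
    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()

    G0 = ot.emd(a, b, M)             # exact transport plan (EMD)
    Gs = ot.sinkhorn(a, b, M, 1e-3)  # entropic transport plan (Sinkhorn), reg = 1e-3
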
diff --git a/docs/source/auto_examples/plot_OT_2D_samples.ipynb b/docs/source/auto_examples/plot_OT_2D_samples.ipynb
index fc4ce50..0ed7367 100644
--- a/docs/source/auto_examples/plot_OT_2D_samples.ipynb
+++ b/docs/source/auto_examples/plot_OT_2D_samples.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# 2D Optimal transport between empirical distributions\n\n\n\n"
+        "\n# 2D Optimal transport between empirical distributions\n\n\nIllustration of 2D optimal transport between distributions that are weighted\nsums of Diracs. The OT matrix is plotted with the samples.\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -24,7 +24,79 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot\n\n#%% parameters and data generation\n\nn = 50 # nb samples\n\nmu_s = np.array([0, 0])\ncov_s = np.array([[1, 0], [0, 1]])\n\nmu_t = np.array([4, 4])\ncov_t = np.array([[1, -.8], [-.8, 1]])\n\nxs = ot.datasets.get_2D_samples_gauss(n, mu_s, cov_s)\nxt = ot.datasets.get_2D_samples_gauss(n, mu_t, cov_t)\n\na, b = np.ones((n,)) / n, np.ones((n,)) / n # uniform distribution on samples\n\n# loss matrix\nM = ot.dist(xs, xt)\nM /= M.max()\n\n#%% plot samples\n\npl.figure(1)\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.legend(loc=0)\npl.title('Source and target distributions')\n\npl.figure(2)\npl.imshow(M, interpolation='nearest')\npl.title('Cost matrix M')\n\n\n#%% EMD\n\nG0 = ot.emd(a, b, M)\n\npl.figure(3)\npl.imshow(G0, interpolation='nearest')\npl.title('OT matrix G0')\n\npl.figure(4)\not.plot.plot2D_samples_mat(xs, xt, G0, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.legend(loc=0)\npl.title('OT matrix with samples')\n\n\n#%% sinkhorn\n\n# reg term\nlambd = 1e-3\n\nGs = ot.sinkhorn(a, b, M, lambd)\n\npl.figure(5)\npl.imshow(Gs, interpolation='nearest')\npl.title('OT matrix sinkhorn')\n\npl.figure(6)\not.plot.plot2D_samples_mat(xs, xt, Gs, color=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.legend(loc=0)\npl.title('OT matrix Sinkhorn with samples')\n\npl.show()"
+ "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Generate data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% parameters and data generation\n\nn = 50 # nb samples\n\nmu_s = np.array([0, 0])\ncov_s = np.array([[1, 0], [0, 1]])\n\nmu_t = np.array([4, 4])\ncov_t = np.array([[1, -.8], [-.8, 1]])\n\nxs = ot.datasets.get_2D_samples_gauss(n, mu_s, cov_s)\nxt = ot.datasets.get_2D_samples_gauss(n, mu_t, cov_t)\n\na, b = np.ones((n,)) / n, np.ones((n,)) / n # uniform distribution on samples\n\n# loss matrix\nM = ot.dist(xs, xt)\nM /= M.max()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Plot data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% plot samples\n\npl.figure(1)\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.legend(loc=0)\npl.title('Source and target distributions')\n\npl.figure(2)\npl.imshow(M, interpolation='nearest')\npl.title('Cost matrix M')"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Compute EMD\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% EMD\n\nG0 = ot.emd(a, b, M)\n\npl.figure(3)\npl.imshow(G0, interpolation='nearest')\npl.title('OT matrix G0')\n\npl.figure(4)\not.plot.plot2D_samples_mat(xs, xt, G0, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.legend(loc=0)\npl.title('OT matrix with samples')"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Compute Sinkhorn\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% sinkhorn\n\n# reg term\nlambd = 1e-3\n\nGs = ot.sinkhorn(a, b, M, lambd)\n\npl.figure(5)\npl.imshow(Gs, interpolation='nearest')\npl.title('OT matrix sinkhorn')\n\npl.figure(6)\not.plot.plot2D_samples_mat(xs, xt, Gs, color=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.legend(loc=0)\npl.title('OT matrix Sinkhorn with samples')\n\npl.show()"
],
"outputs": [],
"metadata": {
diff --git a/docs/source/auto_examples/plot_OT_2D_samples.py b/docs/source/auto_examples/plot_OT_2D_samples.py
index 2a42dc0..f57d631 100644
--- a/docs/source/auto_examples/plot_OT_2D_samples.py
+++ b/docs/source/auto_examples/plot_OT_2D_samples.py
@@ -4,6 +4,9 @@
2D Optimal transport between empirical distributions
====================================================
+Illustration of 2D optimal transport between distributions that are weighted
+sums of Diracs. The OT matrix is plotted with the samples.
+
"""
# Author: Remi Flamary <remi.flamary@unice.fr>
@@ -14,6 +17,10 @@ import numpy as np
import matplotlib.pylab as pl
import ot
+##############################################################################
+# Generate data
+##############################################################################
+
#%% parameters and data generation
n = 50 # nb samples
@@ -33,6 +40,10 @@ a, b = np.ones((n,)) / n, np.ones((n,)) / n # uniform distribution on samples
M = ot.dist(xs, xt)
M /= M.max()
+##############################################################################
+# Plot data
+##############################################################################
+
#%% plot samples
pl.figure(1)
@@ -45,6 +56,9 @@ pl.figure(2)
pl.imshow(M, interpolation='nearest')
pl.title('Cost matrix M')
+##############################################################################
+# Compute EMD
+##############################################################################
#%% EMD
@@ -62,6 +76,10 @@ pl.legend(loc=0)
pl.title('OT matrix with samples')
+##############################################################################
+# Compute Sinkhorn
+##############################################################################
+
#%% sinkhorn
# reg term
diff --git a/docs/source/auto_examples/plot_OT_2D_samples.rst b/docs/source/auto_examples/plot_OT_2D_samples.rst
index c472c6a..f95ffaf 100644
--- a/docs/source/auto_examples/plot_OT_2D_samples.rst
+++ b/docs/source/auto_examples/plot_OT_2D_samples.rst
@@ -7,58 +7,37 @@
2D Optimal transport between empirical distributions
====================================================
+Illustration of 2D optimal transport between distributions that are weighted
+sums of Diracs. The OT matrix is plotted with the samples.
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png
- :scale: 47
+.. code-block:: python
- *
- .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_003.png
- :scale: 47
+ # Author: Remi Flamary <remi.flamary@unice.fr>
+ #
+ # License: MIT License
- *
+ import numpy as np
+ import matplotlib.pylab as pl
+ import ot
- .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_004.png
- :scale: 47
- *
- .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png
- :scale: 47
- *
- .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png
- :scale: 47
+Generate data
+#############################################################################
.. code-block:: python
- # Author: Remi Flamary <remi.flamary@unice.fr>
- #
- # License: MIT License
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
-
#%% parameters and data generation
n = 50 # nb samples
@@ -78,6 +57,20 @@
M = ot.dist(xs, xt)
M /= M.max()
+
+
+
+
+
+
+Plot data
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% plot samples
pl.figure(1)
@@ -91,6 +84,32 @@
pl.title('Cost matrix M')
+
+
+.. rst-class:: sphx-glr-horizontal
+
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_001.png
+ :scale: 47
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_002.png
+ :scale: 47
+
+
+
+
+Compute EMD
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% EMD
G0 = ot.emd(a, b, M)
@@ -107,6 +126,33 @@
pl.title('OT matrix with samples')
+
+
+
+.. rst-class:: sphx-glr-horizontal
+
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_005.png
+ :scale: 47
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_006.png
+ :scale: 47
+
+
+
+
+Compute Sinkhorn
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% sinkhorn
# reg term
@@ -127,7 +173,25 @@
pl.show()
-**Total running time of the script:** ( 0 minutes 2.908 seconds)
+
+
+.. rst-class:: sphx-glr-horizontal
+
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_009.png
+ :scale: 47
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_2D_samples_010.png
+ :scale: 47
+
+
+
+
+**Total running time of the script:** ( 0 minutes 1.990 seconds)
@@ -146,4 +210,4 @@
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
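
Stripped of the plotting code, the restructured 2D example above boils down to the sketch below, with the same calls as in the new "Compute EMD" and "Compute Sinkhorn" cells:

.. code-block:: python

    import numpy as np
    import ot

    n = 50
    mu_s, cov_s = np.array([0, 0]), np.array([[1, 0], [0, 1]])
    mu_t, cov_t = np.array([4, 4]), np.array([[1, -.8], [-.8, 1]])

    xs = ot.datasets.get_2D_samples_gauss(n, mu_s, cov_s)
    xt = ot.datasets.get_2D_samples_gauss(n, mu_t, cov_t)

    a, b = np.ones((n,)) / n, np.ones((n,)) / n   # uniform weights on the samples

    M = ot.dist(xs, xt)              # pairwise squared Euclidean cost
    M /= M.max()

    G0 = ot.emd(a, b, M)             # exact OT plan between the two point clouds
    Gs = ot.sinkhorn(a, b, M, 1e-3)  # entropic OT plan, reg = 1e-3
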
diff --git a/docs/source/auto_examples/plot_OT_L1_vs_L2.ipynb b/docs/source/auto_examples/plot_OT_L1_vs_L2.ipynb
index 04ef5c8..e738db7 100644
--- a/docs/source/auto_examples/plot_OT_L1_vs_L2.ipynb
+++ b/docs/source/auto_examples/plot_OT_L1_vs_L2.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# 2D Optimal transport for different metrics\n\n\nStole the figure idea from Fig. 1 and 2 in\nhttps://arxiv.org/pdf/1706.07650.pdf\n\n\n\n"
+        "\n# 2D Optimal transport for different metrics\n\n\n2D OT on empirical distributions with different ground metrics.\n\nStole the figure idea from Fig. 1 and 2 in\nhttps://arxiv.org/pdf/1706.07650.pdf\n\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -24,7 +24,79 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot\n\n#%% parameters and data generation\n\nfor data in range(2):\n\n if data:\n n = 20 # nb samples\n xs = np.zeros((n, 2))\n xs[:, 0] = np.arange(n) + 1\n xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...\n\n xt = np.zeros((n, 2))\n xt[:, 1] = np.arange(n) + 1\n else:\n\n n = 50 # nb samples\n xtot = np.zeros((n + 1, 2))\n xtot[:, 0] = np.cos(\n (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)\n xtot[:, 1] = np.sin(\n (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)\n\n xs = xtot[:n, :]\n xt = xtot[1:, :]\n\n a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples\n\n # loss matrix\n M1 = ot.dist(xs, xt, metric='euclidean')\n M1 /= M1.max()\n\n # loss matrix\n M2 = ot.dist(xs, xt, metric='sqeuclidean')\n M2 /= M2.max()\n\n # loss matrix\n Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))\n Mp /= Mp.max()\n\n #%% plot samples\n\n pl.figure(1 + 3 * data, figsize=(7, 3))\n pl.clf()\n pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\n pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\n pl.axis('equal')\n pl.title('Source and traget distributions')\n\n pl.figure(2 + 3 * data, figsize=(7, 3))\n\n pl.subplot(1, 3, 1)\n pl.imshow(M1, interpolation='nearest')\n pl.title('Euclidean cost')\n\n pl.subplot(1, 3, 2)\n pl.imshow(M2, interpolation='nearest')\n pl.title('Squared Euclidean cost')\n\n pl.subplot(1, 3, 3)\n pl.imshow(Mp, interpolation='nearest')\n pl.title('Sqrt Euclidean cost')\n pl.tight_layout()\n\n #%% EMD\n G1 = ot.emd(a, b, M1)\n G2 = ot.emd(a, b, M2)\n Gp = ot.emd(a, b, Mp)\n\n pl.figure(3 + 3 * data, figsize=(7, 3))\n\n pl.subplot(1, 3, 1)\n ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])\n pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\n pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\n pl.axis('equal')\n # pl.legend(loc=0)\n pl.title('OT Euclidean')\n\n pl.subplot(1, 3, 2)\n ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])\n pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\n pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\n pl.axis('equal')\n # pl.legend(loc=0)\n pl.title('OT squared Euclidean')\n\n pl.subplot(1, 3, 3)\n ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])\n pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\n pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\n pl.axis('equal')\n # pl.legend(loc=0)\n pl.title('OT sqrt Euclidean')\n pl.tight_layout()\n\npl.show()"
+ "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Dataset 1 : uniform sampling\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "n = 20 # nb samples\nxs = np.zeros((n, 2))\nxs[:, 0] = np.arange(n) + 1\nxs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...\n\nxt = np.zeros((n, 2))\nxt[:, 1] = np.arange(n) + 1\n\na, b = ot.unif(n), ot.unif(n) # uniform distribution on samples\n\n# loss matrix\nM1 = ot.dist(xs, xt, metric='euclidean')\nM1 /= M1.max()\n\n# loss matrix\nM2 = ot.dist(xs, xt, metric='sqeuclidean')\nM2 /= M2.max()\n\n# loss matrix\nMp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))\nMp /= Mp.max()\n\n# Data\npl.figure(1, figsize=(7, 3))\npl.clf()\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\npl.title('Source and traget distributions')\n\n\n# Cost matrices\npl.figure(2, figsize=(7, 3))\n\npl.subplot(1, 3, 1)\npl.imshow(M1, interpolation='nearest')\npl.title('Euclidean cost')\n\npl.subplot(1, 3, 2)\npl.imshow(M2, interpolation='nearest')\npl.title('Squared Euclidean cost')\n\npl.subplot(1, 3, 3)\npl.imshow(Mp, interpolation='nearest')\npl.title('Sqrt Euclidean cost')\npl.tight_layout()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Dataset 1 : Plot OT Matrices\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% EMD\nG1 = ot.emd(a, b, M1)\nG2 = ot.emd(a, b, M2)\nGp = ot.emd(a, b, Mp)\n\n# OT matrices\npl.figure(3, figsize=(7, 3))\n\npl.subplot(1, 3, 1)\not.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\n# pl.legend(loc=0)\npl.title('OT Euclidean')\n\npl.subplot(1, 3, 2)\not.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\n# pl.legend(loc=0)\npl.title('OT squared Euclidean')\n\npl.subplot(1, 3, 3)\not.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\n# pl.legend(loc=0)\npl.title('OT sqrt Euclidean')\npl.tight_layout()\n\npl.show()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Dataset 2 : Partial circle\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "n = 50 # nb samples\nxtot = np.zeros((n + 1, 2))\nxtot[:, 0] = np.cos(\n (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)\nxtot[:, 1] = np.sin(\n (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)\n\nxs = xtot[:n, :]\nxt = xtot[1:, :]\n\na, b = ot.unif(n), ot.unif(n) # uniform distribution on samples\n\n# loss matrix\nM1 = ot.dist(xs, xt, metric='euclidean')\nM1 /= M1.max()\n\n# loss matrix\nM2 = ot.dist(xs, xt, metric='sqeuclidean')\nM2 /= M2.max()\n\n# loss matrix\nMp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))\nMp /= Mp.max()\n\n\n# Data\npl.figure(4, figsize=(7, 3))\npl.clf()\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\npl.title('Source and traget distributions')\n\n\n# Cost matrices\npl.figure(5, figsize=(7, 3))\n\npl.subplot(1, 3, 1)\npl.imshow(M1, interpolation='nearest')\npl.title('Euclidean cost')\n\npl.subplot(1, 3, 2)\npl.imshow(M2, interpolation='nearest')\npl.title('Squared Euclidean cost')\n\npl.subplot(1, 3, 3)\npl.imshow(Mp, interpolation='nearest')\npl.title('Sqrt Euclidean cost')\npl.tight_layout()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Dataset 2 : Plot OT Matrices\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% EMD\nG1 = ot.emd(a, b, M1)\nG2 = ot.emd(a, b, M2)\nGp = ot.emd(a, b, Mp)\n\n# OT matrices\npl.figure(6, figsize=(7, 3))\n\npl.subplot(1, 3, 1)\not.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\n# pl.legend(loc=0)\npl.title('OT Euclidean')\n\npl.subplot(1, 3, 2)\not.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\n# pl.legend(loc=0)\npl.title('OT squared Euclidean')\n\npl.subplot(1, 3, 3)\not.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])\npl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')\npl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')\npl.axis('equal')\n# pl.legend(loc=0)\npl.title('OT sqrt Euclidean')\npl.tight_layout()\n\npl.show()"
],
"outputs": [],
"metadata": {
diff --git a/docs/source/auto_examples/plot_OT_L1_vs_L2.py b/docs/source/auto_examples/plot_OT_L1_vs_L2.py
index dfc9462..77bde22 100644
--- a/docs/source/auto_examples/plot_OT_L1_vs_L2.py
+++ b/docs/source/auto_examples/plot_OT_L1_vs_L2.py
@@ -4,6 +4,8 @@
2D Optimal transport for different metrics
==========================================
+2D OT on empirical distributions with different ground metrics.
+
Stole the figure idea from Fig. 1 and 2 in
https://arxiv.org/pdf/1706.07650.pdf
@@ -18,98 +20,190 @@ import numpy as np
import matplotlib.pylab as pl
import ot
-#%% parameters and data generation
-
-for data in range(2):
-
- if data:
- n = 20 # nb samples
- xs = np.zeros((n, 2))
- xs[:, 0] = np.arange(n) + 1
- xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...
-
- xt = np.zeros((n, 2))
- xt[:, 1] = np.arange(n) + 1
- else:
-
- n = 50 # nb samples
- xtot = np.zeros((n + 1, 2))
- xtot[:, 0] = np.cos(
- (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
- xtot[:, 1] = np.sin(
- (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
-
- xs = xtot[:n, :]
- xt = xtot[1:, :]
-
- a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
-
- # loss matrix
- M1 = ot.dist(xs, xt, metric='euclidean')
- M1 /= M1.max()
-
- # loss matrix
- M2 = ot.dist(xs, xt, metric='sqeuclidean')
- M2 /= M2.max()
-
- # loss matrix
- Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
- Mp /= Mp.max()
-
- #%% plot samples
-
- pl.figure(1 + 3 * data, figsize=(7, 3))
- pl.clf()
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- pl.title('Source and traget distributions')
-
- pl.figure(2 + 3 * data, figsize=(7, 3))
-
- pl.subplot(1, 3, 1)
- pl.imshow(M1, interpolation='nearest')
- pl.title('Euclidean cost')
-
- pl.subplot(1, 3, 2)
- pl.imshow(M2, interpolation='nearest')
- pl.title('Squared Euclidean cost')
-
- pl.subplot(1, 3, 3)
- pl.imshow(Mp, interpolation='nearest')
- pl.title('Sqrt Euclidean cost')
- pl.tight_layout()
-
- #%% EMD
- G1 = ot.emd(a, b, M1)
- G2 = ot.emd(a, b, M2)
- Gp = ot.emd(a, b, Mp)
-
- pl.figure(3 + 3 * data, figsize=(7, 3))
-
- pl.subplot(1, 3, 1)
- ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT Euclidean')
-
- pl.subplot(1, 3, 2)
- ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT squared Euclidean')
-
- pl.subplot(1, 3, 3)
- ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT sqrt Euclidean')
- pl.tight_layout()
+##############################################################################
+# Dataset 1 : uniform sampling
+##############################################################################
+
+n = 20 # nb samples
+xs = np.zeros((n, 2))
+xs[:, 0] = np.arange(n) + 1
+xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...
+
+xt = np.zeros((n, 2))
+xt[:, 1] = np.arange(n) + 1
+
+a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
+
+# loss matrix
+M1 = ot.dist(xs, xt, metric='euclidean')
+M1 /= M1.max()
+
+# loss matrix
+M2 = ot.dist(xs, xt, metric='sqeuclidean')
+M2 /= M2.max()
+
+# loss matrix
+Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
+Mp /= Mp.max()
+
+# Data
+pl.figure(1, figsize=(7, 3))
+pl.clf()
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+pl.title('Source and target distributions')
+
+
+# Cost matrices
+pl.figure(2, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+pl.imshow(M1, interpolation='nearest')
+pl.title('Euclidean cost')
+
+pl.subplot(1, 3, 2)
+pl.imshow(M2, interpolation='nearest')
+pl.title('Squared Euclidean cost')
+
+pl.subplot(1, 3, 3)
+pl.imshow(Mp, interpolation='nearest')
+pl.title('Sqrt Euclidean cost')
+pl.tight_layout()
+
+##############################################################################
+# Dataset 1 : Plot OT Matrices
+##############################################################################
+
+
+
+#%% EMD
+G1 = ot.emd(a, b, M1)
+G2 = ot.emd(a, b, M2)
+Gp = ot.emd(a, b, Mp)
+
+# OT matrices
+pl.figure(3, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT Euclidean')
+
+pl.subplot(1, 3, 2)
+ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT squared Euclidean')
+
+pl.subplot(1, 3, 3)
+ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT sqrt Euclidean')
+pl.tight_layout()
+
+pl.show()
+
+
+##############################################################################
+# Dataset 2 : Partial circle
+##############################################################################
+
+n = 50 # nb samples
+xtot = np.zeros((n + 1, 2))
+xtot[:, 0] = np.cos(
+ (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
+xtot[:, 1] = np.sin(
+ (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
+
+xs = xtot[:n, :]
+xt = xtot[1:, :]
+
+a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
+
+# loss matrix
+M1 = ot.dist(xs, xt, metric='euclidean')
+M1 /= M1.max()
+
+# loss matrix
+M2 = ot.dist(xs, xt, metric='sqeuclidean')
+M2 /= M2.max()
+
+# loss matrix
+Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
+Mp /= Mp.max()
+
+
+# Data
+pl.figure(4, figsize=(7, 3))
+pl.clf()
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+pl.title('Source and target distributions')
+
+
+# Cost matrices
+pl.figure(5, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+pl.imshow(M1, interpolation='nearest')
+pl.title('Euclidean cost')
+
+pl.subplot(1, 3, 2)
+pl.imshow(M2, interpolation='nearest')
+pl.title('Squared Euclidean cost')
+
+pl.subplot(1, 3, 3)
+pl.imshow(Mp, interpolation='nearest')
+pl.title('Sqrt Euclidean cost')
+pl.tight_layout()
+
+##############################################################################
+# Dataset 2 : Plot OT Matrices
+##############################################################################
+
+
+
+#%% EMD
+G1 = ot.emd(a, b, M1)
+G2 = ot.emd(a, b, M2)
+Gp = ot.emd(a, b, Mp)
+
+# OT matrices
+pl.figure(6, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT Euclidean')
+
+pl.subplot(1, 3, 2)
+ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT squared Euclidean')
+
+pl.subplot(1, 3, 3)
+ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT sqrt Euclidean')
+pl.tight_layout()
pl.show()
diff --git a/docs/source/auto_examples/plot_OT_L1_vs_L2.rst b/docs/source/auto_examples/plot_OT_L1_vs_L2.rst
index ba52bfe..83a7491 100644
--- a/docs/source/auto_examples/plot_OT_L1_vs_L2.rst
+++ b/docs/source/auto_examples/plot_OT_L1_vs_L2.rst
@@ -7,6 +7,8 @@
2D Optimal transport for different metrics
==========================================
+2D OT on empirical distributions with different ground metrics.
+
Stole the figure idea from Fig. 1 and 2 in
https://arxiv.org/pdf/1706.07650.pdf
@@ -14,6 +16,80 @@ https://arxiv.org/pdf/1706.07650.pdf
+.. code-block:: python
+
+
+ # Author: Remi Flamary <remi.flamary@unice.fr>
+ #
+ # License: MIT License
+
+ import numpy as np
+ import matplotlib.pylab as pl
+ import ot
+
+
+
+
+
+
+
+Dataset 1 : uniform sampling
+#############################################################################
+
+
+
+.. code-block:: python
+
+
+ n = 20 # nb samples
+ xs = np.zeros((n, 2))
+ xs[:, 0] = np.arange(n) + 1
+ xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...
+
+ xt = np.zeros((n, 2))
+ xt[:, 1] = np.arange(n) + 1
+
+ a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
+
+ # loss matrix
+ M1 = ot.dist(xs, xt, metric='euclidean')
+ M1 /= M1.max()
+
+ # loss matrix
+ M2 = ot.dist(xs, xt, metric='sqeuclidean')
+ M2 /= M2.max()
+
+ # loss matrix
+ Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
+ Mp /= Mp.max()
+
+ # Data
+ pl.figure(1, figsize=(7, 3))
+ pl.clf()
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+    pl.title('Source and target distributions')
+
+
+ # Cost matrices
+ pl.figure(2, figsize=(7, 3))
+
+ pl.subplot(1, 3, 1)
+ pl.imshow(M1, interpolation='nearest')
+ pl.title('Euclidean cost')
+
+ pl.subplot(1, 3, 2)
+ pl.imshow(M2, interpolation='nearest')
+ pl.title('Squared Euclidean cost')
+
+ pl.subplot(1, 3, 3)
+ pl.imshow(Mp, interpolation='nearest')
+ pl.title('Sqrt Euclidean cost')
+ pl.tight_layout()
+
+
+
.. rst-class:: sphx-glr-horizontal
@@ -28,138 +104,195 @@ https://arxiv.org/pdf/1706.07650.pdf
.. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_002.png
:scale: 47
- *
- .. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_003.png
- :scale: 47
- *
- .. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_004.png
- :scale: 47
+Dataset 1 : Plot OT Matrices
+#############################################################################
+
+
+
+.. code-block:: python
+
+
+
+
+ #%% EMD
+ G1 = ot.emd(a, b, M1)
+ G2 = ot.emd(a, b, M2)
+ Gp = ot.emd(a, b, Mp)
+
+ # OT matrices
+ pl.figure(3, figsize=(7, 3))
+
+ pl.subplot(1, 3, 1)
+ ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+ # pl.legend(loc=0)
+ pl.title('OT Euclidean')
+
+ pl.subplot(1, 3, 2)
+ ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+ # pl.legend(loc=0)
+ pl.title('OT squared Euclidean')
+
+ pl.subplot(1, 3, 3)
+ ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+ # pl.legend(loc=0)
+ pl.title('OT sqrt Euclidean')
+ pl.tight_layout()
+
+ pl.show()
+
+
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png
+ :align: center
+
+
+
+
+Dataset 2 : Partial circle
+#############################################################################
+
+
+
+.. code-block:: python
+
+
+ n = 50 # nb samples
+ xtot = np.zeros((n + 1, 2))
+ xtot[:, 0] = np.cos(
+ (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
+ xtot[:, 1] = np.sin(
+ (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
+
+ xs = xtot[:n, :]
+ xt = xtot[1:, :]
+
+ a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
+
+ # loss matrix
+ M1 = ot.dist(xs, xt, metric='euclidean')
+ M1 /= M1.max()
+
+ # loss matrix
+ M2 = ot.dist(xs, xt, metric='sqeuclidean')
+ M2 /= M2.max()
+
+ # loss matrix
+ Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
+ Mp /= Mp.max()
+
+
+ # Data
+ pl.figure(4, figsize=(7, 3))
+ pl.clf()
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+    pl.title('Source and target distributions')
+
+
+ # Cost matrices
+ pl.figure(5, figsize=(7, 3))
+
+ pl.subplot(1, 3, 1)
+ pl.imshow(M1, interpolation='nearest')
+ pl.title('Euclidean cost')
+
+ pl.subplot(1, 3, 2)
+ pl.imshow(M2, interpolation='nearest')
+ pl.title('Squared Euclidean cost')
+
+ pl.subplot(1, 3, 3)
+ pl.imshow(Mp, interpolation='nearest')
+ pl.title('Sqrt Euclidean cost')
+ pl.tight_layout()
+
+
+
+
+.. rst-class:: sphx-glr-horizontal
+
*
- .. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_005.png
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_007.png
:scale: 47
*
- .. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_006.png
+ .. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_008.png
:scale: 47
+Dataset 2 : Plot OT Matrices
+#############################################################################
+
+
.. code-block:: python
- # Author: Remi Flamary <remi.flamary@unice.fr>
- #
- # License: MIT License
- import numpy as np
- import matplotlib.pylab as pl
- import ot
- #%% parameters and data generation
-
- for data in range(2):
-
- if data:
- n = 20 # nb samples
- xs = np.zeros((n, 2))
- xs[:, 0] = np.arange(n) + 1
- xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...
-
- xt = np.zeros((n, 2))
- xt[:, 1] = np.arange(n) + 1
- else:
-
- n = 50 # nb samples
- xtot = np.zeros((n + 1, 2))
- xtot[:, 0] = np.cos(
- (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
- xtot[:, 1] = np.sin(
- (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
-
- xs = xtot[:n, :]
- xt = xtot[1:, :]
-
- a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
-
- # loss matrix
- M1 = ot.dist(xs, xt, metric='euclidean')
- M1 /= M1.max()
-
- # loss matrix
- M2 = ot.dist(xs, xt, metric='sqeuclidean')
- M2 /= M2.max()
-
- # loss matrix
- Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
- Mp /= Mp.max()
-
- #%% plot samples
-
- pl.figure(1 + 3 * data, figsize=(7, 3))
- pl.clf()
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- pl.title('Source and traget distributions')
-
- pl.figure(2 + 3 * data, figsize=(7, 3))
-
- pl.subplot(1, 3, 1)
- pl.imshow(M1, interpolation='nearest')
- pl.title('Euclidean cost')
-
- pl.subplot(1, 3, 2)
- pl.imshow(M2, interpolation='nearest')
- pl.title('Squared Euclidean cost')
-
- pl.subplot(1, 3, 3)
- pl.imshow(Mp, interpolation='nearest')
- pl.title('Sqrt Euclidean cost')
- pl.tight_layout()
-
- #%% EMD
- G1 = ot.emd(a, b, M1)
- G2 = ot.emd(a, b, M2)
- Gp = ot.emd(a, b, Mp)
-
- pl.figure(3 + 3 * data, figsize=(7, 3))
-
- pl.subplot(1, 3, 1)
- ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT Euclidean')
-
- pl.subplot(1, 3, 2)
- ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT squared Euclidean')
-
- pl.subplot(1, 3, 3)
- ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT sqrt Euclidean')
- pl.tight_layout()
+ #%% EMD
+ G1 = ot.emd(a, b, M1)
+ G2 = ot.emd(a, b, M2)
+ Gp = ot.emd(a, b, Mp)
+
+ # OT matrices
+ pl.figure(6, figsize=(7, 3))
+
+ pl.subplot(1, 3, 1)
+ ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+ # pl.legend(loc=0)
+ pl.title('OT Euclidean')
+
+ pl.subplot(1, 3, 2)
+ ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+ # pl.legend(loc=0)
+ pl.title('OT squared Euclidean')
+
+ pl.subplot(1, 3, 3)
+ ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
+ pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+ pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+ pl.axis('equal')
+ # pl.legend(loc=0)
+ pl.title('OT sqrt Euclidean')
+ pl.tight_layout()
pl.show()
-**Total running time of the script:** ( 0 minutes 1.906 seconds)
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_OT_L1_vs_L2_011.png
+ :align: center
+
+
+
+
+**Total running time of the script:** ( 0 minutes 1.217 seconds)
@@ -178,4 +311,4 @@ https://arxiv.org/pdf/1706.07650.pdf
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
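
The point of this example is that the same two point sets yield different transport plans depending on the ground metric. A condensed sketch of the "Dataset 1" part, keeping only the metric comparison from the code above:

.. code-block:: python

    import numpy as np
    import ot

    n = 20
    xs = np.zeros((n, 2))
    xs[:, 0] = np.arange(n) + 1
    xs[:, 1] = (np.arange(n) + 1) * -0.001   # tiny offset keeps the problem strictly convex

    xt = np.zeros((n, 2))
    xt[:, 1] = np.arange(n) + 1

    a, b = ot.unif(n), ot.unif(n)            # uniform weights

    # three ground costs on the same points
    M1 = ot.dist(xs, xt, metric='euclidean')
    M1 /= M1.max()
    M2 = ot.dist(xs, xt, metric='sqeuclidean')
    M2 /= M2.max()
    Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
    Mp /= Mp.max()

    # the optimal plans differ because each cost ranks displacements differently
    G1 = ot.emd(a, b, M1)
    G2 = ot.emd(a, b, M2)
    Gp = ot.emd(a, b, Mp)
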
diff --git a/docs/source/auto_examples/plot_OT_conv.ipynb b/docs/source/auto_examples/plot_OT_conv.ipynb
deleted file mode 100644
index 7fc4af0..0000000
--- a/docs/source/auto_examples/plot_OT_conv.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n# 1D Wasserstein barycenter demo\n\n\n\n@author: rflamary\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "import numpy as np\nimport matplotlib.pylab as pl\nimport ot\nfrom mpl_toolkits.mplot3d import Axes3D #necessary for 3d plot even if not used\nimport scipy as sp\nimport scipy.signal as sps\n#%% parameters\n\nn=10 # nb bins\n\n# bin positions\nx=np.arange(n,dtype=np.float64)\n\nxx,yy=np.meshgrid(x,x)\n\n\nxpos=np.hstack((xx.reshape(-1,1),yy.reshape(-1,1)))\n\nM=ot.dist(xpos)\n\n\nI0=((xx-5)**2+(yy-5)**2<3**2)*1.0\nI1=((xx-7)**2+(yy-7)**2<3**2)*1.0\n\nI0/=I0.sum()\nI1/=I1.sum()\n\ni0=I0.ravel()\ni1=I1.ravel()\n\nM=M[i0>0,:][:,i1>0].copy()\ni0=i0[i0>0]\ni1=i1[i1>0]\nItot=np.concatenate((I0[:,:,np.newaxis],I1[:,:,np.newaxis]),2)\n\n\n#%% plot the distributions\n\npl.figure(1)\npl.subplot(2,2,1)\npl.imshow(I0)\npl.subplot(2,2,2)\npl.imshow(I1)\n\n\n#%% barycenter computation\n\nalpha=0.5 # 0<=alpha<=1\nweights=np.array([1-alpha,alpha])\n\n\ndef conv2(I,k):\n return sp.ndimage.convolve1d(sp.ndimage.convolve1d(I,k,axis=1),k,axis=0)\n\ndef conv2n(I,k):\n res=np.zeros_like(I)\n for i in range(I.shape[2]):\n res[:,:,i]=conv2(I[:,:,i],k)\n return res\n\n\ndef get_1Dkernel(reg,thr=1e-16,wmax=1024):\n w=max(min(wmax,2*int((-np.log(thr)*reg)**(.5))),3)\n x=np.arange(w,dtype=np.float64)\n return np.exp(-((x-w/2)**2)/reg)\n \nthr=1e-16\nreg=1e0\n\nk=get_1Dkernel(reg)\npl.figure(2)\npl.plot(k)\n\nI05=conv2(I0,k)\n\npl.figure(1)\npl.subplot(2,2,1)\npl.imshow(I0)\npl.subplot(2,2,2)\npl.imshow(I05)\n\n#%%\n\nG=ot.emd(i0,i1,M)\nr0=np.sum(M*G)\n\nreg=1e-1\nGs=ot.bregman.sinkhorn_knopp(i0,i1,M,reg=reg)\nrs=np.sum(M*Gs)\n\n#%%\n\ndef mylog(u):\n tmp=np.log(u)\n tmp[np.isnan(tmp)]=0\n return tmp\n\ndef sinkhorn_conv(a,b, reg, numItermax = 1000, stopThr=1e-9, verbose=False, log=False,**kwargs):\n\n\n a=np.asarray(a,dtype=np.float64)\n b=np.asarray(b,dtype=np.float64)\n \n \n if len(b.shape)>2:\n nbb=b.shape[2]\n a=a[:,:,np.newaxis]\n else:\n nbb=0\n \n\n if log:\n log={'err':[]}\n\n # we assume that no distances are null except those of the diagonal of distances\n if nbb:\n u = np.ones((a.shape[0],a.shape[1],nbb))/(np.prod(a.shape[:2]))\n v = np.ones((a.shape[0],a.shape[1],nbb))/(np.prod(b.shape[:2]))\n a0=1.0/(np.prod(b.shape[:2]))\n else:\n u = np.ones((a.shape[0],a.shape[1]))/(np.prod(a.shape[:2]))\n v = np.ones((a.shape[0],a.shape[1]))/(np.prod(b.shape[:2]))\n a0=1.0/(np.prod(b.shape[:2]))\n \n \n k=get_1Dkernel(reg)\n \n if nbb:\n K=lambda I: conv2n(I,k)\n else:\n K=lambda I: conv2(I,k)\n\n cpt = 0\n err=1\n while (err>stopThr and cpt<numItermax):\n uprev = u\n vprev = v\n \n v = np.divide(b, K(u))\n u = np.divide(a, K(v))\n\n if (np.any(np.isnan(u)) or np.any(np.isnan(v)) \n or np.any(np.isinf(u)) or np.any(np.isinf(v))):\n # we have reached the machine precision\n # come back to previous solution and quit loop\n print('Warning: numerical errors at iteration', cpt)\n u = uprev\n v = vprev\n break\n if cpt%10==0:\n # we can speed up the process by checking for the error only all the 10th iterations\n\n err = np.sum((u-uprev)**2)/np.sum((u)**2)+np.sum((v-vprev)**2)/np.sum((v)**2)\n\n if log:\n log['err'].append(err)\n\n if verbose:\n if cpt%200 ==0:\n print('{:5s}|{:12s}'.format('It.','Err')+'\\n'+'-'*19)\n print('{:5d}|{:8e}|'.format(cpt,err))\n cpt = cpt +1\n if log:\n log['u']=u\n log['v']=v\n \n if nbb: #return only loss \n res=np.zeros((nbb))\n for i in range(nbb):\n res[i]=np.sum(u[:,i].reshape((-1,1))*K*v[:,i].reshape((1,-1))*M)\n if log:\n return res,log\n else:\n return res \n \n else: # return OT matrix\n res=reg*a0*np.sum(a*mylog(u+(u==0))+b*mylog(v+(v==0)))\n if log:\n \n return res,log\n 
else:\n return res\n\nreg=1e0\nr,log=sinkhorn_conv(I0,I1,reg,verbose=True,log=True)\na=I0\nb=I1\nu=log['u']\nv=log['v']\n#%% barycenter interpolation"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.12",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-} \ No newline at end of file
diff --git a/docs/source/auto_examples/plot_OT_conv.py b/docs/source/auto_examples/plot_OT_conv.py
deleted file mode 100644
index a86e7a2..0000000
--- a/docs/source/auto_examples/plot_OT_conv.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-==============================
-1D Wasserstein barycenter demo
-==============================
-
-
-@author: rflamary
-"""
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-from mpl_toolkits.mplot3d import Axes3D #necessary for 3d plot even if not used
-import scipy as sp
-import scipy.signal as sps
-#%% parameters
-
-n=10 # nb bins
-
-# bin positions
-x=np.arange(n,dtype=np.float64)
-
-xx,yy=np.meshgrid(x,x)
-
-
-xpos=np.hstack((xx.reshape(-1,1),yy.reshape(-1,1)))
-
-M=ot.dist(xpos)
-
-
-I0=((xx-5)**2+(yy-5)**2<3**2)*1.0
-I1=((xx-7)**2+(yy-7)**2<3**2)*1.0
-
-I0/=I0.sum()
-I1/=I1.sum()
-
-i0=I0.ravel()
-i1=I1.ravel()
-
-M=M[i0>0,:][:,i1>0].copy()
-i0=i0[i0>0]
-i1=i1[i1>0]
-Itot=np.concatenate((I0[:,:,np.newaxis],I1[:,:,np.newaxis]),2)
-
-
-#%% plot the distributions
-
-pl.figure(1)
-pl.subplot(2,2,1)
-pl.imshow(I0)
-pl.subplot(2,2,2)
-pl.imshow(I1)
-
-
-#%% barycenter computation
-
-alpha=0.5 # 0<=alpha<=1
-weights=np.array([1-alpha,alpha])
-
-
-def conv2(I,k):
- return sp.ndimage.convolve1d(sp.ndimage.convolve1d(I,k,axis=1),k,axis=0)
-
-def conv2n(I,k):
- res=np.zeros_like(I)
- for i in range(I.shape[2]):
- res[:,:,i]=conv2(I[:,:,i],k)
- return res
-
-
-def get_1Dkernel(reg,thr=1e-16,wmax=1024):
- w=max(min(wmax,2*int((-np.log(thr)*reg)**(.5))),3)
- x=np.arange(w,dtype=np.float64)
- return np.exp(-((x-w/2)**2)/reg)
-
-thr=1e-16
-reg=1e0
-
-k=get_1Dkernel(reg)
-pl.figure(2)
-pl.plot(k)
-
-I05=conv2(I0,k)
-
-pl.figure(1)
-pl.subplot(2,2,1)
-pl.imshow(I0)
-pl.subplot(2,2,2)
-pl.imshow(I05)
-
-#%%
-
-G=ot.emd(i0,i1,M)
-r0=np.sum(M*G)
-
-reg=1e-1
-Gs=ot.bregman.sinkhorn_knopp(i0,i1,M,reg=reg)
-rs=np.sum(M*Gs)
-
-#%%
-
-def mylog(u):
- tmp=np.log(u)
- tmp[np.isnan(tmp)]=0
- return tmp
-
-def sinkhorn_conv(a,b, reg, numItermax = 1000, stopThr=1e-9, verbose=False, log=False,**kwargs):
-
-
- a=np.asarray(a,dtype=np.float64)
- b=np.asarray(b,dtype=np.float64)
-
-
- if len(b.shape)>2:
- nbb=b.shape[2]
- a=a[:,:,np.newaxis]
- else:
- nbb=0
-
-
- if log:
- log={'err':[]}
-
- # we assume that no distances are null except those of the diagonal of distances
- if nbb:
- u = np.ones((a.shape[0],a.shape[1],nbb))/(np.prod(a.shape[:2]))
- v = np.ones((a.shape[0],a.shape[1],nbb))/(np.prod(b.shape[:2]))
- a0=1.0/(np.prod(b.shape[:2]))
- else:
- u = np.ones((a.shape[0],a.shape[1]))/(np.prod(a.shape[:2]))
- v = np.ones((a.shape[0],a.shape[1]))/(np.prod(b.shape[:2]))
- a0=1.0/(np.prod(b.shape[:2]))
-
-
- k=get_1Dkernel(reg)
-
- if nbb:
- K=lambda I: conv2n(I,k)
- else:
- K=lambda I: conv2(I,k)
-
- cpt = 0
- err=1
- while (err>stopThr and cpt<numItermax):
- uprev = u
- vprev = v
-
- v = np.divide(b, K(u))
- u = np.divide(a, K(v))
-
- if (np.any(np.isnan(u)) or np.any(np.isnan(v))
- or np.any(np.isinf(u)) or np.any(np.isinf(v))):
- # we have reached the machine precision
- # come back to previous solution and quit loop
- print('Warning: numerical errors at iteration', cpt)
- u = uprev
- v = vprev
- break
- if cpt%10==0:
- # we can speed up the process by checking for the error only all the 10th iterations
-
- err = np.sum((u-uprev)**2)/np.sum((u)**2)+np.sum((v-vprev)**2)/np.sum((v)**2)
-
- if log:
- log['err'].append(err)
-
- if verbose:
- if cpt%200 ==0:
- print('{:5s}|{:12s}'.format('It.','Err')+'\n'+'-'*19)
- print('{:5d}|{:8e}|'.format(cpt,err))
- cpt = cpt +1
- if log:
- log['u']=u
- log['v']=v
-
- if nbb: #return only loss
- res=np.zeros((nbb))
- for i in range(nbb):
- res[i]=np.sum(u[:,i].reshape((-1,1))*K*v[:,i].reshape((1,-1))*M)
- if log:
- return res,log
- else:
- return res
-
- else: # return OT matrix
- res=reg*a0*np.sum(a*mylog(u+(u==0))+b*mylog(v+(v==0)))
- if log:
-
- return res,log
- else:
- return res
-
-reg=1e0
-r,log=sinkhorn_conv(I0,I1,reg,verbose=True,log=True)
-a=I0
-b=I1
-u=log['u']
-v=log['v']
-#%% barycenter interpolation
diff --git a/docs/source/auto_examples/plot_OT_conv.rst b/docs/source/auto_examples/plot_OT_conv.rst
deleted file mode 100644
index 039bbdb..0000000
--- a/docs/source/auto_examples/plot_OT_conv.rst
+++ /dev/null
@@ -1,241 +0,0 @@
-
-
-.. _sphx_glr_auto_examples_plot_OT_conv.py:
-
-
-==============================
-1D Wasserstein barycenter demo
-==============================
-
-
-@author: rflamary
-
-
-
-
-.. code-block:: pytb
-
- Traceback (most recent call last):
- File "/home/rflamary/.local/lib/python2.7/site-packages/sphinx_gallery/gen_rst.py", line 518, in execute_code_block
- exec(code_block, example_globals)
- File "<string>", line 86, in <module>
- TypeError: unsupported operand type(s) for *: 'float' and 'Mock'
-
-
-
-
-
-.. code-block:: python
-
-
- import numpy as np
- import matplotlib.pylab as pl
- import ot
- from mpl_toolkits.mplot3d import Axes3D #necessary for 3d plot even if not used
- import scipy as sp
- import scipy.signal as sps
- #%% parameters
-
- n=10 # nb bins
-
- # bin positions
- x=np.arange(n,dtype=np.float64)
-
- xx,yy=np.meshgrid(x,x)
-
-
- xpos=np.hstack((xx.reshape(-1,1),yy.reshape(-1,1)))
-
- M=ot.dist(xpos)
-
-
- I0=((xx-5)**2+(yy-5)**2<3**2)*1.0
- I1=((xx-7)**2+(yy-7)**2<3**2)*1.0
-
- I0/=I0.sum()
- I1/=I1.sum()
-
- i0=I0.ravel()
- i1=I1.ravel()
-
- M=M[i0>0,:][:,i1>0].copy()
- i0=i0[i0>0]
- i1=i1[i1>0]
- Itot=np.concatenate((I0[:,:,np.newaxis],I1[:,:,np.newaxis]),2)
-
-
- #%% plot the distributions
-
- pl.figure(1)
- pl.subplot(2,2,1)
- pl.imshow(I0)
- pl.subplot(2,2,2)
- pl.imshow(I1)
-
-
- #%% barycenter computation
-
- alpha=0.5 # 0<=alpha<=1
- weights=np.array([1-alpha,alpha])
-
-
- def conv2(I,k):
- return sp.ndimage.convolve1d(sp.ndimage.convolve1d(I,k,axis=1),k,axis=0)
-
- def conv2n(I,k):
- res=np.zeros_like(I)
- for i in range(I.shape[2]):
- res[:,:,i]=conv2(I[:,:,i],k)
- return res
-
-
- def get_1Dkernel(reg,thr=1e-16,wmax=1024):
- w=max(min(wmax,2*int((-np.log(thr)*reg)**(.5))),3)
- x=np.arange(w,dtype=np.float64)
- return np.exp(-((x-w/2)**2)/reg)
-
- thr=1e-16
- reg=1e0
-
- k=get_1Dkernel(reg)
- pl.figure(2)
- pl.plot(k)
-
- I05=conv2(I0,k)
-
- pl.figure(1)
- pl.subplot(2,2,1)
- pl.imshow(I0)
- pl.subplot(2,2,2)
- pl.imshow(I05)
-
- #%%
-
- G=ot.emd(i0,i1,M)
- r0=np.sum(M*G)
-
- reg=1e-1
- Gs=ot.bregman.sinkhorn_knopp(i0,i1,M,reg=reg)
- rs=np.sum(M*Gs)
-
- #%%
-
- def mylog(u):
- tmp=np.log(u)
- tmp[np.isnan(tmp)]=0
- return tmp
-
- def sinkhorn_conv(a,b, reg, numItermax = 1000, stopThr=1e-9, verbose=False, log=False,**kwargs):
-
-
- a=np.asarray(a,dtype=np.float64)
- b=np.asarray(b,dtype=np.float64)
-
-
- if len(b.shape)>2:
- nbb=b.shape[2]
- a=a[:,:,np.newaxis]
- else:
- nbb=0
-
-
- if log:
- log={'err':[]}
-
- # we assume that no distances are null except those of the diagonal of distances
- if nbb:
- u = np.ones((a.shape[0],a.shape[1],nbb))/(np.prod(a.shape[:2]))
- v = np.ones((a.shape[0],a.shape[1],nbb))/(np.prod(b.shape[:2]))
- a0=1.0/(np.prod(b.shape[:2]))
- else:
- u = np.ones((a.shape[0],a.shape[1]))/(np.prod(a.shape[:2]))
- v = np.ones((a.shape[0],a.shape[1]))/(np.prod(b.shape[:2]))
- a0=1.0/(np.prod(b.shape[:2]))
-
-
- k=get_1Dkernel(reg)
-
- if nbb:
- K=lambda I: conv2n(I,k)
- else:
- K=lambda I: conv2(I,k)
-
- cpt = 0
- err=1
- while (err>stopThr and cpt<numItermax):
- uprev = u
- vprev = v
-
- v = np.divide(b, K(u))
- u = np.divide(a, K(v))
-
- if (np.any(np.isnan(u)) or np.any(np.isnan(v))
- or np.any(np.isinf(u)) or np.any(np.isinf(v))):
- # we have reached the machine precision
- # come back to previous solution and quit loop
- print('Warning: numerical errors at iteration', cpt)
- u = uprev
- v = vprev
- break
- if cpt%10==0:
- # we can speed up the process by checking for the error only all the 10th iterations
-
- err = np.sum((u-uprev)**2)/np.sum((u)**2)+np.sum((v-vprev)**2)/np.sum((v)**2)
-
- if log:
- log['err'].append(err)
-
- if verbose:
- if cpt%200 ==0:
- print('{:5s}|{:12s}'.format('It.','Err')+'\n'+'-'*19)
- print('{:5d}|{:8e}|'.format(cpt,err))
- cpt = cpt +1
- if log:
- log['u']=u
- log['v']=v
-
- if nbb: #return only loss
- res=np.zeros((nbb))
- for i in range(nbb):
- res[i]=np.sum(u[:,i].reshape((-1,1))*K*v[:,i].reshape((1,-1))*M)
- if log:
- return res,log
- else:
- return res
-
- else: # return OT matrix
- res=reg*a0*np.sum(a*mylog(u+(u==0))+b*mylog(v+(v==0)))
- if log:
-
- return res,log
- else:
- return res
-
- reg=1e0
- r,log=sinkhorn_conv(I0,I1,reg,verbose=True,log=True)
- a=I0
- b=I1
- u=log['u']
- v=log['v']
- #%% barycenter interpolation
-
-**Total running time of the script:** ( 0 minutes 0.000 seconds)
-
-
-
-.. container:: sphx-glr-footer
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Python source code: plot_OT_conv.py <plot_OT_conv.py>`
-
-
-
- .. container:: sphx-glr-download
-
- :download:`Download Jupyter notebook: plot_OT_conv.ipynb <plot_OT_conv.ipynb>`
-
-.. rst-class:: sphx-glr-signature
-
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
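
The deleted ``plot_OT_conv`` script replaced the dense kernel matrix of Sinkhorn with a separable Gaussian convolution. The core trick, applying the Gibbs kernel as two 1D convolutions, is summarized in the sketch below; the function names are illustrative renamings of the ``get_1Dkernel`` and ``conv2`` helpers in the removed code:

.. code-block:: python

    import numpy as np
    import scipy.ndimage as ndi

    def gauss_kernel_1d(reg, thr=1e-16, wmax=1024):
        # truncated 1D Gaussian; width chosen so the dropped tails fall below thr
        w = max(min(wmax, 2 * int((-np.log(thr) * reg) ** .5)), 3)
        x = np.arange(w, dtype=np.float64)
        return np.exp(-((x - w / 2) ** 2) / reg)

    def apply_gibbs_kernel(I, k):
        # separable 2D kernel: one 1D convolution along each axis
        return ndi.convolve1d(ndi.convolve1d(I, k, axis=1), k, axis=0)

    reg = 1e0
    k = gauss_kernel_1d(reg)
    u = np.random.rand(32, 32)          # a scaling vector reshaped on the pixel grid
    Ku = apply_gibbs_kernel(u, k)       # the K(u) step of a convolutional Sinkhorn update
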
diff --git a/docs/source/auto_examples/plot_WDA.ipynb b/docs/source/auto_examples/plot_WDA.ipynb
index 5568128..8e0db41 100644
--- a/docs/source/auto_examples/plot_WDA.ipynb
+++ b/docs/source/auto_examples/plot_WDA.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# Wasserstein Discriminant Analysis\n\n\n\n"
+        "\n# Wasserstein Discriminant Analysis\n\n\nThis example illustrates the use of WDA as proposed in [11].\n\n\n[11] Flamary, R., Cuturi, M., Courty, N., & Rakotomamonjy, A. (2016). \nWasserstein Discriminant Analysis.\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -24,7 +24,97 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\n\nfrom ot.dr import wda, fda\n\n\n#%% parameters\n\nn = 1000 # nb samples in source and target datasets\nnz = 0.2\n\n# generate circle dataset\nt = np.random.rand(n) * 2 * np.pi\nys = np.floor((np.arange(n) * 1.0 / n * 3)) + 1\nxs = np.concatenate(\n (np.cos(t).reshape((-1, 1)), np.sin(t).reshape((-1, 1))), 1)\nxs = xs * ys.reshape(-1, 1) + nz * np.random.randn(n, 2)\n\nt = np.random.rand(n) * 2 * np.pi\nyt = np.floor((np.arange(n) * 1.0 / n * 3)) + 1\nxt = np.concatenate(\n (np.cos(t).reshape((-1, 1)), np.sin(t).reshape((-1, 1))), 1)\nxt = xt * yt.reshape(-1, 1) + nz * np.random.randn(n, 2)\n\nnbnoise = 8\n\nxs = np.hstack((xs, np.random.randn(n, nbnoise)))\nxt = np.hstack((xt, np.random.randn(n, nbnoise)))\n\n#%% plot samples\npl.figure(1, figsize=(6.4, 3.5))\n\npl.subplot(1, 2, 1)\npl.scatter(xt[:, 0], xt[:, 1], c=ys, marker='+', label='Source samples')\npl.legend(loc=0)\npl.title('Discriminant dimensions')\n\npl.subplot(1, 2, 2)\npl.scatter(xt[:, 2], xt[:, 3], c=ys, marker='+', label='Source samples')\npl.legend(loc=0)\npl.title('Other dimensions')\npl.tight_layout()\n\n#%% Compute FDA\np = 2\n\nPfda, projfda = fda(xs, ys, p)\n\n#%% Compute WDA\np = 2\nreg = 1e0\nk = 10\nmaxiter = 100\n\nPwda, projwda = wda(xs, ys, p, reg, k, maxiter=maxiter)\n\n#%% plot samples\n\nxsp = projfda(xs)\nxtp = projfda(xt)\n\nxspw = projwda(xs)\nxtpw = projwda(xt)\n\npl.figure(2)\n\npl.subplot(2, 2, 1)\npl.scatter(xsp[:, 0], xsp[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected training samples FDA')\n\npl.subplot(2, 2, 2)\npl.scatter(xtp[:, 0], xtp[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected test samples FDA')\n\npl.subplot(2, 2, 3)\npl.scatter(xspw[:, 0], xspw[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected training samples WDA')\n\npl.subplot(2, 2, 4)\npl.scatter(xtpw[:, 0], xtpw[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected test samples WDA')\npl.tight_layout()\n\npl.show()"
+ "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\n\nfrom ot.dr import wda, fda"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Generate data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% parameters\n\nn = 1000 # nb samples in source and target datasets\nnz = 0.2\n\n# generate circle dataset\nt = np.random.rand(n) * 2 * np.pi\nys = np.floor((np.arange(n) * 1.0 / n * 3)) + 1\nxs = np.concatenate(\n (np.cos(t).reshape((-1, 1)), np.sin(t).reshape((-1, 1))), 1)\nxs = xs * ys.reshape(-1, 1) + nz * np.random.randn(n, 2)\n\nt = np.random.rand(n) * 2 * np.pi\nyt = np.floor((np.arange(n) * 1.0 / n * 3)) + 1\nxt = np.concatenate(\n (np.cos(t).reshape((-1, 1)), np.sin(t).reshape((-1, 1))), 1)\nxt = xt * yt.reshape(-1, 1) + nz * np.random.randn(n, 2)\n\nnbnoise = 8\n\nxs = np.hstack((xs, np.random.randn(n, nbnoise)))\nxt = np.hstack((xt, np.random.randn(n, nbnoise)))"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Plot data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% plot samples\npl.figure(1, figsize=(6.4, 3.5))\n\npl.subplot(1, 2, 1)\npl.scatter(xt[:, 0], xt[:, 1], c=ys, marker='+', label='Source samples')\npl.legend(loc=0)\npl.title('Discriminant dimensions')\n\npl.subplot(1, 2, 2)\npl.scatter(xt[:, 2], xt[:, 3], c=ys, marker='+', label='Source samples')\npl.legend(loc=0)\npl.title('Other dimensions')\npl.tight_layout()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Compute Fisher Discriminant Analysis\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% Compute FDA\np = 2\n\nPfda, projfda = fda(xs, ys, p)"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Compute Wasserstein Discriminant Analysis\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% Compute WDA\np = 2\nreg = 1e0\nk = 10\nmaxiter = 100\n\nPwda, projwda = wda(xs, ys, p, reg, k, maxiter=maxiter)"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Plot 2D projections\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% plot samples\n\nxsp = projfda(xs)\nxtp = projfda(xt)\n\nxspw = projwda(xs)\nxtpw = projwda(xt)\n\npl.figure(2)\n\npl.subplot(2, 2, 1)\npl.scatter(xsp[:, 0], xsp[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected training samples FDA')\n\npl.subplot(2, 2, 2)\npl.scatter(xtp[:, 0], xtp[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected test samples FDA')\n\npl.subplot(2, 2, 3)\npl.scatter(xspw[:, 0], xspw[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected training samples WDA')\n\npl.subplot(2, 2, 4)\npl.scatter(xtpw[:, 0], xtpw[:, 1], c=ys, marker='+', label='Projected samples')\npl.legend(loc=0)\npl.title('Projected test samples WDA')\npl.tight_layout()\n\npl.show()"
],
"outputs": [],
"metadata": {
diff --git a/docs/source/auto_examples/plot_WDA.py b/docs/source/auto_examples/plot_WDA.py
index 42789f2..06a2e38 100644
--- a/docs/source/auto_examples/plot_WDA.py
+++ b/docs/source/auto_examples/plot_WDA.py
@@ -4,6 +4,12 @@
Wasserstein Discriminant Analysis
=================================
+This example illustrates the use of WDA as proposed in [11].
+
+
+[11] Flamary, R., Cuturi, M., Courty, N., & Rakotomamonjy, A. (2016).
+Wasserstein Discriminant Analysis.
+
"""
# Author: Remi Flamary <remi.flamary@unice.fr>
@@ -16,6 +22,10 @@ import matplotlib.pylab as pl
from ot.dr import wda, fda
+##############################################################################
+# Generate data
+##############################################################################
+
#%% parameters
n = 1000 # nb samples in source and target datasets
@@ -39,6 +49,10 @@ nbnoise = 8
xs = np.hstack((xs, np.random.randn(n, nbnoise)))
xt = np.hstack((xt, np.random.randn(n, nbnoise)))
+##############################################################################
+# Plot data
+##############################################################################
+
#%% plot samples
pl.figure(1, figsize=(6.4, 3.5))
@@ -53,11 +67,19 @@ pl.legend(loc=0)
pl.title('Other dimensions')
pl.tight_layout()
+##############################################################################
+# Compute Fisher Discriminant Analysis
+##############################################################################
+
#%% Compute FDA
p = 2
Pfda, projfda = fda(xs, ys, p)
+##############################################################################
+# Compute Wasserstein Discriminant Analysis
+##############################################################################
+
#%% Compute WDA
p = 2
reg = 1e0
@@ -66,6 +88,11 @@ maxiter = 100
Pwda, projwda = wda(xs, ys, p, reg, k, maxiter=maxiter)
+
+##############################################################################
+# Plot 2D projections
+##############################################################################
+
#%% plot samples
xsp = projfda(xs)
diff --git a/docs/source/auto_examples/plot_WDA.rst b/docs/source/auto_examples/plot_WDA.rst
index 76ebaf5..8c9ee29 100644
--- a/docs/source/auto_examples/plot_WDA.rst
+++ b/docs/source/auto_examples/plot_WDA.rst
@@ -7,86 +7,40 @@
Wasserstein Discriminant Analysis
=================================
+This example illustrates the use of WDA as proposed in [11].
+[11] Flamary, R., Cuturi, M., Courty, N., & Rakotomamonjy, A. (2016).
+Wasserstein Discriminant Analysis.
-.. rst-class:: sphx-glr-horizontal
- *
+.. code-block:: python
- .. image:: /auto_examples/images/sphx_glr_plot_WDA_001.png
- :scale: 47
- *
+ # Author: Remi Flamary <remi.flamary@unice.fr>
+ #
+ # License: MIT License
- .. image:: /auto_examples/images/sphx_glr_plot_WDA_002.png
- :scale: 47
+ import numpy as np
+ import matplotlib.pylab as pl
+ from ot.dr import wda, fda
-.. rst-class:: sphx-glr-script-out
- Out::
- Compiling cost function...
- Computing gradient of cost function...
- iter cost val grad. norm
- 1 +8.9741888001949222e-01 3.71269078e-01
- 2 +4.9103998133976140e-01 3.46687543e-01
- 3 +4.2142651893148553e-01 1.04789602e-01
- 4 +4.1573609749588841e-01 5.21726648e-02
- 5 +4.1486046805261961e-01 5.35335513e-02
- 6 +4.1315953904635105e-01 2.17803599e-02
- 7 +4.1313030162717523e-01 6.06901182e-02
- 8 +4.1301511591963386e-01 5.88598758e-02
- 9 +4.1258349404769817e-01 5.14307874e-02
- 10 +4.1139242901051226e-01 2.03198793e-02
- 11 +4.1113798965164017e-01 1.18944721e-02
- 12 +4.1103446820878486e-01 2.21783648e-02
- 13 +4.1076586830791861e-01 9.51495863e-03
- 14 +4.1036935287519144e-01 3.74973214e-02
- 15 +4.0958729714575060e-01 1.23810902e-02
- 16 +4.0898266309095005e-01 4.01999918e-02
- 17 +4.0816076944357715e-01 2.27240277e-02
- 18 +4.0788116701894767e-01 4.42815945e-02
- 19 +4.0695443744952403e-01 3.28464304e-02
- 20 +4.0293834480911150e-01 7.76000681e-02
- 21 +3.8488003705202750e-01 1.49378022e-01
- 22 +3.0767344927282614e-01 2.15432117e-01
- 23 +2.3849425361868334e-01 1.07942382e-01
- 24 +2.3845125762548214e-01 1.08953278e-01
- 25 +2.3828007730494005e-01 1.07934830e-01
- 26 +2.3760839060570119e-01 1.03822134e-01
- 27 +2.3514215179705886e-01 8.67263481e-02
- 28 +2.2978886197588613e-01 9.26609306e-03
- 29 +2.2972671019495342e-01 2.59476089e-03
- 30 +2.2972355865247496e-01 1.57205146e-03
- 31 +2.2972296662351968e-01 1.29300760e-03
- 32 +2.2972181557051569e-01 8.82375756e-05
- 33 +2.2972181277025336e-01 6.20536544e-05
- 34 +2.2972181023486152e-01 7.01884014e-06
- 35 +2.2972181020400181e-01 1.60415765e-06
- 36 +2.2972181020236590e-01 2.44290966e-07
- Terminated - min grad norm reached after 36 iterations, 13.41 seconds.
-
-
-
-
-|
-.. code-block:: python
- # Author: Remi Flamary <remi.flamary@unice.fr>
- #
- # License: MIT License
- import numpy as np
- import matplotlib.pylab as pl
+Generate data
+#############################################################################
- from ot.dr import wda, fda
+
+
+.. code-block:: python
#%% parameters
@@ -112,6 +66,20 @@ Wasserstein Discriminant Analysis
xs = np.hstack((xs, np.random.randn(n, nbnoise)))
xt = np.hstack((xt, np.random.randn(n, nbnoise)))
+
+
+
+
+
+
+Plot data
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% plot samples
pl.figure(1, figsize=(6.4, 3.5))
@@ -126,11 +94,42 @@ Wasserstein Discriminant Analysis
pl.title('Other dimensions')
pl.tight_layout()
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_WDA_001.png
+ :align: center
+
+
+
+
+Compute Fisher Discriminant Analysis
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% Compute FDA
p = 2
Pfda, projfda = fda(xs, ys, p)
+
+
+
+
+
+
+Compute Wasserstein Discriminant Analysis
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% Compute WDA
p = 2
reg = 1e0
@@ -139,6 +138,45 @@ Wasserstein Discriminant Analysis
Pwda, projwda = wda(xs, ys, p, reg, k, maxiter=maxiter)
+
+
+
+
+
+.. rst-class:: sphx-glr-script-out
+
+ Out::
+
+ Compiling cost function...
+ Computing gradient of cost function...
+ iter cost val grad. norm
+ 1 +7.7038877420882157e-01 6.30647522e-01
+ 2 +3.3969600919721271e-01 2.83791849e-01
+ 3 +3.0014000762425608e-01 2.56139137e-01
+ 4 +2.3397191702411621e-01 6.41134216e-02
+ 5 +2.3107227220070231e-01 2.24837190e-02
+ 6 +2.3072327156158298e-01 1.71334761e-03
+ 7 +2.3072143589220098e-01 6.30059431e-04
+ 8 +2.3072133109125159e-01 4.88673790e-04
+ 9 +2.3072119579341774e-01 1.74129117e-04
+ 10 +2.3072118662364521e-01 1.27046386e-04
+ 11 +2.3072118228917746e-01 9.70877451e-05
+ 12 +2.3072117734120351e-01 4.17292699e-05
+ 13 +2.3072117623493599e-01 4.46062100e-06
+ 14 +2.3072117622383431e-01 1.59801454e-06
+ 15 +2.3072117622300498e-01 1.12117391e-06
+ 16 +2.3072117622220378e-01 4.14581994e-08
+ Terminated - min grad norm reached after 16 iterations, 7.77 seconds.
+
+
+Plot 2D projections
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% plot samples
xsp = projfda(xs)
@@ -172,7 +210,15 @@ Wasserstein Discriminant Analysis
pl.show()
-**Total running time of the script:** ( 0 minutes 19.853 seconds)
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_WDA_003.png
+ :align: center
+
+
+
+
+**Total running time of the script:** ( 0 minutes 8.568 seconds)
@@ -191,4 +237,4 @@ Wasserstein Discriminant Analysis
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
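A minimal sketch of the ot.dr calls exercised by the WDA example above, on toy labelled data with the same parameter values (illustrative only; in this version ot.dr may also require optional dependencies such as autograd and pymanopt):

.. code-block:: python

    import numpy as np
    from ot.dr import fda, wda

    # toy labelled data: two class-dependent dimensions plus pure-noise dimensions
    rng = np.random.RandomState(0)
    n = 300
    y = rng.randint(1, 4, n)                              # three classes, labels 1..3
    X = np.hstack((y[:, None] + 0.3 * rng.randn(n, 2),    # informative part
                   rng.randn(n, 8)))                      # 8 uninformative dimensions

    p = 2          # dimension of the discriminant subspace
    reg = 1e0      # parameters taken from the gallery example above
    k = 10
    maxiter = 100

    Pfda, projfda = fda(X, y, p)                           # Fisher Discriminant Analysis
    Pwda, projwda = wda(X, y, p, reg, k, maxiter=maxiter)  # Wasserstein counterpart

    # projfda / projwda are callables that project (new) samples onto the subspace
    print(projfda(X).shape, projwda(X).shape)              # both (n, p)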
diff --git a/docs/source/auto_examples/plot_barycenter_1D.ipynb b/docs/source/auto_examples/plot_barycenter_1D.ipynb
index 239b8b8..657782d 100644
--- a/docs/source/auto_examples/plot_barycenter_1D.ipynb
+++ b/docs/source/auto_examples/plot_barycenter_1D.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# 1D Wasserstein barycenter demo\n\n\n\n"
+ "\n# 1D Wasserstein barycenter demo\n\n\nThis example illustrates the computation of regularized Wassersyein Barycenter \nas proposed in [3].\n\n\n[3] Benamou, J. D., Carlier, G., Cuturi, M., Nenna, L., & Peyr\u00e9, G. (2015). \nIterative Bregman projections for regularized transportation problems\nSIAM Journal on Scientific Computing, 37(2), A1111-A1138.\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -24,7 +24,79 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot\n# necessary for 3d plot even if not used\nfrom mpl_toolkits.mplot3d import Axes3D # noqa\nfrom matplotlib.collections import PolyCollection\n\n\n#%% parameters\n\nn = 100 # nb bins\n\n# bin positions\nx = np.arange(n, dtype=np.float64)\n\n# Gaussian distributions\na1 = ot.datasets.get_1D_gauss(n, m=20, s=5) # m= mean, s= std\na2 = ot.datasets.get_1D_gauss(n, m=60, s=8)\n\n# creating matrix A containing all distributions\nA = np.vstack((a1, a2)).T\nn_distributions = A.shape[1]\n\n# loss matrix + normalization\nM = ot.utils.dist0(n)\nM /= M.max()\n\n#%% plot the distributions\n\npl.figure(1, figsize=(6.4, 3))\nfor i in range(n_distributions):\n pl.plot(x, A[:, i])\npl.title('Distributions')\npl.tight_layout()\n\n#%% barycenter computation\n\nalpha = 0.2 # 0<=alpha<=1\nweights = np.array([1 - alpha, alpha])\n\n# l2bary\nbary_l2 = A.dot(weights)\n\n# wasserstein\nreg = 1e-3\nbary_wass = ot.bregman.barycenter(A, M, reg, weights)\n\npl.figure(2)\npl.clf()\npl.subplot(2, 1, 1)\nfor i in range(n_distributions):\n pl.plot(x, A[:, i])\npl.title('Distributions')\n\npl.subplot(2, 1, 2)\npl.plot(x, bary_l2, 'r', label='l2')\npl.plot(x, bary_wass, 'g', label='Wasserstein')\npl.legend()\npl.title('Barycenters')\npl.tight_layout()\n\n#%% barycenter interpolation\n\nn_alpha = 11\nalpha_list = np.linspace(0, 1, n_alpha)\n\n\nB_l2 = np.zeros((n, n_alpha))\n\nB_wass = np.copy(B_l2)\n\nfor i in range(0, n_alpha):\n alpha = alpha_list[i]\n weights = np.array([1 - alpha, alpha])\n B_l2[:, i] = A.dot(weights)\n B_wass[:, i] = ot.bregman.barycenter(A, M, reg, weights)\n\n#%% plot interpolation\n\npl.figure(3)\n\ncmap = pl.cm.get_cmap('viridis')\nverts = []\nzs = alpha_list\nfor i, z in enumerate(zs):\n ys = B_l2[:, i]\n verts.append(list(zip(x, ys)))\n\nax = pl.gcf().gca(projection='3d')\n\npoly = PolyCollection(verts, facecolors=[cmap(a) for a in alpha_list])\npoly.set_alpha(0.7)\nax.add_collection3d(poly, zs=zs, zdir='y')\nax.set_xlabel('x')\nax.set_xlim3d(0, n)\nax.set_ylabel('$\\\\alpha$')\nax.set_ylim3d(0, 1)\nax.set_zlabel('')\nax.set_zlim3d(0, B_l2.max() * 1.01)\npl.title('Barycenter interpolation with l2')\npl.tight_layout()\n\npl.figure(4)\ncmap = pl.cm.get_cmap('viridis')\nverts = []\nzs = alpha_list\nfor i, z in enumerate(zs):\n ys = B_wass[:, i]\n verts.append(list(zip(x, ys)))\n\nax = pl.gcf().gca(projection='3d')\n\npoly = PolyCollection(verts, facecolors=[cmap(a) for a in alpha_list])\npoly.set_alpha(0.7)\nax.add_collection3d(poly, zs=zs, zdir='y')\nax.set_xlabel('x')\nax.set_xlim3d(0, n)\nax.set_ylabel('$\\\\alpha$')\nax.set_ylim3d(0, 1)\nax.set_zlabel('')\nax.set_zlim3d(0, B_l2.max() * 1.01)\npl.title('Barycenter interpolation with Wasserstein')\npl.tight_layout()\n\npl.show()"
+ "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot\n# necessary for 3d plot even if not used\nfrom mpl_toolkits.mplot3d import Axes3D # noqa\nfrom matplotlib.collections import PolyCollection"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Generate data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% parameters\n\nn = 100 # nb bins\n\n# bin positions\nx = np.arange(n, dtype=np.float64)\n\n# Gaussian distributions\na1 = ot.datasets.get_1D_gauss(n, m=20, s=5) # m= mean, s= std\na2 = ot.datasets.get_1D_gauss(n, m=60, s=8)\n\n# creating matrix A containing all distributions\nA = np.vstack((a1, a2)).T\nn_distributions = A.shape[1]\n\n# loss matrix + normalization\nM = ot.utils.dist0(n)\nM /= M.max()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Plot data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% plot the distributions\n\npl.figure(1, figsize=(6.4, 3))\nfor i in range(n_distributions):\n pl.plot(x, A[:, i])\npl.title('Distributions')\npl.tight_layout()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Barycenter computation\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% barycenter computation\n\nalpha = 0.2 # 0<=alpha<=1\nweights = np.array([1 - alpha, alpha])\n\n# l2bary\nbary_l2 = A.dot(weights)\n\n# wasserstein\nreg = 1e-3\nbary_wass = ot.bregman.barycenter(A, M, reg, weights)\n\npl.figure(2)\npl.clf()\npl.subplot(2, 1, 1)\nfor i in range(n_distributions):\n pl.plot(x, A[:, i])\npl.title('Distributions')\n\npl.subplot(2, 1, 2)\npl.plot(x, bary_l2, 'r', label='l2')\npl.plot(x, bary_wass, 'g', label='Wasserstein')\npl.legend()\npl.title('Barycenters')\npl.tight_layout()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Barycentric interpolation\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% barycenter interpolation\n\nn_alpha = 11\nalpha_list = np.linspace(0, 1, n_alpha)\n\n\nB_l2 = np.zeros((n, n_alpha))\n\nB_wass = np.copy(B_l2)\n\nfor i in range(0, n_alpha):\n alpha = alpha_list[i]\n weights = np.array([1 - alpha, alpha])\n B_l2[:, i] = A.dot(weights)\n B_wass[:, i] = ot.bregman.barycenter(A, M, reg, weights)\n\n#%% plot interpolation\n\npl.figure(3)\n\ncmap = pl.cm.get_cmap('viridis')\nverts = []\nzs = alpha_list\nfor i, z in enumerate(zs):\n ys = B_l2[:, i]\n verts.append(list(zip(x, ys)))\n\nax = pl.gcf().gca(projection='3d')\n\npoly = PolyCollection(verts, facecolors=[cmap(a) for a in alpha_list])\npoly.set_alpha(0.7)\nax.add_collection3d(poly, zs=zs, zdir='y')\nax.set_xlabel('x')\nax.set_xlim3d(0, n)\nax.set_ylabel('$\\\\alpha$')\nax.set_ylim3d(0, 1)\nax.set_zlabel('')\nax.set_zlim3d(0, B_l2.max() * 1.01)\npl.title('Barycenter interpolation with l2')\npl.tight_layout()\n\npl.figure(4)\ncmap = pl.cm.get_cmap('viridis')\nverts = []\nzs = alpha_list\nfor i, z in enumerate(zs):\n ys = B_wass[:, i]\n verts.append(list(zip(x, ys)))\n\nax = pl.gcf().gca(projection='3d')\n\npoly = PolyCollection(verts, facecolors=[cmap(a) for a in alpha_list])\npoly.set_alpha(0.7)\nax.add_collection3d(poly, zs=zs, zdir='y')\nax.set_xlabel('x')\nax.set_xlim3d(0, n)\nax.set_ylabel('$\\\\alpha$')\nax.set_ylim3d(0, 1)\nax.set_zlabel('')\nax.set_zlim3d(0, B_l2.max() * 1.01)\npl.title('Barycenter interpolation with Wasserstein')\npl.tight_layout()\n\npl.show()"
],
"outputs": [],
"metadata": {
diff --git a/docs/source/auto_examples/plot_barycenter_1D.py b/docs/source/auto_examples/plot_barycenter_1D.py
index 875f44c..142b05e 100644
--- a/docs/source/auto_examples/plot_barycenter_1D.py
+++ b/docs/source/auto_examples/plot_barycenter_1D.py
@@ -4,6 +4,14 @@
1D Wasserstein barycenter demo
==============================
+This example illustrates the computation of a regularized Wasserstein barycenter
+as proposed in [3].
+
+
+[3] Benamou, J. D., Carlier, G., Cuturi, M., Nenna, L., & Peyré, G. (2015).
+Iterative Bregman projections for regularized transportation problems.
+SIAM Journal on Scientific Computing, 37(2), A1111-A1138.
+
"""
# Author: Remi Flamary <remi.flamary@unice.fr>
@@ -17,6 +25,9 @@ import ot
from mpl_toolkits.mplot3d import Axes3D # noqa
from matplotlib.collections import PolyCollection
+##############################################################################
+# Generate data
+##############################################################################
#%% parameters
@@ -37,6 +48,10 @@ n_distributions = A.shape[1]
M = ot.utils.dist0(n)
M /= M.max()
+##############################################################################
+# Plot data
+##############################################################################
+
#%% plot the distributions
pl.figure(1, figsize=(6.4, 3))
@@ -45,6 +60,10 @@ for i in range(n_distributions):
pl.title('Distributions')
pl.tight_layout()
+##############################################################################
+# Barycenter computation
+##############################################################################
+
#%% barycenter computation
alpha = 0.2 # 0<=alpha<=1
@@ -71,6 +90,10 @@ pl.legend()
pl.title('Barycenters')
pl.tight_layout()
+##############################################################################
+# Barycentric interpolation
+##############################################################################
+
#%% barycenter interpolation
n_alpha = 11
diff --git a/docs/source/auto_examples/plot_barycenter_1D.rst b/docs/source/auto_examples/plot_barycenter_1D.rst
index af88e80..d3f243f 100644
--- a/docs/source/auto_examples/plot_barycenter_1D.rst
+++ b/docs/source/auto_examples/plot_barycenter_1D.rst
@@ -7,33 +7,13 @@
1D Wasserstein barycenter demo
==============================
+This example illustrates the computation of a regularized Wasserstein barycenter
+as proposed in [3].
-
-
-.. rst-class:: sphx-glr-horizontal
-
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_001.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_002.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_003.png
- :scale: 47
-
- *
-
- .. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_004.png
- :scale: 47
-
+[3] Benamou, J. D., Carlier, G., Cuturi, M., Nenna, L., & Peyré, G. (2015).
+Iterative Bregman projections for regularized transportation problems.
+SIAM Journal on Scientific Computing, 37(2), A1111-A1138.
@@ -53,6 +33,19 @@
from matplotlib.collections import PolyCollection
+
+
+
+
+
+Generate data
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% parameters
n = 100 # nb bins
@@ -72,6 +65,20 @@
M = ot.utils.dist0(n)
M /= M.max()
+
+
+
+
+
+
+Plot data
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% plot the distributions
pl.figure(1, figsize=(6.4, 3))
@@ -80,6 +87,23 @@
pl.title('Distributions')
pl.tight_layout()
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_001.png
+ :align: center
+
+
+
+
+Barycenter computation
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% barycenter computation
alpha = 0.2 # 0<=alpha<=1
@@ -106,6 +130,23 @@
pl.title('Barycenters')
pl.tight_layout()
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_003.png
+ :align: center
+
+
+
+
+Barycentric interpolation
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% barycenter interpolation
n_alpha = 11
@@ -171,7 +212,25 @@
pl.show()
-**Total running time of the script:** ( 0 minutes 0.546 seconds)
+
+
+.. rst-class:: sphx-glr-horizontal
+
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_005.png
+ :scale: 47
+
+ *
+
+ .. image:: /auto_examples/images/sphx_glr_plot_barycenter_1D_006.png
+ :scale: 47
+
+
+
+
+**Total running time of the script:** ( 0 minutes 0.520 seconds)
@@ -190,4 +249,4 @@
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
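As a quick reference for the barycenter API shown in this example, a minimal sketch using the same calls (data and weights are illustrative):

.. code-block:: python

    import numpy as np
    import ot

    n = 100                                    # number of bins
    x = np.arange(n, dtype=np.float64)

    # two Gaussian histograms (m = mean, s = std), as in the gallery example
    a1 = ot.datasets.get_1D_gauss(n, m=20, s=5)
    a2 = ot.datasets.get_1D_gauss(n, m=60, s=8)
    A = np.vstack((a1, a2)).T                  # one distribution per column

    M = ot.utils.dist0(n)                      # pairwise loss between bin positions
    M /= M.max()

    alpha = 0.5                                # interpolation weight
    weights = np.array([1 - alpha, alpha])

    bary_l2 = A.dot(weights)                                # naive L2 average
    bary_wass = ot.bregman.barycenter(A, M, 1e-3, weights)  # entropic OT barycenter
    print(bary_wass.sum())                     # remains (approximately) a histogram summing to 1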
diff --git a/docs/source/auto_examples/plot_compute_emd.ipynb b/docs/source/auto_examples/plot_compute_emd.ipynb
index ce3f8c6..b28413b 100644
--- a/docs/source/auto_examples/plot_compute_emd.ipynb
+++ b/docs/source/auto_examples/plot_compute_emd.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# 1D optimal transport\n\n\n\n"
+ "\n# Plot multiple EMD\n\n\nShows how to compute multiple EMD and Sinkhorn with two differnt \nground metrics and plot their values for diffeent distributions.\n\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -24,7 +24,79 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot\nfrom ot.datasets import get_1D_gauss as gauss\n\n\n#%% parameters\n\nn = 100 # nb bins\nn_target = 50 # nb target distributions\n\n\n# bin positions\nx = np.arange(n, dtype=np.float64)\n\nlst_m = np.linspace(20, 90, n_target)\n\n# Gaussian distributions\na = gauss(n, m=20, s=5) # m= mean, s= std\n\nB = np.zeros((n, n_target))\n\nfor i, m in enumerate(lst_m):\n B[:, i] = gauss(n, m=m, s=5)\n\n# loss matrix and normalization\nM = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'euclidean')\nM /= M.max()\nM2 = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'sqeuclidean')\nM2 /= M2.max()\n#%% plot the distributions\n\npl.figure(1)\npl.subplot(2, 1, 1)\npl.plot(x, a, 'b', label='Source distribution')\npl.title('Source distribution')\npl.subplot(2, 1, 2)\npl.plot(x, B, label='Target distributions')\npl.title('Target distributions')\npl.tight_layout()\n\n#%% Compute and plot distributions and loss matrix\n\nd_emd = ot.emd2(a, B, M) # direct computation of EMD\nd_emd2 = ot.emd2(a, B, M2) # direct computation of EMD with loss M3\n\n\npl.figure(2)\npl.plot(d_emd, label='Euclidean EMD')\npl.plot(d_emd2, label='Squared Euclidean EMD')\npl.title('EMD distances')\npl.legend()\n\n#%%\nreg = 1e-2\nd_sinkhorn = ot.sinkhorn2(a, B, M, reg)\nd_sinkhorn2 = ot.sinkhorn2(a, B, M2, reg)\n\npl.figure(2)\npl.clf()\npl.plot(d_emd, label='Euclidean EMD')\npl.plot(d_emd2, label='Squared Euclidean EMD')\npl.plot(d_sinkhorn, '+', label='Euclidean Sinkhorn')\npl.plot(d_sinkhorn2, '+', label='Squared Euclidean Sinkhorn')\npl.title('EMD distances')\npl.legend()\n\npl.show()"
+ "# Author: Remi Flamary <remi.flamary@unice.fr>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot\nfrom ot.datasets import get_1D_gauss as gauss"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Generate data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% parameters\n\nn = 100 # nb bins\nn_target = 50 # nb target distributions\n\n\n# bin positions\nx = np.arange(n, dtype=np.float64)\n\nlst_m = np.linspace(20, 90, n_target)\n\n# Gaussian distributions\na = gauss(n, m=20, s=5) # m= mean, s= std\n\nB = np.zeros((n, n_target))\n\nfor i, m in enumerate(lst_m):\n B[:, i] = gauss(n, m=m, s=5)\n\n# loss matrix and normalization\nM = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'euclidean')\nM /= M.max()\nM2 = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'sqeuclidean')\nM2 /= M2.max()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Plot data\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% plot the distributions\n\npl.figure(1)\npl.subplot(2, 1, 1)\npl.plot(x, a, 'b', label='Source distribution')\npl.title('Source distribution')\npl.subplot(2, 1, 2)\npl.plot(x, B, label='Target distributions')\npl.title('Target distributions')\npl.tight_layout()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Compute EMD for the different losses\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%% Compute and plot distributions and loss matrix\n\nd_emd = ot.emd2(a, B, M) # direct computation of EMD\nd_emd2 = ot.emd2(a, B, M2) # direct computation of EMD with loss M2\n\n\npl.figure(2)\npl.plot(d_emd, label='Euclidean EMD')\npl.plot(d_emd2, label='Squared Euclidean EMD')\npl.title('EMD distances')\npl.legend()"
+ ],
+ "outputs": [],
+ "metadata": {
+ "collapsed": false
+ }
+ },
+ {
+ "source": [
+ "Compute Sinkhorn for the different losses\n#############################################################################\n\n"
+ ],
+ "cell_type": "markdown",
+ "metadata": {}
+ },
+ {
+ "execution_count": null,
+ "cell_type": "code",
+ "source": [
+ "#%%\nreg = 1e-2\nd_sinkhorn = ot.sinkhorn2(a, B, M, reg)\nd_sinkhorn2 = ot.sinkhorn2(a, B, M2, reg)\n\npl.figure(2)\npl.clf()\npl.plot(d_emd, label='Euclidean EMD')\npl.plot(d_emd2, label='Squared Euclidean EMD')\npl.plot(d_sinkhorn, '+', label='Euclidean Sinkhorn')\npl.plot(d_sinkhorn2, '+', label='Squared Euclidean Sinkhorn')\npl.title('EMD distances')\npl.legend()\n\npl.show()"
],
"outputs": [],
"metadata": {
diff --git a/docs/source/auto_examples/plot_compute_emd.py b/docs/source/auto_examples/plot_compute_emd.py
index 893eecf..b688f93 100644
--- a/docs/source/auto_examples/plot_compute_emd.py
+++ b/docs/source/auto_examples/plot_compute_emd.py
@@ -1,8 +1,12 @@
# -*- coding: utf-8 -*-
"""
-====================
-1D optimal transport
-====================
+=================
+Plot multiple EMD
+=================
+
+Shows how to compute multiple EMD and Sinkhorn with two different
+ground metrics and plot their values for different distributions.
+
"""
@@ -16,6 +20,10 @@ import ot
from ot.datasets import get_1D_gauss as gauss
+##############################################################################
+# Generate data
+##############################################################################
+
#%% parameters
n = 100 # nb bins
@@ -40,6 +48,11 @@ M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'euclidean')
M /= M.max()
M2 = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'sqeuclidean')
M2 /= M2.max()
+
+##############################################################################
+# Plot data
+##############################################################################
+
#%% plot the distributions
pl.figure(1)
@@ -51,10 +64,15 @@ pl.plot(x, B, label='Target distributions')
pl.title('Target distributions')
pl.tight_layout()
+
+##############################################################################
+# Compute EMD for the different losses
+##############################################################################
+
#%% Compute and plot distributions and loss matrix
d_emd = ot.emd2(a, B, M) # direct computation of EMD
-d_emd2 = ot.emd2(a, B, M2) # direct computation of EMD with loss M3
+d_emd2 = ot.emd2(a, B, M2) # direct computation of EMD with loss M2
pl.figure(2)
@@ -63,6 +81,10 @@ pl.plot(d_emd2, label='Squared Euclidean EMD')
pl.title('EMD distances')
pl.legend()
+##############################################################################
+# Compute Sinkhorn for the different losses
+##############################################################################
+
#%%
reg = 1e-2
d_sinkhorn = ot.sinkhorn2(a, B, M, reg)
diff --git a/docs/source/auto_examples/plot_compute_emd.rst b/docs/source/auto_examples/plot_compute_emd.rst
index f2e2005..b489255 100644
--- a/docs/source/auto_examples/plot_compute_emd.rst
+++ b/docs/source/auto_examples/plot_compute_emd.rst
@@ -3,42 +3,42 @@
.. _sphx_glr_auto_examples_plot_compute_emd.py:
-====================
-1D optimal transport
-====================
+=================
+Plot multiple EMD
+=================
+Shows how to compute multiple EMD and Sinkhorn with two different
+ground metrics and plot their values for different distributions.
-.. rst-class:: sphx-glr-horizontal
+.. code-block:: python
- *
- .. image:: /auto_examples/images/sphx_glr_plot_compute_emd_001.png
- :scale: 47
+ # Author: Remi Flamary <remi.flamary@unice.fr>
+ #
+ # License: MIT License
- *
+ import numpy as np
+ import matplotlib.pylab as pl
+ import ot
+ from ot.datasets import get_1D_gauss as gauss
- .. image:: /auto_examples/images/sphx_glr_plot_compute_emd_002.png
- :scale: 47
-.. code-block:: python
- # Author: Remi Flamary <remi.flamary@unice.fr>
- #
- # License: MIT License
+Generate data
+#############################################################################
- import numpy as np
- import matplotlib.pylab as pl
- import ot
- from ot.datasets import get_1D_gauss as gauss
+
+
+.. code-block:: python
#%% parameters
@@ -65,6 +65,21 @@
M /= M.max()
M2 = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'sqeuclidean')
M2 /= M2.max()
+
+
+
+
+
+
+
+Plot data
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% plot the distributions
pl.figure(1)
@@ -76,10 +91,28 @@
pl.title('Target distributions')
pl.tight_layout()
+
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_compute_emd_001.png
+ :align: center
+
+
+
+
+Compute EMD for the different losses
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%% Compute and plot distributions and loss matrix
d_emd = ot.emd2(a, B, M) # direct computation of EMD
- d_emd2 = ot.emd2(a, B, M2) # direct computation of EMD with loss M3
+ d_emd2 = ot.emd2(a, B, M2) # direct computation of EMD with loss M2
pl.figure(2)
@@ -88,6 +121,23 @@
pl.title('EMD distances')
pl.legend()
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_compute_emd_003.png
+ :align: center
+
+
+
+
+Compute Sinkhorn for the different losses
+#############################################################################
+
+
+
+.. code-block:: python
+
+
#%%
reg = 1e-2
d_sinkhorn = ot.sinkhorn2(a, B, M, reg)
@@ -104,7 +154,15 @@
pl.show()
-**Total running time of the script:** ( 0 minutes 0.906 seconds)
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_compute_emd_004.png
+ :align: center
+
+
+
+
+**Total running time of the script:** ( 0 minutes 0.427 seconds)
@@ -123,4 +181,4 @@
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
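The plot_compute_emd example boils down to the calls below; this sketch reproduces them on a smaller set of target histograms (parameter values are illustrative):

.. code-block:: python

    import numpy as np
    import ot
    from ot.datasets import get_1D_gauss as gauss

    n = 100
    x = np.arange(n, dtype=np.float64)

    a = gauss(n, m=20, s=5)                                    # source histogram
    B = np.vstack([gauss(n, m=m, s=5)                          # a few target histograms
                   for m in np.linspace(20, 90, 10)]).T

    # two ground metrics on the bin positions
    M1 = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'euclidean')
    M1 /= M1.max()
    M2 = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)), 'sqeuclidean')
    M2 /= M2.max()

    d_emd = ot.emd2(a, B, M1)                   # one EMD value per target column
    d_emd2 = ot.emd2(a, B, M2)
    d_sinkhorn = ot.sinkhorn2(a, B, M1, 1e-2)   # entropic approximation
    print(len(d_emd), len(d_emd2), len(d_sinkhorn))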
diff --git a/docs/source/auto_examples/plot_optim_OTreg.ipynb b/docs/source/auto_examples/plot_optim_OTreg.ipynb
index 9d26e4d..290100f 100644
--- a/docs/source/auto_examples/plot_optim_OTreg.ipynb
+++ b/docs/source/auto_examples/plot_optim_OTreg.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n# Regularized OT with generic solver\n\n\n\n\n"
+ "\n# Regularized OT with generic solver\n\n\nIllustrates the use of the generic solver for regularized OT with\nuser-designed regularization term. It uses Conditional gradient as in [6] and \ngeneralized Conditional Gradient as proposed in [5][7].\n\n\n[5] N. Courty; R. Flamary; D. Tuia; A. Rakotomamonjy, Optimal Transport for \nDomain Adaptation, in IEEE Transactions on Pattern Analysis and Machine \nIntelligence , vol.PP, no.99, pp.1-1.\n\n[6] Ferradans, S., Papadakis, N., Peyr\u00e9, G., & Aujol, J. F. (2014). \nRegularized discrete optimal transport. SIAM Journal on Imaging Sciences, \n7(3), 1853-1882.\n\n[7] Rakotomamonjy, A., Flamary, R., & Courty, N. (2015). Generalized \nconditional gradient: analysis of convergence and applications. \narXiv preprint arXiv:1510.06567.\n\n\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -33,7 +33,7 @@
},
{
"source": [
- "Generate data \n#############################################################################\n\n"
+ "Generate data\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -51,7 +51,7 @@
},
{
"source": [
- "Solve EMD \n#############################################################################\n\n"
+ "Solve EMD\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -114,7 +114,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "#%% Example with Frobenius norm + entropic regularization with gcg\n\ndef f(G):\n return 0.5 * np.sum(G**2)\n\n\ndef df(G):\n return G\n\n\nreg1 = 1e-3\nreg2 = 1e-1\n\nGel2 = ot.optim.gcg(a, b, M, reg1, reg2, f, df, verbose=True)\n\npl.figure(5, figsize=(5, 5))\not.plot.plot1D_mat(a, b, Gel2, 'OT entropic + matrix Frob. reg')\npl.show()"
+ "#%% Example with Frobenius norm + entropic regularization with gcg\n\n\ndef f(G):\n return 0.5 * np.sum(G**2)\n\n\ndef df(G):\n return G\n\n\nreg1 = 1e-3\nreg2 = 1e-1\n\nGel2 = ot.optim.gcg(a, b, M, reg1, reg2, f, df, verbose=True)\n\npl.figure(5, figsize=(5, 5))\not.plot.plot1D_mat(a, b, Gel2, 'OT entropic + matrix Frob. reg')\npl.show()"
],
"outputs": [],
"metadata": {
diff --git a/docs/source/auto_examples/plot_optim_OTreg.py b/docs/source/auto_examples/plot_optim_OTreg.py
index d36b269..b362662 100644
--- a/docs/source/auto_examples/plot_optim_OTreg.py
+++ b/docs/source/auto_examples/plot_optim_OTreg.py
@@ -4,6 +4,24 @@
Regularized OT with generic solver
==================================
+Illustrates the use of the generic solver for regularized OT with a
+user-designed regularization term. It uses conditional gradient as in [6] and
+generalized conditional gradient as proposed in [5][7].
+
+
+[5] N. Courty; R. Flamary; D. Tuia; A. Rakotomamonjy, Optimal Transport for
+Domain Adaptation, in IEEE Transactions on Pattern Analysis and Machine
+Intelligence, vol.PP, no.99, pp.1-1.
+
+[6] Ferradans, S., Papadakis, N., Peyré, G., & Aujol, J. F. (2014).
+Regularized discrete optimal transport. SIAM Journal on Imaging Sciences,
+7(3), 1853-1882.
+
+[7] Rakotomamonjy, A., Flamary, R., & Courty, N. (2015). Generalized
+conditional gradient: analysis of convergence and applications.
+arXiv preprint arXiv:1510.06567.
+
+
"""
@@ -13,7 +31,7 @@ import ot
##############################################################################
-# Generate data
+# Generate data
##############################################################################
#%% parameters
@@ -32,7 +50,7 @@ M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
M /= M.max()
##############################################################################
-# Solve EMD
+# Solve EMD
##############################################################################
#%% EMD
@@ -92,6 +110,7 @@ ot.plot.plot1D_mat(a, b, Ge, 'OT matrix Entrop. reg')
#%% Example with Frobenius norm + entropic regularization with gcg
+
def f(G):
return 0.5 * np.sum(G**2)
diff --git a/docs/source/auto_examples/plot_optim_OTreg.rst b/docs/source/auto_examples/plot_optim_OTreg.rst
index 532d4ca..d444631 100644
--- a/docs/source/auto_examples/plot_optim_OTreg.rst
+++ b/docs/source/auto_examples/plot_optim_OTreg.rst
@@ -7,6 +7,24 @@
Regularized OT with generic solver
==================================
+Illustrates the use of the generic solver for regularized OT with a
+user-designed regularization term. It uses conditional gradient as in [6] and
+generalized conditional gradient as proposed in [5][7].
+
+
+[5] N. Courty; R. Flamary; D. Tuia; A. Rakotomamonjy, Optimal Transport for
+Domain Adaptation, in IEEE Transactions on Pattern Analysis and Machine
+Intelligence, vol.PP, no.99, pp.1-1.
+
+[6] Ferradans, S., Papadakis, N., Peyré, G., & Aujol, J. F. (2014).
+Regularized discrete optimal transport. SIAM Journal on Imaging Sciences,
+7(3), 1853-1882.
+
+[7] Rakotomamonjy, A., Flamary, R., & Courty, N. (2015). Generalized
+conditional gradient: analysis of convergence and applications.
+arXiv preprint arXiv:1510.06567.
+
+
@@ -25,7 +43,7 @@ Regularized OT with generic solver
-Generate data
+Generate data
#############################################################################
@@ -54,7 +72,7 @@ Generate data
-Solve EMD
+Solve EMD
#############################################################################
@@ -612,6 +630,7 @@ Solve EMD with Frobenius norm + entropic regularization
#%% Example with Frobenius norm + entropic regularization with gcg
+
def f(G):
return 0.5 * np.sum(G**2)
@@ -645,10 +664,10 @@ Solve EMD with Frobenius norm + entropic regularization
1|1.610121e-01|-5.152589e-02
2|1.609378e-01|-4.622297e-04
3|1.609284e-01|-5.830043e-05
- 4|1.609284e-01|-1.111580e-12
+ 4|1.609284e-01|-1.111407e-12
-**Total running time of the script:** ( 0 minutes 2.719 seconds)
+**Total running time of the script:** ( 0 minutes 1.867 seconds)
@@ -667,4 +686,4 @@ Solve EMD with Frobenius norm + entropic regularization
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
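The generic solver described above amounts to passing a user-defined regularizer f and its gradient df to ot.optim.gcg; a minimal sketch with illustrative histograms (not the gallery data) follows:

.. code-block:: python

    import numpy as np
    import ot

    n = 100
    x = np.arange(n, dtype=np.float64)
    a = ot.datasets.get_1D_gauss(n, m=20, s=5)   # source histogram
    b = ot.datasets.get_1D_gauss(n, m=60, s=10)  # target histogram

    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()

    # user-designed regularizer: squared Frobenius norm of the coupling
    def f(G):
        return 0.5 * np.sum(G ** 2)

    def df(G):
        return G

    # generalized conditional gradient: entropic term (reg1) plus the custom term (reg2)
    Gel2 = ot.optim.gcg(a, b, M, 1e-3, 1e-1, f, df, verbose=False)
    print(Gel2.shape)                            # coupling matrix of shape (n, n)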
diff --git a/docs/source/auto_examples/plot_otda_classes.rst b/docs/source/auto_examples/plot_otda_classes.rst
index 227a819..d1a13b1 100644
--- a/docs/source/auto_examples/plot_otda_classes.rst
+++ b/docs/source/auto_examples/plot_otda_classes.rst
@@ -94,29 +94,29 @@ Instantiate the different transport algorithms and fit them
It. |Loss |Delta loss
--------------------------------
- 0|9.456043e+00|0.000000e+00
- 1|2.059035e+00|-3.592463e+00
- 2|1.839814e+00|-1.191540e-01
- 3|1.787860e+00|-2.905942e-02
- 4|1.766582e+00|-1.204485e-02
- 5|1.760573e+00|-3.413038e-03
- 6|1.755288e+00|-3.010556e-03
- 7|1.749124e+00|-3.523968e-03
- 8|1.744159e+00|-2.846760e-03
- 9|1.741007e+00|-1.810862e-03
- 10|1.739839e+00|-6.710130e-04
- 11|1.737221e+00|-1.507260e-03
- 12|1.736011e+00|-6.970742e-04
- 13|1.734948e+00|-6.126425e-04
- 14|1.733901e+00|-6.038775e-04
- 15|1.733768e+00|-7.618542e-05
- 16|1.732821e+00|-5.467723e-04
- 17|1.732678e+00|-8.226843e-05
- 18|1.731934e+00|-4.300066e-04
- 19|1.731850e+00|-4.848002e-05
+ 0|9.984935e+00|0.000000e+00
+ 1|2.126803e+00|-3.694808e+00
+ 2|1.867272e+00|-1.389895e-01
+ 3|1.803858e+00|-3.515488e-02
+ 4|1.783036e+00|-1.167761e-02
+ 5|1.774823e+00|-4.627422e-03
+ 6|1.771947e+00|-1.623526e-03
+ 7|1.767564e+00|-2.479535e-03
+ 8|1.763484e+00|-2.313667e-03
+ 9|1.761138e+00|-1.331780e-03
+ 10|1.758879e+00|-1.284576e-03
+ 11|1.758034e+00|-4.806014e-04
+ 12|1.757595e+00|-2.497155e-04
+ 13|1.756749e+00|-4.818562e-04
+ 14|1.755316e+00|-8.161432e-04
+ 15|1.754988e+00|-1.866236e-04
+ 16|1.754964e+00|-1.382474e-05
+ 17|1.754032e+00|-5.315971e-04
+ 18|1.753595e+00|-2.492359e-04
+ 19|1.752900e+00|-3.961403e-04
It. |Loss |Delta loss
--------------------------------
- 20|1.731699e+00|-8.729590e-05
+ 20|1.752850e+00|-2.869262e-05
Fig 1 : plots source and target samples
@@ -236,7 +236,7 @@ Fig 2 : plot optimal couplings and transported samples
-**Total running time of the script:** ( 0 minutes 1.906 seconds)
+**Total running time of the script:** ( 0 minutes 1.576 seconds)
@@ -255,4 +255,4 @@ Fig 2 : plot optimal couplings and transported samples
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_otda_color_images.ipynb b/docs/source/auto_examples/plot_otda_color_images.ipynb
index c45c307..797b27d 100644
--- a/docs/source/auto_examples/plot_otda_color_images.ipynb
+++ b/docs/source/auto_examples/plot_otda_color_images.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n========================================================\nOT for domain adaptation with image color adaptation [6]\n========================================================\n\nThis example presents a way of transferring colors between two image\nwith Optimal Transport as introduced in [6]\n\n[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014).\nRegularized discrete optimal transport.\nSIAM Journal on Imaging Sciences, 7(3), 1853-1882.\n\n"
+ "\n# OT for image color adaptation\n\n\nThis example presents a way of transferring colors between two image\nwith Optimal Transport as introduced in [6]\n\n[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014).\nRegularized discrete optimal transport.\nSIAM Journal on Imaging Sciences, 7(3), 1853-1882.\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -33,7 +33,7 @@
},
{
"source": [
- "generate data\n#############################################################################\n\n"
+ "Generate data\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -51,7 +51,7 @@
},
{
"source": [
- "Instantiate the different transport algorithms and fit them\n#############################################################################\n\n"
+ "Plot original image\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -60,7 +60,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# EMDTransport\not_emd = ot.da.EMDTransport()\not_emd.fit(Xs=Xs, Xt=Xt)\n\n# SinkhornTransport\not_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)\not_sinkhorn.fit(Xs=Xs, Xt=Xt)\n\n# prediction between images (using out of sample prediction as in [6])\ntransp_Xs_emd = ot_emd.transform(Xs=X1)\ntransp_Xt_emd = ot_emd.inverse_transform(Xt=X2)\n\ntransp_Xs_sinkhorn = ot_emd.transform(Xs=X1)\ntransp_Xt_sinkhorn = ot_emd.inverse_transform(Xt=X2)\n\nI1t = minmax(mat2im(transp_Xs_emd, I1.shape))\nI2t = minmax(mat2im(transp_Xt_emd, I2.shape))\n\nI1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))\nI2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))"
+ "pl.figure(1, figsize=(6.4, 3))\n\npl.subplot(1, 2, 1)\npl.imshow(I1)\npl.axis('off')\npl.title('Image 1')\n\npl.subplot(1, 2, 2)\npl.imshow(I2)\npl.axis('off')\npl.title('Image 2')"
],
"outputs": [],
"metadata": {
@@ -69,7 +69,7 @@
},
{
"source": [
- "plot original image\n#############################################################################\n\n"
+ "Scatter plot of colors\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -78,7 +78,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "pl.figure(1, figsize=(6.4, 3))\n\npl.subplot(1, 2, 1)\npl.imshow(I1)\npl.axis('off')\npl.title('Image 1')\n\npl.subplot(1, 2, 2)\npl.imshow(I2)\npl.axis('off')\npl.title('Image 2')"
+ "pl.figure(2, figsize=(6.4, 3))\n\npl.subplot(1, 2, 1)\npl.scatter(Xs[:, 0], Xs[:, 2], c=Xs)\npl.axis([0, 1, 0, 1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 1')\n\npl.subplot(1, 2, 2)\npl.scatter(Xt[:, 0], Xt[:, 2], c=Xt)\npl.axis([0, 1, 0, 1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 2')\npl.tight_layout()"
],
"outputs": [],
"metadata": {
@@ -87,7 +87,7 @@
},
{
"source": [
- "scatter plot of colors\n#############################################################################\n\n"
+ "Instantiate the different transport algorithms and fit them\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -96,7 +96,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "pl.figure(2, figsize=(6.4, 3))\n\npl.subplot(1, 2, 1)\npl.scatter(Xs[:, 0], Xs[:, 2], c=Xs)\npl.axis([0, 1, 0, 1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 1')\n\npl.subplot(1, 2, 2)\npl.scatter(Xt[:, 0], Xt[:, 2], c=Xt)\npl.axis([0, 1, 0, 1])\npl.xlabel('Red')\npl.ylabel('Blue')\npl.title('Image 2')\npl.tight_layout()"
+ "# EMDTransport\not_emd = ot.da.EMDTransport()\not_emd.fit(Xs=Xs, Xt=Xt)\n\n# SinkhornTransport\not_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)\not_sinkhorn.fit(Xs=Xs, Xt=Xt)\n\n# prediction between images (using out of sample prediction as in [6])\ntransp_Xs_emd = ot_emd.transform(Xs=X1)\ntransp_Xt_emd = ot_emd.inverse_transform(Xt=X2)\n\ntransp_Xs_sinkhorn = ot_emd.transform(Xs=X1)\ntransp_Xt_sinkhorn = ot_emd.inverse_transform(Xt=X2)\n\nI1t = minmax(mat2im(transp_Xs_emd, I1.shape))\nI2t = minmax(mat2im(transp_Xt_emd, I2.shape))\n\nI1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))\nI2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))"
],
"outputs": [],
"metadata": {
@@ -105,7 +105,7 @@
},
{
"source": [
- "plot new images\n#############################################################################\n\n"
+ "Plot new images\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
diff --git a/docs/source/auto_examples/plot_otda_color_images.py b/docs/source/auto_examples/plot_otda_color_images.py
index 46ad44b..f1df9d9 100644
--- a/docs/source/auto_examples/plot_otda_color_images.py
+++ b/docs/source/auto_examples/plot_otda_color_images.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
"""
-========================================================
-OT for domain adaptation with image color adaptation [6]
-========================================================
+=============================
+OT for image color adaptation
+=============================
This example presents a way of transferring colors between two images
with Optimal Transport as introduced in [6].
@@ -41,7 +41,7 @@ def minmax(I):
##############################################################################
-# generate data
+# Generate data
##############################################################################
# Loading images
@@ -61,33 +61,7 @@ Xt = X2[idx2, :]
##############################################################################
-# Instantiate the different transport algorithms and fit them
-##############################################################################
-
-# EMDTransport
-ot_emd = ot.da.EMDTransport()
-ot_emd.fit(Xs=Xs, Xt=Xt)
-
-# SinkhornTransport
-ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
-ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
-
-# prediction between images (using out of sample prediction as in [6])
-transp_Xs_emd = ot_emd.transform(Xs=X1)
-transp_Xt_emd = ot_emd.inverse_transform(Xt=X2)
-
-transp_Xs_sinkhorn = ot_emd.transform(Xs=X1)
-transp_Xt_sinkhorn = ot_emd.inverse_transform(Xt=X2)
-
-I1t = minmax(mat2im(transp_Xs_emd, I1.shape))
-I2t = minmax(mat2im(transp_Xt_emd, I2.shape))
-
-I1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))
-I2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))
-
-
-##############################################################################
-# plot original image
+# Plot original image
##############################################################################
pl.figure(1, figsize=(6.4, 3))
@@ -104,7 +78,7 @@ pl.title('Image 2')
##############################################################################
-# scatter plot of colors
+# Scatter plot of colors
##############################################################################
pl.figure(2, figsize=(6.4, 3))
@@ -126,7 +100,33 @@ pl.tight_layout()
##############################################################################
-# plot new images
+# Instantiate the different transport algorithms and fit them
+##############################################################################
+
+# EMDTransport
+ot_emd = ot.da.EMDTransport()
+ot_emd.fit(Xs=Xs, Xt=Xt)
+
+# SinkhornTransport
+ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
+ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
+
+# prediction between images (using out of sample prediction as in [6])
+transp_Xs_emd = ot_emd.transform(Xs=X1)
+transp_Xt_emd = ot_emd.inverse_transform(Xt=X2)
+
+transp_Xs_sinkhorn = ot_emd.transform(Xs=X1)
+transp_Xt_sinkhorn = ot_emd.inverse_transform(Xt=X2)
+
+I1t = minmax(mat2im(transp_Xs_emd, I1.shape))
+I2t = minmax(mat2im(transp_Xt_emd, I2.shape))
+
+I1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))
+I2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))
+
+
+##############################################################################
+# Plot new images
##############################################################################
pl.figure(3, figsize=(8, 4))
diff --git a/docs/source/auto_examples/plot_otda_color_images.rst b/docs/source/auto_examples/plot_otda_color_images.rst
index e3989c8..88e93d2 100644
--- a/docs/source/auto_examples/plot_otda_color_images.rst
+++ b/docs/source/auto_examples/plot_otda_color_images.rst
@@ -3,9 +3,9 @@
.. _sphx_glr_auto_examples_plot_otda_color_images.py:
-========================================================
-OT for domain adaptation with image color adaptation [6]
-========================================================
+=============================
+OT for image color adaptation
+=============================
This example presents a way of transferring colors between two images
with Optimal Transport as introduced in [6].
@@ -53,7 +53,7 @@ SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-generate data
+Generate data
#############################################################################
@@ -83,43 +83,7 @@ generate data
-Instantiate the different transport algorithms and fit them
-#############################################################################
-
-
-
-.. code-block:: python
-
-
- # EMDTransport
- ot_emd = ot.da.EMDTransport()
- ot_emd.fit(Xs=Xs, Xt=Xt)
-
- # SinkhornTransport
- ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
- ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
-
- # prediction between images (using out of sample prediction as in [6])
- transp_Xs_emd = ot_emd.transform(Xs=X1)
- transp_Xt_emd = ot_emd.inverse_transform(Xt=X2)
-
- transp_Xs_sinkhorn = ot_emd.transform(Xs=X1)
- transp_Xt_sinkhorn = ot_emd.inverse_transform(Xt=X2)
-
- I1t = minmax(mat2im(transp_Xs_emd, I1.shape))
- I2t = minmax(mat2im(transp_Xt_emd, I2.shape))
-
- I1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))
- I2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))
-
-
-
-
-
-
-
-
-plot original image
+Plot original image
#############################################################################
@@ -149,7 +113,7 @@ plot original image
-scatter plot of colors
+Scatter plot of colors
#############################################################################
@@ -184,7 +148,43 @@ scatter plot of colors
-plot new images
+Instantiate the different transport algorithms and fit them
+#############################################################################
+
+
+
+.. code-block:: python
+
+
+ # EMDTransport
+ ot_emd = ot.da.EMDTransport()
+ ot_emd.fit(Xs=Xs, Xt=Xt)
+
+ # SinkhornTransport
+ ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
+ ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
+
+ # prediction between images (using out of sample prediction as in [6])
+ transp_Xs_emd = ot_emd.transform(Xs=X1)
+ transp_Xt_emd = ot_emd.inverse_transform(Xt=X2)
+
+ transp_Xs_sinkhorn = ot_emd.transform(Xs=X1)
+ transp_Xt_sinkhorn = ot_emd.inverse_transform(Xt=X2)
+
+ I1t = minmax(mat2im(transp_Xs_emd, I1.shape))
+ I2t = minmax(mat2im(transp_Xt_emd, I2.shape))
+
+ I1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))
+ I2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))
+
+
+
+
+
+
+
+
+Plot new images
#############################################################################
@@ -235,7 +235,7 @@ plot new images
-**Total running time of the script:** ( 3 minutes 16.043 seconds)
+**Total running time of the script:** ( 2 minutes 28.053 seconds)
@@ -254,4 +254,4 @@ plot new images
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
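The color-adaptation example above ultimately relies on the ot.da transport classes; here is a minimal sketch of that API with random 3-D samples standing in for the sampled image pixels (no image I/O, values illustrative):

.. code-block:: python

    import numpy as np
    import ot

    rng = np.random.RandomState(42)
    Xs = rng.rand(500, 3)              # stand-in for pixels of image 1 (RGB in [0, 1])
    Xt = 0.5 + 0.5 * rng.rand(500, 3)  # stand-in for pixels of image 2

    # exact OT coupling between the two color clouds
    ot_emd = ot.da.EMDTransport()
    ot_emd.fit(Xs=Xs, Xt=Xt)

    # entropic-regularized alternative
    ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
    ot_sinkhorn.fit(Xs=Xs, Xt=Xt)

    # map source colors toward the target palette, and back
    Xs_mapped = ot_emd.transform(Xs=Xs)
    Xt_mapped = ot_emd.inverse_transform(Xt=Xt)
    print(Xs_mapped.shape, Xt_mapped.shape)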
diff --git a/docs/source/auto_examples/plot_otda_d2.rst b/docs/source/auto_examples/plot_otda_d2.rst
index 20b76ba..3aa1149 100644
--- a/docs/source/auto_examples/plot_otda_d2.rst
+++ b/docs/source/auto_examples/plot_otda_d2.rst
@@ -243,7 +243,7 @@ Fig 3 : plot transported samples
-**Total running time of the script:** ( 0 minutes 46.009 seconds)
+**Total running time of the script:** ( 0 minutes 32.275 seconds)
@@ -262,4 +262,4 @@ Fig 3 : plot transported samples
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
diff --git a/docs/source/auto_examples/plot_otda_mapping.ipynb b/docs/source/auto_examples/plot_otda_mapping.ipynb
index 0b5ca5c..5b3fd06 100644
--- a/docs/source/auto_examples/plot_otda_mapping.ipynb
+++ b/docs/source/auto_examples/plot_otda_mapping.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n===============================================\nOT mapping estimation for domain adaptation [8]\n===============================================\n\nThis example presents how to use MappingTransport to estimate at the same\ntime both the coupling transport and approximate the transport map with either\na linear or a kernelized mapping as introduced in [8]\n\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,\n \"Mapping estimation for discrete optimal transport\",\n Neural Information Processing Systems (NIPS), 2016.\n\n"
+ "\n# OT mapping estimation for domain adaptation\n\n\nThis example presents how to use MappingTransport to estimate at the same\ntime both the coupling transport and approximate the transport map with either\na linear or a kernelized mapping as introduced in [8].\n\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,\n \"Mapping estimation for discrete optimal transport\",\n Neural Information Processing Systems (NIPS), 2016.\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -33,7 +33,7 @@
},
{
"source": [
- "generate data\n#############################################################################\n\n"
+ "Generate data\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -51,7 +51,7 @@
},
{
"source": [
- "Instantiate the different transport algorithms and fit them\n#############################################################################\n\n"
+ "Plot data\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -60,7 +60,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "# MappingTransport with linear kernel\not_mapping_linear = ot.da.MappingTransport(\n kernel=\"linear\", mu=1e0, eta=1e-8, bias=True,\n max_iter=20, verbose=True)\n\not_mapping_linear.fit(Xs=Xs, Xt=Xt)\n\n# for original source samples, transform applies barycentric mapping\ntransp_Xs_linear = ot_mapping_linear.transform(Xs=Xs)\n\n# for out of source samples, transform applies the linear mapping\ntransp_Xs_linear_new = ot_mapping_linear.transform(Xs=Xs_new)\n\n\n# MappingTransport with gaussian kernel\not_mapping_gaussian = ot.da.MappingTransport(\n kernel=\"gaussian\", eta=1e-5, mu=1e-1, bias=True, sigma=1,\n max_iter=10, verbose=True)\not_mapping_gaussian.fit(Xs=Xs, Xt=Xt)\n\n# for original source samples, transform applies barycentric mapping\ntransp_Xs_gaussian = ot_mapping_gaussian.transform(Xs=Xs)\n\n# for out of source samples, transform applies the gaussian mapping\ntransp_Xs_gaussian_new = ot_mapping_gaussian.transform(Xs=Xs_new)"
+ "pl.figure(1, (10, 5))\npl.clf()\npl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')\npl.legend(loc=0)\npl.title('Source and target distributions')"
],
"outputs": [],
"metadata": {
@@ -69,7 +69,7 @@
},
{
"source": [
- "plot data\n#############################################################################\n\n"
+ "Instantiate the different transport algorithms and fit them\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -78,7 +78,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
- "pl.figure(1, (10, 5))\npl.clf()\npl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')\npl.legend(loc=0)\npl.title('Source and target distributions')"
+ "# MappingTransport with linear kernel\not_mapping_linear = ot.da.MappingTransport(\n kernel=\"linear\", mu=1e0, eta=1e-8, bias=True,\n max_iter=20, verbose=True)\n\not_mapping_linear.fit(Xs=Xs, Xt=Xt)\n\n# for original source samples, transform applies barycentric mapping\ntransp_Xs_linear = ot_mapping_linear.transform(Xs=Xs)\n\n# for out of source samples, transform applies the linear mapping\ntransp_Xs_linear_new = ot_mapping_linear.transform(Xs=Xs_new)\n\n\n# MappingTransport with gaussian kernel\not_mapping_gaussian = ot.da.MappingTransport(\n kernel=\"gaussian\", eta=1e-5, mu=1e-1, bias=True, sigma=1,\n max_iter=10, verbose=True)\not_mapping_gaussian.fit(Xs=Xs, Xt=Xt)\n\n# for original source samples, transform applies barycentric mapping\ntransp_Xs_gaussian = ot_mapping_gaussian.transform(Xs=Xs)\n\n# for out of source samples, transform applies the gaussian mapping\ntransp_Xs_gaussian_new = ot_mapping_gaussian.transform(Xs=Xs_new)"
],
"outputs": [],
"metadata": {
@@ -87,7 +87,7 @@
},
{
"source": [
- "plot transported samples\n#############################################################################\n\n"
+ "Plot transported samples\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
diff --git a/docs/source/auto_examples/plot_otda_mapping.py b/docs/source/auto_examples/plot_otda_mapping.py
index 09d2cb4..e78fef4 100644
--- a/docs/source/auto_examples/plot_otda_mapping.py
+++ b/docs/source/auto_examples/plot_otda_mapping.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
"""
-===============================================
-OT mapping estimation for domain adaptation [8]
-===============================================
+===========================================
+OT mapping estimation for domain adaptation
+===========================================
This example presents how to use MappingTransport to estimate at the same
time both the coupling transport and approximate the transport map with either
-a linear or a kernelized mapping as introduced in [8]
+a linear or a kernelized mapping as introduced in [8].
[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,
"Mapping estimation for discrete optimal transport",
@@ -24,7 +24,7 @@ import ot
##############################################################################
-# generate data
+# Generate data
##############################################################################
n_source_samples = 100
@@ -43,6 +43,17 @@ Xt, yt = ot.datasets.get_data_classif(
Xt[yt == 2] *= 3
Xt = Xt + 4
+##############################################################################
+# Plot data
+##############################################################################
+
+pl.figure(1, (10, 5))
+pl.clf()
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.legend(loc=0)
+pl.title('Source and target distributions')
+
##############################################################################
# Instantiate the different transport algorithms and fit them
@@ -76,19 +87,7 @@ transp_Xs_gaussian_new = ot_mapping_gaussian.transform(Xs=Xs_new)
##############################################################################
-# plot data
-##############################################################################
-
-pl.figure(1, (10, 5))
-pl.clf()
-pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
-pl.legend(loc=0)
-pl.title('Source and target distributions')
-
-
-##############################################################################
-# plot transported samples
+# Plot transported samples
##############################################################################
pl.figure(2)
diff --git a/docs/source/auto_examples/plot_otda_mapping.rst b/docs/source/auto_examples/plot_otda_mapping.rst
index 088da31..ddc1ee9 100644
--- a/docs/source/auto_examples/plot_otda_mapping.rst
+++ b/docs/source/auto_examples/plot_otda_mapping.rst
@@ -3,13 +3,13 @@
.. _sphx_glr_auto_examples_plot_otda_mapping.py:
-===============================================
-OT mapping estimation for domain adaptation [8]
-===============================================
+===========================================
+OT mapping estimation for domain adaptation
+===========================================
This example presents how to use MappingTransport to estimate at the same
time both the coupling transport and approximate the transport map with either
-a linear or a kernelized mapping as introduced in [8]
+a linear or a kernelized mapping as introduced in [8].
[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,
"Mapping estimation for discrete optimal transport",
@@ -36,7 +36,7 @@ a linear or a kernelized mapping as introduced in [8]
-generate data
+Generate data
#############################################################################
@@ -66,6 +66,30 @@ generate data
+Plot data
+#############################################################################
+
+
+
+.. code-block:: python
+
+
+ pl.figure(1, (10, 5))
+ pl.clf()
+ pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+ pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+ pl.legend(loc=0)
+ pl.title('Source and target distributions')
+
+
+
+
+
+.. image:: /auto_examples/images/sphx_glr_plot_otda_mapping_001.png
+ :align: center
+
+
+
Instantiate the different transport algorithms and fit them
#############################################################################
@@ -112,54 +136,29 @@ Instantiate the different transport algorithms and fit them
It. |Loss |Delta loss
--------------------------------
- 0|4.273804e+03|0.000000e+00
- 1|4.264510e+03|-2.174580e-03
- 2|4.264209e+03|-7.047095e-05
- 3|4.264078e+03|-3.069822e-05
- 4|4.264018e+03|-1.412924e-05
- 5|4.263961e+03|-1.341165e-05
- 6|4.263946e+03|-3.586522e-06
+ 0|4.481482e+03|0.000000e+00
+ 1|4.469389e+03|-2.698549e-03
+ 2|4.468825e+03|-1.261217e-04
+ 3|4.468580e+03|-5.486064e-05
+ 4|4.468438e+03|-3.161220e-05
+ 5|4.468352e+03|-1.930800e-05
+ 6|4.468309e+03|-9.570658e-06
It. |Loss |Delta loss
--------------------------------
- 0|4.294523e+02|0.000000e+00
- 1|4.247737e+02|-1.089443e-02
- 2|4.245516e+02|-5.228765e-04
- 3|4.244430e+02|-2.557417e-04
- 4|4.243724e+02|-1.663904e-04
- 5|4.243196e+02|-1.244111e-04
- 6|4.242808e+02|-9.132500e-05
- 7|4.242497e+02|-7.331710e-05
- 8|4.242271e+02|-5.326612e-05
- 9|4.242063e+02|-4.916026e-05
- 10|4.241906e+02|-3.699617e-05
-
-
-plot data
-#############################################################################
-
-
-
-.. code-block:: python
-
-
- pl.figure(1, (10, 5))
- pl.clf()
- pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
- pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
- pl.legend(loc=0)
- pl.title('Source and target distributions')
-
-
-
-
-
-.. image:: /auto_examples/images/sphx_glr_plot_otda_mapping_001.png
- :align: center
-
-
-
-
-plot transported samples
+ 0|4.504654e+02|0.000000e+00
+ 1|4.461571e+02|-9.564116e-03
+ 2|4.459105e+02|-5.528043e-04
+ 3|4.457895e+02|-2.712398e-04
+ 4|4.457041e+02|-1.914829e-04
+ 5|4.456431e+02|-1.369704e-04
+ 6|4.456032e+02|-8.944784e-05
+ 7|4.455700e+02|-7.447824e-05
+ 8|4.455447e+02|-5.688965e-05
+ 9|4.455229e+02|-4.890051e-05
+ 10|4.455084e+02|-3.262490e-05
+
+
+Plot transported samples
#############################################################################
@@ -209,7 +208,7 @@ plot transported samples
-**Total running time of the script:** ( 0 minutes 0.853 seconds)
+**Total running time of the script:** ( 0 minutes 0.869 seconds)
@@ -228,4 +227,4 @@ plot transported samples
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
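Editor's note: as a quick reference for the pattern plot_otda_mapping.py documents, here is a minimal sketch distilled from the calls visible in the hunks above. The toy data is a hypothetical stand-in; the MappingTransport hyperparameters simply mirror the example.

.. code-block:: python

    import numpy as np
    import ot

    # hypothetical stand-ins for the Xs / Xt / Xs_new arrays built in the example
    rng = np.random.RandomState(0)
    Xs = rng.randn(100, 2)
    Xt = rng.randn(100, 2) + 4
    Xs_new = rng.randn(10, 2)  # out-of-sample source points

    # linear mapping estimation, with the same settings as in the example
    ot_mapping = ot.da.MappingTransport(kernel="linear", mu=1e0, eta=1e-8,
                                        bias=True, max_iter=20)
    ot_mapping.fit(Xs=Xs, Xt=Xt)

    # training samples go through the barycentric mapping ...
    transp_Xs = ot_mapping.transform(Xs=Xs)
    # ... while new samples go through the estimated linear map
    transp_Xs_new = ot_mapping.transform(Xs=Xs_new)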
diff --git a/docs/source/auto_examples/plot_otda_mapping_colors_images.ipynb b/docs/source/auto_examples/plot_otda_mapping_colors_images.ipynb
index 4b2ec02..c8c1d95 100644
--- a/docs/source/auto_examples/plot_otda_mapping_colors_images.ipynb
+++ b/docs/source/auto_examples/plot_otda_mapping_colors_images.ipynb
@@ -15,7 +15,7 @@
},
{
"source": [
- "\n====================================================================================\nOT for domain adaptation with image color adaptation [6] with mapping estimation [8]\n====================================================================================\n\n[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized\n discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3),\n 1853-1882.\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, \"Mapping estimation for\n discrete optimal transport\", Neural Information Processing Systems (NIPS),\n 2016.\n\n\n"
+ "\n# OT for image color adaptation with mapping estimation \n\n\nOT for domain adaptation with image color adaptation [6] with mapping \nestimation [8].\n\n[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized\n discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3),\n 1853-1882.\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, \"Mapping estimation for\n discrete optimal transport\", Neural Information Processing Systems (NIPS),\n 2016.\n\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -69,7 +69,7 @@
},
{
"source": [
- "plot original images\n#############################################################################\n\n"
+ "Plot original images\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -87,7 +87,7 @@
},
{
"source": [
- "plot pixel values distribution\n#############################################################################\n\n"
+ "Plot pixel values distribution\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
@@ -105,7 +105,7 @@
},
{
"source": [
- "plot transformed images\n#############################################################################\n\n"
+ "Plot transformed images\n#############################################################################\n\n"
],
"cell_type": "markdown",
"metadata": {}
diff --git a/docs/source/auto_examples/plot_otda_mapping_colors_images.py b/docs/source/auto_examples/plot_otda_mapping_colors_images.py
index 936206c..162c24b 100644
--- a/docs/source/auto_examples/plot_otda_mapping_colors_images.py
+++ b/docs/source/auto_examples/plot_otda_mapping_colors_images.py
@@ -1,8 +1,11 @@
# -*- coding: utf-8 -*-
"""
-====================================================================================
-OT for domain adaptation with image color adaptation [6] with mapping estimation [8]
-====================================================================================
+=====================================================
+OT for image color adaptation with mapping estimation
+=====================================================
+
+OT for domain adaptation with image color adaptation [6] with mapping
+estimation [8].
[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized
discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3),
@@ -93,7 +96,7 @@ Image_mapping_gaussian = minmax(mat2im(X1tn, I1.shape))
##############################################################################
-# plot original images
+# Plot original images
##############################################################################
pl.figure(1, figsize=(6.4, 3))
@@ -110,7 +113,7 @@ pl.tight_layout()
##############################################################################
-# plot pixel values distribution
+# Plot pixel values distribution
##############################################################################
pl.figure(2, figsize=(6.4, 5))
@@ -132,7 +135,7 @@ pl.tight_layout()
##############################################################################
-# plot transformed images
+# Plot transformed images
##############################################################################
pl.figure(2, figsize=(10, 5))
diff --git a/docs/source/auto_examples/plot_otda_mapping_colors_images.rst b/docs/source/auto_examples/plot_otda_mapping_colors_images.rst
index 1107067..29823f1 100644
--- a/docs/source/auto_examples/plot_otda_mapping_colors_images.rst
+++ b/docs/source/auto_examples/plot_otda_mapping_colors_images.rst
@@ -3,9 +3,12 @@
.. _sphx_glr_auto_examples_plot_otda_mapping_colors_images.py:
-====================================================================================
-OT for domain adaptation with image color adaptation [6] with mapping estimation [8]
-====================================================================================
+=====================================================
+OT for image color adaptation with mapping estimation
+=====================================================
+
+OT for domain adaptation with image color adaptation [6] with mapping
+estimation [8].
[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized
discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3),
@@ -129,42 +132,42 @@ Domain adaptation for pixel distribution transfer
It. |Loss |Delta loss
--------------------------------
- 0|3.680514e+02|0.000000e+00
- 1|3.592359e+02|-2.395185e-02
- 2|3.590581e+02|-4.947749e-04
- 3|3.589663e+02|-2.556471e-04
- 4|3.589095e+02|-1.582289e-04
- 5|3.588707e+02|-1.081994e-04
- 6|3.588423e+02|-7.911661e-05
- 7|3.588206e+02|-6.055473e-05
- 8|3.588034e+02|-4.778202e-05
- 9|3.587895e+02|-3.886420e-05
- 10|3.587781e+02|-3.182249e-05
- 11|3.587684e+02|-2.695669e-05
- 12|3.587602e+02|-2.298642e-05
- 13|3.587530e+02|-1.993240e-05
- 14|3.587468e+02|-1.736014e-05
- 15|3.587413e+02|-1.518037e-05
- 16|3.587365e+02|-1.358038e-05
- 17|3.587321e+02|-1.215346e-05
- 18|3.587282e+02|-1.091639e-05
- 19|3.587278e+02|-9.877929e-07
+ 0|3.680512e+02|0.000000e+00
+ 1|3.592454e+02|-2.392562e-02
+ 2|3.590671e+02|-4.960473e-04
+ 3|3.589736e+02|-2.604894e-04
+ 4|3.589161e+02|-1.602816e-04
+ 5|3.588766e+02|-1.099971e-04
+ 6|3.588476e+02|-8.084400e-05
+ 7|3.588256e+02|-6.131161e-05
+ 8|3.588083e+02|-4.807549e-05
+ 9|3.587943e+02|-3.899414e-05
+ 10|3.587827e+02|-3.245280e-05
+ 11|3.587729e+02|-2.721256e-05
+ 12|3.587646e+02|-2.316249e-05
+ 13|3.587574e+02|-2.000192e-05
+ 14|3.587512e+02|-1.748898e-05
+ 15|3.587457e+02|-1.535131e-05
+ 16|3.587408e+02|-1.366515e-05
+ 17|3.587364e+02|-1.210563e-05
+ 18|3.587325e+02|-1.097138e-05
+ 19|3.587310e+02|-4.099596e-06
It. |Loss |Delta loss
--------------------------------
- 0|3.784725e+02|0.000000e+00
- 1|3.646380e+02|-3.655332e-02
- 2|3.642858e+02|-9.660434e-04
- 3|3.641516e+02|-3.683776e-04
- 4|3.640785e+02|-2.008220e-04
- 5|3.640320e+02|-1.276966e-04
- 6|3.639999e+02|-8.796173e-05
- 7|3.639764e+02|-6.455658e-05
- 8|3.639583e+02|-4.976436e-05
- 9|3.639440e+02|-3.946556e-05
- 10|3.639322e+02|-3.222132e-05
-
-
-plot original images
+ 0|3.784805e+02|0.000000e+00
+ 1|3.646476e+02|-3.654847e-02
+ 2|3.642970e+02|-9.615381e-04
+ 3|3.641622e+02|-3.699897e-04
+ 4|3.640886e+02|-2.021154e-04
+ 5|3.640419e+02|-1.280913e-04
+ 6|3.640096e+02|-8.898145e-05
+ 7|3.639858e+02|-6.514301e-05
+ 8|3.639677e+02|-4.977195e-05
+ 9|3.639534e+02|-3.936050e-05
+ 10|3.639417e+02|-3.205223e-05
+
+
+Plot original images
#############################################################################
@@ -194,7 +197,7 @@ plot original images
-plot pixel values distribution
+Plot pixel values distribution
#############################################################################
@@ -229,7 +232,7 @@ plot pixel values distribution
-plot transformed images
+Plot transformed images
#############################################################################
@@ -280,7 +283,7 @@ plot transformed images
-**Total running time of the script:** ( 2 minutes 45.618 seconds)
+**Total running time of the script:** ( 2 minutes 12.535 seconds)
@@ -299,4 +302,4 @@ plot transformed images
.. rst-class:: sphx-glr-signature
- `Generated by Sphinx-Gallery <http://sphinx-gallery.readthedocs.io>`_
+ `Generated by Sphinx-Gallery <https://sphinx-gallery.readthedocs.io>`_
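Editor's note: the fitting code of plot_otda_mapping_colors_images.py is not visible in this part of the diff, so here is a minimal sketch of the pipeline it follows. The image paths and the im2mat / mat2im / minmax helpers are assumptions re-declared here for self-containedness; the MappingTransport call mirrors the linear-kernel settings used elsewhere in this commit.

.. code-block:: python

    import numpy as np
    import matplotlib.pylab as pl
    import ot


    def im2mat(I):
        """Flatten an image into a (n_pixels, 3) matrix."""
        return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))


    def mat2im(X, shape):
        """Reshape a pixel matrix back into an image."""
        return X.reshape(shape)


    def minmax(I):
        return np.clip(I, 0, 1)


    # assumed image paths, following the POT example data layout
    I1 = pl.imread('../data/ocean_day.jpg').astype(np.float64) / 256
    I2 = pl.imread('../data/ocean_sunset.jpg').astype(np.float64) / 256

    X1, X2 = im2mat(I1), im2mat(I2)

    # subsample pixels to keep the mapping estimation tractable
    rng = np.random.RandomState(42)
    Xs = X1[rng.randint(X1.shape[0], size=500), :]
    Xt = X2[rng.randint(X2.shape[0], size=500), :]

    # estimate a linear mapping between the two color distributions [8]
    ot_map = ot.da.MappingTransport(kernel="linear", mu=1e0, eta=1e-8,
                                    bias=True, max_iter=20)
    ot_map.fit(Xs=Xs, Xt=Xt)

    # apply the learned map to every pixel of the first image
    X1t = ot_map.transform(Xs=X1)
    I1t = minmax(mat2im(X1t, I1.shape))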
diff --git a/docs/source/conf.py b/docs/source/conf.py
index ffdb1a2..0a822e5 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -33,7 +33,7 @@ class Mock(MagicMock):
return MagicMock()
MOCK_MODULES = ['ot.lp.emd_wrap','autograd','pymanopt','cudamat','autograd.numpy','pymanopt.manifolds','pymanopt.solvers']
# 'autograd.numpy','pymanopt.manifolds','pymanopt.solvers',
-sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
+##sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# !!!!
# If extensions (or modules to document with autodoc) are in another directory,
@@ -328,7 +328,7 @@ intersphinx_mapping = {'https://docs.python.org/': None}
sphinx_gallery_conf = {
'examples_dirs': ['../../examples','../../examples/da'],
'gallery_dirs': 'auto_examples',
- 'mod_example_dir': '../modules/generated/',
+ 'backreferences_dir': '../modules/generated/',
'reference_url': {
'numpy': 'http://docs.scipy.org/doc/numpy-1.9.1',
'scipy': 'http://docs.scipy.org/doc/scipy-0.17.0/reference'}
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
deleted file mode 100644
index f209543..0000000
--- a/docs/source/examples.rst
+++ /dev/null
@@ -1,39 +0,0 @@
-
-
-Examples
-============
-
-1D Optimal transport
----------------------
-
-.. literalinclude:: ../../examples/demo_OT_1D.py
-
-2D Optimal transport on empirical distributions
------------------------------------------------
-
-.. literalinclude:: ../../examples/demo_OT_2D_samples.py
-
-1D Wasserstein barycenter
--------------------------
-
-.. literalinclude:: ../../examples/demo_barycenter_1D.py
-
-OT with user provided regularization
-------------------------------------
-
-.. literalinclude:: ../../examples/demo_optim_OTreg.py
-
-Domain adaptation with optimal transport
-----------------------------------------
-
-.. literalinclude:: ../../examples/demo_OTDA_classes.py
-
-Color transfer in images
-------------------------
-
-.. literalinclude:: ../../examples/demo_OTDA_color_images.py
-
-OT mapping estimation for domain adaptation
--------------------------------------------
-
-.. literalinclude:: ../../examples/demo_OTDA_mapping.py
diff --git a/examples/README.txt b/examples/README.txt
index f8643b8..c3d556d 100644
--- a/examples/README.txt
+++ b/examples/README.txt
@@ -1,2 +1,4 @@
POT Examples
============
+
+This is a gallery of all the POT example files.
diff --git a/examples/plot_OT_1D.py b/examples/plot_OT_1D.py
index a1473c4..a63f29a 100644
--- a/examples/plot_OT_1D.py
+++ b/examples/plot_OT_1D.py
@@ -4,7 +4,7 @@
1D optimal transport
====================
-This example illustrate the computation of EMD and Sinkhorn transport plans
+This example illustrates the computation of EMD and Sinkhorn transport plans
and their visualization.
"""
diff --git a/examples/plot_OT_2D_samples.py b/examples/plot_OT_2D_samples.py
index a913b8c..f57d631 100644
--- a/examples/plot_OT_2D_samples.py
+++ b/examples/plot_OT_2D_samples.py
@@ -4,6 +4,9 @@
2D Optimal transport between empirical distributions
====================================================
+Illustration of 2D optimal transport between distributions that are weighted
+sums of Diracs. The OT matrix is plotted with the samples.
+
"""
# Author: Remi Flamary <remi.flamary@unice.fr>
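Editor's note: the new description of plot_OT_2D_samples.py refers to OT between weighted sums of Diracs; a minimal sketch of that discrete setting follows. The Gaussian clouds here are generated directly with numpy as stand-ins for the ot.datasets samples used by the example, and the plotting call in the final comment is the one visible in the plot_OT_L1_vs_L2.py hunks below.

.. code-block:: python

    import numpy as np
    import ot

    n = 50
    rng = np.random.RandomState(0)

    xs = rng.randn(n, 2)                                  # source samples
    xt = rng.randn(n, 2).dot([[1., .8], [0., 1.]]) + 4.   # target samples

    a, b = ot.unif(n), ot.unif(n)    # uniform weights on the Diracs

    M = ot.dist(xs, xt)              # squared Euclidean cost by default
    M /= M.max()

    G0 = ot.emd(a, b, M)             # exact OT matrix
    Gs = ot.sinkhorn(a, b, M, 1e-1)  # entropic OT matrix

    # the coupling can be overlaid on the samples with
    # ot.plot.plot2D_samples_mat(xs, xt, G0, c=[.5, .5, 1])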
diff --git a/examples/plot_OT_L1_vs_L2.py b/examples/plot_OT_L1_vs_L2.py
index dfc9462..77bde22 100644
--- a/examples/plot_OT_L1_vs_L2.py
+++ b/examples/plot_OT_L1_vs_L2.py
@@ -4,6 +4,8 @@
2D Optimal transport for different metrics
==========================================
+2D OT on empirical distributions with different ground metrics.
+
Stole the figure idea from Fig. 1 and 2 in
https://arxiv.org/pdf/1706.07650.pdf
@@ -18,98 +20,190 @@ import numpy as np
import matplotlib.pylab as pl
import ot
-#%% parameters and data generation
-
-for data in range(2):
-
- if data:
- n = 20 # nb samples
- xs = np.zeros((n, 2))
- xs[:, 0] = np.arange(n) + 1
- xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...
-
- xt = np.zeros((n, 2))
- xt[:, 1] = np.arange(n) + 1
- else:
-
- n = 50 # nb samples
- xtot = np.zeros((n + 1, 2))
- xtot[:, 0] = np.cos(
- (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
- xtot[:, 1] = np.sin(
- (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
-
- xs = xtot[:n, :]
- xt = xtot[1:, :]
-
- a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
-
- # loss matrix
- M1 = ot.dist(xs, xt, metric='euclidean')
- M1 /= M1.max()
-
- # loss matrix
- M2 = ot.dist(xs, xt, metric='sqeuclidean')
- M2 /= M2.max()
-
- # loss matrix
- Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
- Mp /= Mp.max()
-
- #%% plot samples
-
- pl.figure(1 + 3 * data, figsize=(7, 3))
- pl.clf()
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- pl.title('Source and traget distributions')
-
- pl.figure(2 + 3 * data, figsize=(7, 3))
-
- pl.subplot(1, 3, 1)
- pl.imshow(M1, interpolation='nearest')
- pl.title('Euclidean cost')
-
- pl.subplot(1, 3, 2)
- pl.imshow(M2, interpolation='nearest')
- pl.title('Squared Euclidean cost')
-
- pl.subplot(1, 3, 3)
- pl.imshow(Mp, interpolation='nearest')
- pl.title('Sqrt Euclidean cost')
- pl.tight_layout()
-
- #%% EMD
- G1 = ot.emd(a, b, M1)
- G2 = ot.emd(a, b, M2)
- Gp = ot.emd(a, b, Mp)
-
- pl.figure(3 + 3 * data, figsize=(7, 3))
-
- pl.subplot(1, 3, 1)
- ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT Euclidean')
-
- pl.subplot(1, 3, 2)
- ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT squared Euclidean')
-
- pl.subplot(1, 3, 3)
- ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
- pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
- pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
- pl.axis('equal')
- # pl.legend(loc=0)
- pl.title('OT sqrt Euclidean')
- pl.tight_layout()
+##############################################################################
+# Dataset 1: uniform sampling
+##############################################################################
+
+n = 20 # nb samples
+xs = np.zeros((n, 2))
+xs[:, 0] = np.arange(n) + 1
+xs[:, 1] = (np.arange(n) + 1) * -0.001 # to make it strictly convex...
+
+xt = np.zeros((n, 2))
+xt[:, 1] = np.arange(n) + 1
+
+a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
+
+# loss matrix
+M1 = ot.dist(xs, xt, metric='euclidean')
+M1 /= M1.max()
+
+# loss matrix
+M2 = ot.dist(xs, xt, metric='sqeuclidean')
+M2 /= M2.max()
+
+# loss matrix
+Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
+Mp /= Mp.max()
+
+# Data
+pl.figure(1, figsize=(7, 3))
+pl.clf()
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+pl.title('Source and target distributions')
+
+
+# Cost matrices
+pl.figure(2, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+pl.imshow(M1, interpolation='nearest')
+pl.title('Euclidean cost')
+
+pl.subplot(1, 3, 2)
+pl.imshow(M2, interpolation='nearest')
+pl.title('Squared Euclidean cost')
+
+pl.subplot(1, 3, 3)
+pl.imshow(Mp, interpolation='nearest')
+pl.title('Sqrt Euclidean cost')
+pl.tight_layout()
+
+##############################################################################
+# Dataset 1: Plot OT Matrices
+##############################################################################
+
+
+
+#%% EMD
+G1 = ot.emd(a, b, M1)
+G2 = ot.emd(a, b, M2)
+Gp = ot.emd(a, b, Mp)
+
+# OT matrices
+pl.figure(3, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT Euclidean')
+
+pl.subplot(1, 3, 2)
+ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT squared Euclidean')
+
+pl.subplot(1, 3, 3)
+ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT sqrt Euclidean')
+pl.tight_layout()
+
+pl.show()
+
+
+##############################################################################
+# Dataset 2: Partial circle
+##############################################################################
+
+n = 50 # nb samples
+xtot = np.zeros((n + 1, 2))
+xtot[:, 0] = np.cos(
+ (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
+xtot[:, 1] = np.sin(
+ (np.arange(n + 1) + 1.0) * 0.9 / (n + 2) * 2 * np.pi)
+
+xs = xtot[:n, :]
+xt = xtot[1:, :]
+
+a, b = ot.unif(n), ot.unif(n) # uniform distribution on samples
+
+# loss matrix
+M1 = ot.dist(xs, xt, metric='euclidean')
+M1 /= M1.max()
+
+# loss matrix
+M2 = ot.dist(xs, xt, metric='sqeuclidean')
+M2 /= M2.max()
+
+# loss matrix
+Mp = np.sqrt(ot.dist(xs, xt, metric='euclidean'))
+Mp /= Mp.max()
+
+
+# Data
+pl.figure(4, figsize=(7, 3))
+pl.clf()
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+pl.title('Source and target distributions')
+
+
+# Cost matrices
+pl.figure(5, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+pl.imshow(M1, interpolation='nearest')
+pl.title('Euclidean cost')
+
+pl.subplot(1, 3, 2)
+pl.imshow(M2, interpolation='nearest')
+pl.title('Squared Euclidean cost')
+
+pl.subplot(1, 3, 3)
+pl.imshow(Mp, interpolation='nearest')
+pl.title('Sqrt Euclidean cost')
+pl.tight_layout()
+
+##############################################################################
+# Dataset 2: Plot OT Matrices
+##############################################################################
+
+
+
+#%% EMD
+G1 = ot.emd(a, b, M1)
+G2 = ot.emd(a, b, M2)
+Gp = ot.emd(a, b, Mp)
+
+# OT matrices
+pl.figure(6, figsize=(7, 3))
+
+pl.subplot(1, 3, 1)
+ot.plot.plot2D_samples_mat(xs, xt, G1, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT Euclidean')
+
+pl.subplot(1, 3, 2)
+ot.plot.plot2D_samples_mat(xs, xt, G2, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT squared Euclidean')
+
+pl.subplot(1, 3, 3)
+ot.plot.plot2D_samples_mat(xs, xt, Gp, c=[.5, .5, 1])
+pl.plot(xs[:, 0], xs[:, 1], '+b', label='Source samples')
+pl.plot(xt[:, 0], xt[:, 1], 'xr', label='Target samples')
+pl.axis('equal')
+# pl.legend(loc=0)
+pl.title('OT sqrt Euclidean')
+pl.tight_layout()
pl.show()
diff --git a/examples/plot_barycenter_1D.py b/examples/plot_barycenter_1D.py
index f3be247..142b05e 100644
--- a/examples/plot_barycenter_1D.py
+++ b/examples/plot_barycenter_1D.py
@@ -4,7 +4,7 @@
1D Wasserstein barycenter demo
==============================
-This example illustrate the computation of regularized Wassersyein Barycenter
+This example illustrates the computation of a regularized Wasserstein barycenter
as proposed in [3].
@@ -25,6 +25,9 @@ import ot
from mpl_toolkits.mplot3d import Axes3D # noqa
from matplotlib.collections import PolyCollection
+##############################################################################
+# Generate data
+##############################################################################
#%% parameters
@@ -45,6 +48,10 @@ n_distributions = A.shape[1]
M = ot.utils.dist0(n)
M /= M.max()
+##############################################################################
+# Plot data
+##############################################################################
+
#%% plot the distributions
pl.figure(1, figsize=(6.4, 3))
@@ -53,6 +60,10 @@ for i in range(n_distributions):
pl.title('Distributions')
pl.tight_layout()
+##############################################################################
+# Barycenter computation
+##############################################################################
+
#%% barycenter computation
alpha = 0.2 # 0<=alpha<=1
@@ -79,6 +90,10 @@ pl.legend()
pl.title('Barycenters')
pl.tight_layout()
+##############################################################################
+# Barycentric interpolation
+##############################################################################
+
#%% barycenter interpolation
n_alpha = 11
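Editor's note: the barycenter computation itself does not appear in these hunks, so here is a minimal sketch of the entropy-regularized barycenter from [3] that the example computes, assuming the ot.bregman.barycenter signature (histogram matrix A, cost M, regularization, weights). The hand-built Gaussian histograms are hypothetical stand-ins for the example's data.

.. code-block:: python

    import numpy as np
    import ot

    n = 100
    x = np.arange(n, dtype=np.float64)

    # two 1D Gaussian histograms stacked as columns of A
    a1 = np.exp(-(x - 20) ** 2 / (2 * 5 ** 2))
    a1 /= a1.sum()
    a2 = np.exp(-(x - 60) ** 2 / (2 * 8 ** 2))
    a2 /= a2.sum()
    A = np.vstack((a1, a2)).T

    # ground cost between bin locations
    M = ot.utils.dist0(n)
    M /= M.max()

    alpha = 0.5                             # interpolation weight
    weights = np.array([1 - alpha, alpha])

    # entropy-regularized Wasserstein barycenter (Sinkhorn-like iterations)
    bary_wass = ot.bregman.barycenter(A, M, 1e-3, weights)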
diff --git a/examples/plot_compute_emd.py b/examples/plot_compute_emd.py
index 704da0e..b688f93 100644
--- a/examples/plot_compute_emd.py
+++ b/examples/plot_compute_emd.py
@@ -4,6 +4,10 @@
Plot multiple EMD
=================
+Shows how to compute multiple EMD and Sinkhorn losses with two different
+ground metrics and plot their values for different distributions.
+
+
"""
# Author: Remi Flamary <remi.flamary@unice.fr>
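Editor's note: the "multiple EMD" in the new description relies on ot.emd2 returning one loss value per target histogram when its second argument is a matrix of histograms; a minimal sketch of that pattern, with hand-built 1D Gaussians as stand-in data, is given below. The Sinkhorn losses are computed analogously in the example with the Sinkhorn loss helper of the installed POT version, which is left out here to keep the sketch version-agnostic.

.. code-block:: python

    import numpy as np
    import ot

    n = 100
    x = np.arange(n, dtype=np.float64)

    # one source histogram ...
    a = np.exp(-(x - 20) ** 2 / (2 * 5 ** 2))
    a /= a.sum()

    # ... and a batch of shifted target histograms, one per column of B
    means = np.linspace(30, 80, 10)
    B = np.vstack([np.exp(-(x - m) ** 2 / (2 * 5 ** 2)) for m in means]).T
    B /= B.sum(axis=0)

    # two different ground metrics on the bin locations
    xr = x.reshape((n, 1))
    M1 = ot.dist(xr, xr, metric='euclidean')
    M1 /= M1.max()
    M2 = ot.dist(xr, xr, metric='sqeuclidean')
    M2 /= M2.max()

    # one EMD value per target histogram, for each metric
    d_emd1 = ot.emd2(a, B, M1)
    d_emd2 = ot.emd2(a, B, M2)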
diff --git a/examples/plot_optim_OTreg.py b/examples/plot_optim_OTreg.py
index 95bcdaf..b362662 100644
--- a/examples/plot_optim_OTreg.py
+++ b/examples/plot_optim_OTreg.py
@@ -4,8 +4,8 @@
Regularized OT with generic solver
==================================
-This example illustrate the use of the generic solver for regularized OT with
-user designed regularization term. It uses Conditional gradient as in [6] and
+Illustrates the use of the generic solver for regularized OT with a
+user-designed regularization term. It uses Conditional gradient as in [6] and
generalized Conditional Gradient as proposed in [5][7].
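Editor's note: for the user-designed regularization mentioned just above, the generic solvers take the regularizer and its gradient as callbacks. A minimal sketch, assuming the ot.optim.cg and ot.optim.gcg signatures (histograms, cost, regularization strength(s), f, df) and using a squared l2 regularizer as in the example:

.. code-block:: python

    import numpy as np
    import ot

    n = 100
    x = np.arange(n, dtype=np.float64)

    a = np.exp(-(x - 20) ** 2 / (2 * 5 ** 2))
    a /= a.sum()
    b = np.exp(-(x - 60) ** 2 / (2 * 10 ** 2))
    b /= b.sum()

    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()


    # user-designed (squared l2) regularization term and its gradient
    def f(G):
        return 0.5 * np.sum(G ** 2)


    def df(G):
        return G


    # conditional gradient as in [6]: min_G <G, M> + reg * f(G)
    G_l2 = ot.optim.cg(a, b, M, 1e-1, f, df, verbose=True)

    # generalized conditional gradient as in [5][7]: adds an entropic term
    G_el2 = ot.optim.gcg(a, b, M, 1e-3, 1e-1, f, df, verbose=True)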
diff --git a/examples/plot_otda_mapping.py b/examples/plot_otda_mapping.py
index e0da2d8..e78fef4 100644
--- a/examples/plot_otda_mapping.py
+++ b/examples/plot_otda_mapping.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
"""
-===============================================
-OT mapping estimation for domain adaptation [8]
-===============================================
+===========================================
+OT mapping estimation for domain adaptation
+===========================================
This example presents how to use MappingTransport to estimate at the same
time both the coupling transport and approximate the transport map with either
@@ -24,7 +24,7 @@ import ot
##############################################################################
-# generate data
+# Generate data
##############################################################################
n_source_samples = 100
@@ -44,7 +44,7 @@ Xt[yt == 2] *= 3
Xt = Xt + 4
##############################################################################
-# plot data
+# Plot data
##############################################################################
pl.figure(1, (10, 5))
diff --git a/examples/plot_otda_mapping_colors_images.py b/examples/plot_otda_mapping_colors_images.py
index a8b2ca8..162c24b 100644
--- a/examples/plot_otda_mapping_colors_images.py
+++ b/examples/plot_otda_mapping_colors_images.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
"""
-===============================================
-OT for color adaptation with mapping estimation
-===============================================
+=====================================================
+OT for image color adaptation with mapping estimation
+=====================================================
OT for domain adaptation with image color adaptation [6] with mapping
estimation [8].