path: root/examples
author     Nicolas Courty <ncourty@irisa.fr>  2017-09-01 01:25:02 +0200
committer  GitHub <noreply@github.com>        2017-09-01 01:25:02 +0200
commit     986f46ddde3ce2f550cb56f66620df377326423d (patch)
tree       7fcff22d8a53d16b5b36fe395172fdc9a2c7e44d /examples
parent     bc68cc3e8b23ad7d542518ba8ffa665094d57663 (diff)
parent     16697047eff9326a0ecb483317c13a854a3d3a71 (diff)
Merge branch 'master' into gromov
Diffstat (limited to 'examples')
-rw-r--r--  examples/da/plot_otda_classes.py                150
-rw-r--r--  examples/da/plot_otda_color_images.py           165
-rw-r--r--  examples/da/plot_otda_d2.py                     173
-rw-r--r--  examples/da/plot_otda_mapping.py                126
-rw-r--r--  examples/da/plot_otda_mapping_colors_images.py  171
-rw-r--r--  examples/plot_OTDA_2D.py                        126
-rw-r--r--  examples/plot_OTDA_classes.py                   117
-rw-r--r--  examples/plot_OTDA_color_images.py              152
-rw-r--r--  examples/plot_OTDA_mapping.py                   124
-rw-r--r--  examples/plot_OTDA_mapping_color_images.py      169
10 files changed, 785 insertions, 688 deletions
diff --git a/examples/da/plot_otda_classes.py b/examples/da/plot_otda_classes.py
new file mode 100644
index 0000000..ec57a37
--- /dev/null
+++ b/examples/da/plot_otda_classes.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+"""
+========================
+OT for domain adaptation
+========================
+
+This example introduces a domain adaptation problem in a 2D setting and the
+4 OTDA approaches currently supported in POT.
+
+"""
+
+# Authors: Remi Flamary <remi.flamary@unice.fr>
+# Stanislas Chambon <stan.chambon@gmail.com>
+#
+# License: MIT License
+
+import matplotlib.pylab as pl
+import ot
+
+
+##############################################################################
+# generate data
+##############################################################################
+
+n_source_samples = 150
+n_target_samples = 150
+
+Xs, ys = ot.datasets.get_data_classif('3gauss', n_source_samples)
+Xt, yt = ot.datasets.get_data_classif('3gauss2', n_target_samples)
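+# Xs and Xt are (n_samples, 2) arrays of 2D points; ys and yt contain the
+# class labels of the source and target samples (three classes here)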
+
+
+##############################################################################
+# Instantiate the different transport algorithms and fit them
+##############################################################################
+
+# EMD Transport
+ot_emd = ot.da.EMDTransport()
+ot_emd.fit(Xs=Xs, Xt=Xt)
+
+# Sinkhorn Transport
+ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
+ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
+
+# Sinkhorn Transport with Group lasso regularization
+ot_lpl1 = ot.da.SinkhornLpl1Transport(reg_e=1e-1, reg_cl=1e0)
+ot_lpl1.fit(Xs=Xs, ys=ys, Xt=Xt)
+
+# Sinkhorn Transport with Group lasso regularization l1l2
+ot_l1l2 = ot.da.SinkhornL1l2Transport(reg_e=1e-1, reg_cl=2e0, max_iter=20,
+ verbose=True)
+ot_l1l2.fit(Xs=Xs, ys=ys, Xt=Xt)
+
+# transport source samples onto target samples
+transp_Xs_emd = ot_emd.transform(Xs=Xs)
+transp_Xs_sinkhorn = ot_sinkhorn.transform(Xs=Xs)
+transp_Xs_lpl1 = ot_lpl1.transform(Xs=Xs)
+transp_Xs_l1l2 = ot_l1l2.transform(Xs=Xs)
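+
+# Optional sanity check (a sketch, not part of the original example; it
+# assumes scikit-learn is installed): a simple 1-NN classifier trained on the
+# transported source samples can be scored on the labelled target samples
+from sklearn.neighbors import KNeighborsClassifier
+
+knn = KNeighborsClassifier(n_neighbors=1)
+knn.fit(transp_Xs_sinkhorn, ys)
+print('1-NN accuracy on target samples:', knn.score(Xt, yt))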
+
+
+##############################################################################
+# Fig 1 : plots source and target samples
+##############################################################################
+
+pl.figure(1, figsize=(10, 5))
+pl.subplot(1, 2, 1)
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.xticks([])
+pl.yticks([])
+pl.legend(loc=0)
+pl.title('Source samples')
+
+pl.subplot(1, 2, 2)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.xticks([])
+pl.yticks([])
+pl.legend(loc=0)
+pl.title('Target samples')
+pl.tight_layout()
+
+
+##############################################################################
+# Fig 2 : plot optimal couplings and transported samples
+##############################################################################
+
+param_img = {'interpolation': 'nearest', 'cmap': 'spectral'}
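+# note: the 'spectral' colormap has been removed from recent matplotlib
+# releases; if this raises an error, 'nipy_spectral' can be used instead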
+
+pl.figure(2, figsize=(15, 8))
+pl.subplot(2, 4, 1)
+pl.imshow(ot_emd.coupling_, **param_img)
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nEMDTransport')
+
+pl.subplot(2, 4, 2)
+pl.imshow(ot_sinkhorn.coupling_, **param_img)
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nSinkhornTransport')
+
+pl.subplot(2, 4, 3)
+pl.imshow(ot_lpl1.coupling_, **param_img)
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nSinkhornLpl1Transport')
+
+pl.subplot(2, 4, 4)
+pl.imshow(ot_l1l2.coupling_, **param_img)
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nSinkhornL1l2Transport')
+
+pl.subplot(2, 4, 5)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.3)
+pl.scatter(transp_Xs_emd[:, 0], transp_Xs_emd[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.xticks([])
+pl.yticks([])
+pl.title('Transported samples\nEMDTransport')
+pl.legend(loc="lower left")
+
+pl.subplot(2, 4, 6)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.3)
+pl.scatter(transp_Xs_sinkhorn[:, 0], transp_Xs_sinkhorn[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.xticks([])
+pl.yticks([])
+pl.title('Transported samples\nSinkhornTransport')
+
+pl.subplot(2, 4, 7)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.3)
+pl.scatter(transp_Xs_lpl1[:, 0], transp_Xs_lpl1[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.xticks([])
+pl.yticks([])
+pl.title('Transported samples\nSinkhornLpl1Transport')
+
+pl.subplot(2, 4, 8)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.3)
+pl.scatter(transp_Xs_l1l2[:, 0], transp_Xs_l1l2[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.xticks([])
+pl.yticks([])
+pl.title('Transported samples\nSinkhornL1l2Transport')
+pl.tight_layout()
+
+pl.show()
diff --git a/examples/da/plot_otda_color_images.py b/examples/da/plot_otda_color_images.py
new file mode 100644
index 0000000..3984afb
--- /dev/null
+++ b/examples/da/plot_otda_color_images.py
@@ -0,0 +1,165 @@
+# -*- coding: utf-8 -*-
+"""
+========================================================
+OT for domain adaptation with image color adaptation [6]
+========================================================
+
+This example presents a way of transferring colors between two images
+with Optimal Transport, as introduced in [6].
+
+[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014).
+Regularized discrete optimal transport.
+SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
+"""
+
+# Authors: Remi Flamary <remi.flamary@unice.fr>
+# Stanislas Chambon <stan.chambon@gmail.com>
+#
+# License: MIT License
+
+import numpy as np
+from scipy import ndimage
+import matplotlib.pylab as pl
+import ot
+
+
+r = np.random.RandomState(42)
+
+
+def im2mat(I):
+ """Converts and image to matrix (one pixel per line)"""
+ return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
+
+
+def mat2im(X, shape):
+ """Converts back a matrix to an image"""
+ return X.reshape(shape)
+
+
+def minmax(I):
+ return np.clip(I, 0, 1)
+
+
+##############################################################################
+# generate data
+##############################################################################
+
+# Loading images
+I1 = ndimage.imread('../../data/ocean_day.jpg').astype(np.float64) / 256
+I2 = ndimage.imread('../../data/ocean_sunset.jpg').astype(np.float64) / 256
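+# note: scipy.ndimage.imread is deprecated in recent SciPy releases; an
+# equivalent call (assuming the imageio package is available) would be e.g.
+# I1 = imageio.imread('../../data/ocean_day.jpg').astype(np.float64) / 256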
+
+X1 = im2mat(I1)
+X2 = im2mat(I2)
+
+# training samples
+nb = 1000
+idx1 = r.randint(X1.shape[0], size=(nb,))
+idx2 = r.randint(X2.shape[0], size=(nb,))
+
+Xs = X1[idx1, :]
+Xt = X2[idx2, :]
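+# only nb pixels per image are used to fit the transport plan, which keeps
+# the discrete OT problem (an nb x nb coupling) tractable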
+
+
+##############################################################################
+# Instantiate the different transport algorithms and fit them
+##############################################################################
+
+# EMDTransport
+ot_emd = ot.da.EMDTransport()
+ot_emd.fit(Xs=Xs, Xt=Xt)
+
+# SinkhornTransport
+ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
+ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
+
+# prediction between images (using out of sample prediction as in [6])
+transp_Xs_emd = ot_emd.transform(Xs=X1)
+transp_Xt_emd = ot_emd.inverse_transform(Xt=X2)
+
+transp_Xs_sinkhorn = ot_sinkhorn.transform(Xs=X1)
+transp_Xt_sinkhorn = ot_sinkhorn.inverse_transform(Xt=X2)
+
+I1t = minmax(mat2im(transp_Xs_emd, I1.shape))
+I2t = minmax(mat2im(transp_Xt_emd, I2.shape))
+
+I1te = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))
+I2te = minmax(mat2im(transp_Xt_sinkhorn, I2.shape))
+
+
+##############################################################################
+# plot original image
+##############################################################################
+
+pl.figure(1, figsize=(6.4, 3))
+
+pl.subplot(1, 2, 1)
+pl.imshow(I1)
+pl.axis('off')
+pl.title('Image 1')
+
+pl.subplot(1, 2, 2)
+pl.imshow(I2)
+pl.axis('off')
+pl.title('Image 2')
+
+
+##############################################################################
+# scatter plot of colors
+##############################################################################
+
+pl.figure(2, figsize=(6.4, 3))
+
+pl.subplot(1, 2, 1)
+pl.scatter(Xs[:, 0], Xs[:, 2], c=Xs)
+pl.axis([0, 1, 0, 1])
+pl.xlabel('Red')
+pl.ylabel('Blue')
+pl.title('Image 1')
+
+pl.subplot(1, 2, 2)
+pl.scatter(Xt[:, 0], Xt[:, 2], c=Xt)
+pl.axis([0, 1, 0, 1])
+pl.xlabel('Red')
+pl.ylabel('Blue')
+pl.title('Image 2')
+pl.tight_layout()
+
+
+##############################################################################
+# plot new images
+##############################################################################
+
+pl.figure(3, figsize=(8, 4))
+
+pl.subplot(2, 3, 1)
+pl.imshow(I1)
+pl.axis('off')
+pl.title('Image 1')
+
+pl.subplot(2, 3, 2)
+pl.imshow(I1t)
+pl.axis('off')
+pl.title('Image 1 Adapt')
+
+pl.subplot(2, 3, 3)
+pl.imshow(I1te)
+pl.axis('off')
+pl.title('Image 1 Adapt (reg)')
+
+pl.subplot(2, 3, 4)
+pl.imshow(I2)
+pl.axis('off')
+pl.title('Image 2')
+
+pl.subplot(2, 3, 5)
+pl.imshow(I2t)
+pl.axis('off')
+pl.title('Image 2 Adapt')
+
+pl.subplot(2, 3, 6)
+pl.imshow(I2te)
+pl.axis('off')
+pl.title('Image 2 Adapt (reg)')
+pl.tight_layout()
+
+pl.show()
diff --git a/examples/da/plot_otda_d2.py b/examples/da/plot_otda_d2.py
new file mode 100644
index 0000000..3daa0a6
--- /dev/null
+++ b/examples/da/plot_otda_d2.py
@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+"""
+==============================
+OT for empirical distributions
+==============================
+
+This example introduces a domain adaptation problem in a 2D setting. It
+illustrates the problem of domain adaptation and introduces some optimal
+transport approaches to solve it.
+
+Quantities such as optimal couplings, the main coupling coefficients and
+transported samples are displayed in order to give a visual understanding
+of what the transport methods are doing.
+"""
+
+# Authors: Remi Flamary <remi.flamary@unice.fr>
+# Stanislas Chambon <stan.chambon@gmail.com>
+#
+# License: MIT License
+
+import matplotlib.pylab as pl
+import ot
+
+
+##############################################################################
+# generate data
+##############################################################################
+
+n_samples_source = 150
+n_samples_target = 150
+
+Xs, ys = ot.datasets.get_data_classif('3gauss', n_samples_source)
+Xt, yt = ot.datasets.get_data_classif('3gauss2', n_samples_target)
+
+# Cost matrix
+M = ot.dist(Xs, Xt, metric='sqeuclidean')
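+# M has shape (n_samples_source, n_samples_target) and contains the squared
+# Euclidean distance between every pair of source and target samples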
+
+
+##############################################################################
+# Instantiate the different transport algorithms and fit them
+##############################################################################
+
+# EMD Transport
+ot_emd = ot.da.EMDTransport()
+ot_emd.fit(Xs=Xs, Xt=Xt)
+
+# Sinkhorn Transport
+ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
+ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
+
+# Sinkhorn Transport with Group lasso regularization
+ot_lpl1 = ot.da.SinkhornLpl1Transport(reg_e=1e-1, reg_cl=1e0)
+ot_lpl1.fit(Xs=Xs, ys=ys, Xt=Xt)
+
+# transport source samples onto target samples
+transp_Xs_emd = ot_emd.transform(Xs=Xs)
+transp_Xs_sinkhorn = ot_sinkhorn.transform(Xs=Xs)
+transp_Xs_lpl1 = ot_lpl1.transform(Xs=Xs)
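+
+# Illustration (a sketch, not part of the original example): for in-sample
+# source points, transform is the barycentric mapping, i.e. each source
+# sample is sent to the average of the target samples weighted by its
+# row-normalized coupling coefficients
+import numpy as np  # only needed for this check
+
+transp = ot_emd.coupling_ / ot_emd.coupling_.sum(axis=1, keepdims=True)
+print('Barycentric mapping check:', np.allclose(transp.dot(Xt), transp_Xs_emd))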
+
+
+##############################################################################
+# Fig 1 : plots source and target samples + matrix of pairwise distances
+##############################################################################
+
+pl.figure(1, figsize=(10, 10))
+pl.subplot(2, 2, 1)
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.xticks([])
+pl.yticks([])
+pl.legend(loc=0)
+pl.title('Source samples')
+
+pl.subplot(2, 2, 2)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.xticks([])
+pl.yticks([])
+pl.legend(loc=0)
+pl.title('Target samples')
+
+pl.subplot(2, 2, 3)
+pl.imshow(M, interpolation='nearest')
+pl.xticks([])
+pl.yticks([])
+pl.title('Matrix of pairwise distances')
+pl.tight_layout()
+
+
+##############################################################################
+# Fig 2 : plots optimal couplings for the different methods
+##############################################################################
+
+pl.figure(2, figsize=(10, 6))
+
+pl.subplot(2, 3, 1)
+pl.imshow(ot_emd.coupling_, interpolation='nearest')
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nEMDTransport')
+
+pl.subplot(2, 3, 2)
+pl.imshow(ot_sinkhorn.coupling_, interpolation='nearest')
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nSinkhornTransport')
+
+pl.subplot(2, 3, 3)
+pl.imshow(ot_lpl1.coupling_, interpolation='nearest')
+pl.xticks([])
+pl.yticks([])
+pl.title('Optimal coupling\nSinkhornLpl1Transport')
+
+pl.subplot(2, 3, 4)
+ot.plot.plot2D_samples_mat(Xs, Xt, ot_emd.coupling_, c=[.5, .5, 1])
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.xticks([])
+pl.yticks([])
+pl.title('Main coupling coefficients\nEMDTransport')
+
+pl.subplot(2, 3, 5)
+ot.plot.plot2D_samples_mat(Xs, Xt, ot_sinkhorn.coupling_, c=[.5, .5, 1])
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.xticks([])
+pl.yticks([])
+pl.title('Main coupling coefficients\nSinkhornTransport')
+
+pl.subplot(2, 3, 6)
+ot.plot.plot2D_samples_mat(Xs, Xt, ot_lpl1.coupling_, c=[.5, .5, 1])
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.xticks([])
+pl.yticks([])
+pl.title('Main coupling coefficients\nSinkhornLpl1Transport')
+pl.tight_layout()
+
+
+##############################################################################
+# Fig 3 : plot transported samples
+##############################################################################
+
+# display transported samples
+pl.figure(3, figsize=(10, 4))
+pl.subplot(1, 3, 1)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.5)
+pl.scatter(transp_Xs_emd[:, 0], transp_Xs_emd[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.title('Transported samples\nEMDTransport')
+pl.legend(loc=0)
+pl.xticks([])
+pl.yticks([])
+
+pl.subplot(1, 3, 2)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.5)
+pl.scatter(transp_Xs_sinkhorn[:, 0], transp_Xs_sinkhorn[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.title('Transported samples\nSinkhornTransport')
+pl.xticks([])
+pl.yticks([])
+
+pl.subplot(1, 3, 3)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=0.5)
+pl.scatter(transp_Xs_lpl1[:, 0], transp_Xs_lpl1[:, 1], c=ys,
+ marker='+', label='Transp samples', s=30)
+pl.title('Transported samples\nSinkhornLpl1Transport')
+pl.xticks([])
+pl.yticks([])
+
+pl.tight_layout()
+pl.show()
diff --git a/examples/da/plot_otda_mapping.py b/examples/da/plot_otda_mapping.py
new file mode 100644
index 0000000..09d2cb4
--- /dev/null
+++ b/examples/da/plot_otda_mapping.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+"""
+===============================================
+OT mapping estimation for domain adaptation [8]
+===============================================
+
+This example presents how to use MappingTransport to estimate, at the same
+time, both the coupling and an approximation of the transport map, with either
+a linear or a kernelized mapping, as introduced in [8].
+
+[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,
+ "Mapping estimation for discrete optimal transport",
+ Neural Information Processing Systems (NIPS), 2016.
+"""
+
+# Authors: Remi Flamary <remi.flamary@unice.fr>
+# Stanislas Chambon <stan.chambon@gmail.com>
+#
+# License: MIT License
+
+import numpy as np
+import matplotlib.pylab as pl
+import ot
+
+
+##############################################################################
+# generate data
+##############################################################################
+
+n_source_samples = 100
+n_target_samples = 100
+theta = 2 * np.pi / 20
+noise_level = 0.1
+
+Xs, ys = ot.datasets.get_data_classif(
+ 'gaussrot', n_source_samples, nz=noise_level)
+Xs_new, _ = ot.datasets.get_data_classif(
+ 'gaussrot', n_source_samples, nz=noise_level)
+Xt, yt = ot.datasets.get_data_classif(
+ 'gaussrot', n_target_samples, theta=theta, nz=noise_level)
+
+# one of the target modes changes its variance (no linear mapping)
+Xt[yt == 2] *= 3
+Xt = Xt + 4
+
+
+##############################################################################
+# Instantiate the different transport algorithms and fit them
+##############################################################################
+
+# MappingTransport with linear kernel
+ot_mapping_linear = ot.da.MappingTransport(
+ kernel="linear", mu=1e0, eta=1e-8, bias=True,
+ max_iter=20, verbose=True)
+
+ot_mapping_linear.fit(Xs=Xs, Xt=Xt)
+
+# for original source samples, transform applies barycentric mapping
+transp_Xs_linear = ot_mapping_linear.transform(Xs=Xs)
+
+# for new (out-of-sample) source samples, transform applies the estimated linear mapping
+transp_Xs_linear_new = ot_mapping_linear.transform(Xs=Xs_new)
+
+
+# MappingTransport with gaussian kernel
+ot_mapping_gaussian = ot.da.MappingTransport(
+ kernel="gaussian", eta=1e-5, mu=1e-1, bias=True, sigma=1,
+ max_iter=10, verbose=True)
+ot_mapping_gaussian.fit(Xs=Xs, Xt=Xt)
+
+# for original source samples, transform applies barycentric mapping
+transp_Xs_gaussian = ot_mapping_gaussian.transform(Xs=Xs)
+
+# for new (out-of-sample) source samples, transform applies the estimated gaussian mapping
+transp_Xs_gaussian_new = ot_mapping_gaussian.transform(Xs=Xs_new)
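+
+# Illustration (a sketch; the mapping_ attribute and its layout may vary
+# across POT versions): with kernel="linear" and bias=True, the learned map
+# acts on new samples as an affine transformation [Xs_new, 1] . mapping_
+check = np.hstack((Xs_new, np.ones((Xs_new.shape[0], 1)))).dot(
+    ot_mapping_linear.mapping_)
+print('Affine mapping check:', np.allclose(check, transp_Xs_linear_new))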
+
+
+##############################################################################
+# plot data
+##############################################################################
+
+pl.figure(1, (10, 5))
+pl.clf()
+pl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')
+pl.legend(loc=0)
+pl.title('Source and target distributions')
+
+
+##############################################################################
+# plot transported samples
+##############################################################################
+
+pl.figure(2)
+pl.clf()
+pl.subplot(2, 2, 1)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=.2)
+pl.scatter(transp_Xs_linear[:, 0], transp_Xs_linear[:, 1], c=ys, marker='+',
+ label='Mapped source samples')
+pl.title("Bary. mapping (linear)")
+pl.legend(loc=0)
+
+pl.subplot(2, 2, 2)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=.2)
+pl.scatter(transp_Xs_linear_new[:, 0], transp_Xs_linear_new[:, 1],
+ c=ys, marker='+', label='Learned mapping')
+pl.title("Estim. mapping (linear)")
+
+pl.subplot(2, 2, 3)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=.2)
+pl.scatter(transp_Xs_gaussian[:, 0], transp_Xs_gaussian[:, 1], c=ys,
+ marker='+', label='barycentric mapping')
+pl.title("Bary. mapping (kernel)")
+
+pl.subplot(2, 2, 4)
+pl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',
+ label='Target samples', alpha=.2)
+pl.scatter(transp_Xs_gaussian_new[:, 0], transp_Xs_gaussian_new[:, 1], c=ys,
+ marker='+', label='Learned mapping')
+pl.title("Estim. mapping (kernel)")
+pl.tight_layout()
+
+pl.show()
diff --git a/examples/da/plot_otda_mapping_colors_images.py b/examples/da/plot_otda_mapping_colors_images.py
new file mode 100644
index 0000000..a628b05
--- /dev/null
+++ b/examples/da/plot_otda_mapping_colors_images.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+"""
+====================================================================================
+OT for domain adaptation with image color adaptation [6] with mapping estimation [8]
+====================================================================================
+
+[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized
+ discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3),
+ 1853-1882.
+[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, "Mapping estimation for
+ discrete optimal transport", Neural Information Processing Systems (NIPS),
+ 2016.
+
+"""
+
+# Authors: Remi Flamary <remi.flamary@unice.fr>
+# Stanislas Chambon <stan.chambon@gmail.com>
+#
+# License: MIT License
+
+import numpy as np
+from scipy import ndimage
+import matplotlib.pylab as pl
+import ot
+
+r = np.random.RandomState(42)
+
+
+def im2mat(I):
+ """Converts and image to matrix (one pixel per line)"""
+ return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
+
+
+def mat2im(X, shape):
+ """Converts back a matrix to an image"""
+ return X.reshape(shape)
+
+
+def minmax(I):
+ return np.clip(I, 0, 1)
+
+
+##############################################################################
+# Generate data
+##############################################################################
+
+# Loading images
+I1 = ndimage.imread('../../data/ocean_day.jpg').astype(np.float64) / 256
+I2 = ndimage.imread('../../data/ocean_sunset.jpg').astype(np.float64) / 256
+
+
+X1 = im2mat(I1)
+X2 = im2mat(I2)
+
+# training samples
+nb = 1000
+idx1 = r.randint(X1.shape[0], size=(nb,))
+idx2 = r.randint(X2.shape[0], size=(nb,))
+
+Xs = X1[idx1, :]
+Xt = X2[idx2, :]
+
+
+##############################################################################
+# Domain adaptation for pixel distribution transfer
+##############################################################################
+
+# EMDTransport
+ot_emd = ot.da.EMDTransport()
+ot_emd.fit(Xs=Xs, Xt=Xt)
+transp_Xs_emd = ot_emd.transform(Xs=X1)
+Image_emd = minmax(mat2im(transp_Xs_emd, I1.shape))
+
+# SinkhornTransport
+ot_sinkhorn = ot.da.SinkhornTransport(reg_e=1e-1)
+ot_sinkhorn.fit(Xs=Xs, Xt=Xt)
+transp_Xs_sinkhorn = ot_sinkhorn.transform(Xs=X1)
+Image_sinkhorn = minmax(mat2im(transp_Xs_sinkhorn, I1.shape))
+
+ot_mapping_linear = ot.da.MappingTransport(
+ mu=1e0, eta=1e-8, bias=True, max_iter=20, verbose=True)
+ot_mapping_linear.fit(Xs=Xs, Xt=Xt)
+
+X1tl = ot_mapping_linear.transform(Xs=X1)
+Image_mapping_linear = minmax(mat2im(X1tl, I1.shape))
+
+ot_mapping_gaussian = ot.da.MappingTransport(
+ mu=1e0, eta=1e-2, sigma=1, bias=False, max_iter=10, verbose=True)
+ot_mapping_gaussian.fit(Xs=Xs, Xt=Xt)
+
+X1tn = ot_mapping_gaussian.transform(Xs=X1) # use the estimated mapping
+Image_mapping_gaussian = minmax(mat2im(X1tn, I1.shape))
+
+
+##############################################################################
+# plot original images
+##############################################################################
+
+pl.figure(1, figsize=(6.4, 3))
+pl.subplot(1, 2, 1)
+pl.imshow(I1)
+pl.axis('off')
+pl.title('Image 1')
+
+pl.subplot(1, 2, 2)
+pl.imshow(I2)
+pl.axis('off')
+pl.title('Image 2')
+pl.tight_layout()
+
+
+##############################################################################
+# plot pixel values distribution
+##############################################################################
+
+pl.figure(2, figsize=(6.4, 5))
+
+pl.subplot(1, 2, 1)
+pl.scatter(Xs[:, 0], Xs[:, 2], c=Xs)
+pl.axis([0, 1, 0, 1])
+pl.xlabel('Red')
+pl.ylabel('Blue')
+pl.title('Image 1')
+
+pl.subplot(1, 2, 2)
+pl.scatter(Xt[:, 0], Xt[:, 2], c=Xt)
+pl.axis([0, 1, 0, 1])
+pl.xlabel('Red')
+pl.ylabel('Blue')
+pl.title('Image 2')
+pl.tight_layout()
+
+
+##############################################################################
+# plot transformed images
+##############################################################################
+
+pl.figure(3, figsize=(10, 5))
+
+pl.subplot(2, 3, 1)
+pl.imshow(I1)
+pl.axis('off')
+pl.title('Im. 1')
+
+pl.subplot(2, 3, 4)
+pl.imshow(I2)
+pl.axis('off')
+pl.title('Im. 2')
+
+pl.subplot(2, 3, 2)
+pl.imshow(Image_emd)
+pl.axis('off')
+pl.title('EMDTransport')
+
+pl.subplot(2, 3, 5)
+pl.imshow(Image_sinkhorn)
+pl.axis('off')
+pl.title('SinkhornTransport')
+
+pl.subplot(2, 3, 3)
+pl.imshow(Image_mapping_linear)
+pl.axis('off')
+pl.title('MappingTransport (linear)')
+
+pl.subplot(2, 3, 6)
+pl.imshow(Image_mapping_gaussian)
+pl.axis('off')
+pl.title('MappingTransport (gaussian)')
+pl.tight_layout()
+
+pl.show()
diff --git a/examples/plot_OTDA_2D.py b/examples/plot_OTDA_2D.py
deleted file mode 100644
index f2108c6..0000000
--- a/examples/plot_OTDA_2D.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-==============================
-OT for empirical distributions
-==============================
-
-"""
-
-# Author: Remi Flamary <remi.flamary@unice.fr>
-#
-# License: MIT License
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-
-
-#%% parameters
-
-n = 150 # nb bins
-
-xs, ys = ot.datasets.get_data_classif('3gauss', n)
-xt, yt = ot.datasets.get_data_classif('3gauss2', n)
-
-a, b = ot.unif(n), ot.unif(n)
-# loss matrix
-M = ot.dist(xs, xt)
-# M/=M.max()
-
-#%% plot samples
-
-pl.figure(1)
-pl.subplot(2, 2, 1)
-pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.legend(loc=0)
-pl.title('Source distributions')
-
-pl.subplot(2, 2, 2)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o', label='Target samples')
-pl.legend(loc=0)
-pl.title('target distributions')
-
-pl.figure(2)
-pl.imshow(M, interpolation='nearest')
-pl.title('Cost matrix M')
-
-
-#%% OT estimation
-
-# EMD
-G0 = ot.emd(a, b, M)
-
-# sinkhorn
-lambd = 1e-1
-Gs = ot.sinkhorn(a, b, M, lambd)
-
-
-# Group lasso regularization
-reg = 1e-1
-eta = 1e0
-Gg = ot.da.sinkhorn_lpl1_mm(a, ys.astype(np.int), b, M, reg, eta)
-
-
-#%% visu matrices
-
-pl.figure(3)
-
-pl.subplot(2, 3, 1)
-pl.imshow(G0, interpolation='nearest')
-pl.title('OT matrix ')
-
-pl.subplot(2, 3, 2)
-pl.imshow(Gs, interpolation='nearest')
-pl.title('OT matrix Sinkhorn')
-
-pl.subplot(2, 3, 3)
-pl.imshow(Gg, interpolation='nearest')
-pl.title('OT matrix Group lasso')
-
-pl.subplot(2, 3, 4)
-ot.plot.plot2D_samples_mat(xs, xt, G0, c=[.5, .5, 1])
-pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o', label='Target samples')
-
-
-pl.subplot(2, 3, 5)
-ot.plot.plot2D_samples_mat(xs, xt, Gs, c=[.5, .5, 1])
-pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o', label='Target samples')
-
-pl.subplot(2, 3, 6)
-ot.plot.plot2D_samples_mat(xs, xt, Gg, c=[.5, .5, 1])
-pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o', label='Target samples')
-pl.tight_layout()
-
-#%% sample interpolation
-
-xst0 = n * G0.dot(xt)
-xsts = n * Gs.dot(xt)
-xstg = n * Gg.dot(xt)
-
-pl.figure(4, figsize=(8, 3))
-pl.subplot(1, 3, 1)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.5)
-pl.scatter(xst0[:, 0], xst0[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples')
-pl.legend(loc=0)
-
-pl.subplot(1, 3, 2)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.5)
-pl.scatter(xsts[:, 0], xsts[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples Sinkhorn')
-
-pl.subplot(1, 3, 3)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.5)
-pl.scatter(xstg[:, 0], xstg[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples Grouplasso')
-pl.tight_layout()
-pl.show()
diff --git a/examples/plot_OTDA_classes.py b/examples/plot_OTDA_classes.py
deleted file mode 100644
index 53e4bae..0000000
--- a/examples/plot_OTDA_classes.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-========================
-OT for domain adaptation
-========================
-
-"""
-
-# Author: Remi Flamary <remi.flamary@unice.fr>
-#
-# License: MIT License
-
-import matplotlib.pylab as pl
-import ot
-
-
-#%% parameters
-
-n = 150 # nb samples in source and target datasets
-
-xs, ys = ot.datasets.get_data_classif('3gauss', n)
-xt, yt = ot.datasets.get_data_classif('3gauss2', n)
-
-
-#%% plot samples
-
-pl.figure(1, figsize=(6.4, 3))
-
-pl.subplot(1, 2, 1)
-pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.legend(loc=0)
-pl.title('Source distributions')
-
-pl.subplot(1, 2, 2)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o', label='Target samples')
-pl.legend(loc=0)
-pl.title('target distributions')
-
-
-#%% OT estimation
-
-# LP problem
-da_emd = ot.da.OTDA() # init class
-da_emd.fit(xs, xt) # fit distributions
-xst0 = da_emd.interp() # interpolation of source samples
-
-# sinkhorn regularization
-lambd = 1e-1
-da_entrop = ot.da.OTDA_sinkhorn()
-da_entrop.fit(xs, xt, reg=lambd)
-xsts = da_entrop.interp()
-
-# non-convex Group lasso regularization
-reg = 1e-1
-eta = 1e0
-da_lpl1 = ot.da.OTDA_lpl1()
-da_lpl1.fit(xs, ys, xt, reg=reg, eta=eta)
-xstg = da_lpl1.interp()
-
-# True Group lasso regularization
-reg = 1e-1
-eta = 2e0
-da_l1l2 = ot.da.OTDA_l1l2()
-da_l1l2.fit(xs, ys, xt, reg=reg, eta=eta, numItermax=20, verbose=True)
-xstgl = da_l1l2.interp()
-
-#%% plot interpolated source samples
-
-param_img = {'interpolation': 'nearest', 'cmap': 'spectral'}
-
-pl.figure(2, figsize=(8, 4.5))
-pl.subplot(2, 4, 1)
-pl.imshow(da_emd.G, **param_img)
-pl.title('OT matrix')
-
-pl.subplot(2, 4, 2)
-pl.imshow(da_entrop.G, **param_img)
-pl.title('OT matrix\nsinkhorn')
-
-pl.subplot(2, 4, 3)
-pl.imshow(da_lpl1.G, **param_img)
-pl.title('OT matrix\nnon-convex Group Lasso')
-
-pl.subplot(2, 4, 4)
-pl.imshow(da_l1l2.G, **param_img)
-pl.title('OT matrix\nGroup Lasso')
-
-pl.subplot(2, 4, 5)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.3)
-pl.scatter(xst0[:, 0], xst0[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples')
-pl.legend(loc=0)
-
-pl.subplot(2, 4, 6)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.3)
-pl.scatter(xsts[:, 0], xsts[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples\nSinkhorn')
-
-pl.subplot(2, 4, 7)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.3)
-pl.scatter(xstg[:, 0], xstg[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples\nnon-convex Group Lasso')
-
-pl.subplot(2, 4, 8)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=0.3)
-pl.scatter(xstgl[:, 0], xstgl[:, 1], c=ys,
- marker='+', label='Transp samples', s=30)
-pl.title('Interp samples\nGroup Lasso')
-pl.tight_layout()
-pl.show()
diff --git a/examples/plot_OTDA_color_images.py b/examples/plot_OTDA_color_images.py
deleted file mode 100644
index c5ff873..0000000
--- a/examples/plot_OTDA_color_images.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-========================================================
-OT for domain adaptation with image color adaptation [6]
-========================================================
-
-[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014).
-Regularized discrete optimal transport.
-SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-"""
-
-# Author: Remi Flamary <remi.flamary@unice.fr>
-#
-# License: MIT License
-
-import numpy as np
-from scipy import ndimage
-import matplotlib.pylab as pl
-import ot
-
-
-#%% Loading images
-
-I1 = ndimage.imread('../data/ocean_day.jpg').astype(np.float64) / 256
-I2 = ndimage.imread('../data/ocean_sunset.jpg').astype(np.float64) / 256
-
-#%% Plot images
-
-pl.figure(1, figsize=(6.4, 3))
-
-pl.subplot(1, 2, 1)
-pl.imshow(I1)
-pl.axis('off')
-pl.title('Image 1')
-
-pl.subplot(1, 2, 2)
-pl.imshow(I2)
-pl.axis('off')
-pl.title('Image 2')
-
-pl.show()
-
-#%% Image conversion and dataset generation
-
-
-def im2mat(I):
- """Converts and image to matrix (one pixel per line)"""
- return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
-
-
-def mat2im(X, shape):
- """Converts back a matrix to an image"""
- return X.reshape(shape)
-
-
-X1 = im2mat(I1)
-X2 = im2mat(I2)
-
-# training samples
-nb = 1000
-idx1 = np.random.randint(X1.shape[0], size=(nb,))
-idx2 = np.random.randint(X2.shape[0], size=(nb,))
-
-xs = X1[idx1, :]
-xt = X2[idx2, :]
-
-#%% Plot image distributions
-
-
-pl.figure(2, figsize=(6.4, 3))
-
-pl.subplot(1, 2, 1)
-pl.scatter(xs[:, 0], xs[:, 2], c=xs)
-pl.axis([0, 1, 0, 1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 1')
-
-pl.subplot(1, 2, 2)
-pl.scatter(xt[:, 0], xt[:, 2], c=xt)
-pl.axis([0, 1, 0, 1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 2')
-pl.tight_layout()
-
-#%% domain adaptation between images
-
-# LP problem
-da_emd = ot.da.OTDA() # init class
-da_emd.fit(xs, xt) # fit distributions
-
-# sinkhorn regularization
-lambd = 1e-1
-da_entrop = ot.da.OTDA_sinkhorn()
-da_entrop.fit(xs, xt, reg=lambd)
-
-#%% prediction between images (using out of sample prediction as in [6])
-
-X1t = da_emd.predict(X1)
-X2t = da_emd.predict(X2, -1)
-
-X1te = da_entrop.predict(X1)
-X2te = da_entrop.predict(X2, -1)
-
-
-def minmax(I):
- return np.clip(I, 0, 1)
-
-
-I1t = minmax(mat2im(X1t, I1.shape))
-I2t = minmax(mat2im(X2t, I2.shape))
-
-I1te = minmax(mat2im(X1te, I1.shape))
-I2te = minmax(mat2im(X2te, I2.shape))
-
-#%% plot all images
-
-pl.figure(2, figsize=(8, 4))
-
-pl.subplot(2, 3, 1)
-pl.imshow(I1)
-pl.axis('off')
-pl.title('Image 1')
-
-pl.subplot(2, 3, 2)
-pl.imshow(I1t)
-pl.axis('off')
-pl.title('Image 1 Adapt')
-
-pl.subplot(2, 3, 3)
-pl.imshow(I1te)
-pl.axis('off')
-pl.title('Image 1 Adapt (reg)')
-
-pl.subplot(2, 3, 4)
-pl.imshow(I2)
-pl.axis('off')
-pl.title('Image 2')
-
-pl.subplot(2, 3, 5)
-pl.imshow(I2t)
-pl.axis('off')
-pl.title('Image 2 Adapt')
-
-pl.subplot(2, 3, 6)
-pl.imshow(I2te)
-pl.axis('off')
-pl.title('Image 2 Adapt (reg)')
-pl.tight_layout()
-
-pl.show()
diff --git a/examples/plot_OTDA_mapping.py b/examples/plot_OTDA_mapping.py
deleted file mode 100644
index a0d7f8b..0000000
--- a/examples/plot_OTDA_mapping.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-===============================================
-OT mapping estimation for domain adaptation [8]
-===============================================
-
-[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,
- "Mapping estimation for discrete optimal transport",
- Neural Information Processing Systems (NIPS), 2016.
-"""
-
-# Author: Remi Flamary <remi.flamary@unice.fr>
-#
-# License: MIT License
-
-import numpy as np
-import matplotlib.pylab as pl
-import ot
-
-
-#%% dataset generation
-
-np.random.seed(0) # makes example reproducible
-
-n = 100 # nb samples in source and target datasets
-theta = 2 * np.pi / 20
-nz = 0.1
-xs, ys = ot.datasets.get_data_classif('gaussrot', n, nz=nz)
-xt, yt = ot.datasets.get_data_classif('gaussrot', n, theta=theta, nz=nz)
-
-# one of the target mode changes its variance (no linear mapping)
-xt[yt == 2] *= 3
-xt = xt + 4
-
-
-#%% plot samples
-
-pl.figure(1, (6.4, 3))
-pl.clf()
-pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o', label='Target samples')
-pl.legend(loc=0)
-pl.title('Source and target distributions')
-
-
-#%% OT linear mapping estimation
-
-eta = 1e-8 # quadratic regularization for regression
-mu = 1e0 # weight of the OT linear term
-bias = True # estimate a bias
-
-ot_mapping = ot.da.OTDA_mapping_linear()
-ot_mapping.fit(xs, xt, mu=mu, eta=eta, bias=bias, numItermax=20, verbose=True)
-
-xst = ot_mapping.predict(xs) # use the estimated mapping
-xst0 = ot_mapping.interp() # use barycentric mapping
-
-
-pl.figure(2)
-pl.clf()
-pl.subplot(2, 2, 1)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=.3)
-pl.scatter(xst0[:, 0], xst0[:, 1], c=ys,
- marker='+', label='barycentric mapping')
-pl.title("barycentric mapping")
-
-pl.subplot(2, 2, 2)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=.3)
-pl.scatter(xst[:, 0], xst[:, 1], c=ys, marker='+', label='Learned mapping')
-pl.title("Learned mapping")
-pl.tight_layout()
-
-#%% Kernel mapping estimation
-
-eta = 1e-5 # quadratic regularization for regression
-mu = 1e-1 # weight of the OT linear term
-bias = True # estimate a bias
-sigma = 1 # sigma bandwidth fot gaussian kernel
-
-
-ot_mapping_kernel = ot.da.OTDA_mapping_kernel()
-ot_mapping_kernel.fit(
- xs, xt, mu=mu, eta=eta, sigma=sigma, bias=bias, numItermax=10, verbose=True)
-
-xst_kernel = ot_mapping_kernel.predict(xs) # use the estimated mapping
-xst0_kernel = ot_mapping_kernel.interp() # use barycentric mapping
-
-
-#%% Plotting the mapped samples
-
-pl.figure(2)
-pl.clf()
-pl.subplot(2, 2, 1)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=.2)
-pl.scatter(xst0[:, 0], xst0[:, 1], c=ys, marker='+',
- label='Mapped source samples')
-pl.title("Bary. mapping (linear)")
-pl.legend(loc=0)
-
-pl.subplot(2, 2, 2)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=.2)
-pl.scatter(xst[:, 0], xst[:, 1], c=ys, marker='+', label='Learned mapping')
-pl.title("Estim. mapping (linear)")
-
-pl.subplot(2, 2, 3)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=.2)
-pl.scatter(xst0_kernel[:, 0], xst0_kernel[:, 1], c=ys,
- marker='+', label='barycentric mapping')
-pl.title("Bary. mapping (kernel)")
-
-pl.subplot(2, 2, 4)
-pl.scatter(xt[:, 0], xt[:, 1], c=yt, marker='o',
- label='Target samples', alpha=.2)
-pl.scatter(xst_kernel[:, 0], xst_kernel[:, 1], c=ys,
- marker='+', label='Learned mapping')
-pl.title("Estim. mapping (kernel)")
-pl.tight_layout()
-
-pl.show()
diff --git a/examples/plot_OTDA_mapping_color_images.py b/examples/plot_OTDA_mapping_color_images.py
deleted file mode 100644
index 8064b25..0000000
--- a/examples/plot_OTDA_mapping_color_images.py
+++ /dev/null
@@ -1,169 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-====================================================================================
-OT for domain adaptation with image color adaptation [6] with mapping estimation [8]
-====================================================================================
-
-[6] Ferradans, S., Papadakis, N., Peyre, G., & Aujol, J. F. (2014). Regularized
- discrete optimal transport. SIAM Journal on Imaging Sciences, 7(3), 1853-1882.
-[8] M. Perrot, N. Courty, R. Flamary, A. Habrard, "Mapping estimation for
- discrete optimal transport", Neural Information Processing Systems (NIPS), 2016.
-
-"""
-
-# Author: Remi Flamary <remi.flamary@unice.fr>
-#
-# License: MIT License
-
-import numpy as np
-from scipy import ndimage
-import matplotlib.pylab as pl
-import ot
-
-
-#%% Loading images
-
-I1 = ndimage.imread('../data/ocean_day.jpg').astype(np.float64) / 256
-I2 = ndimage.imread('../data/ocean_sunset.jpg').astype(np.float64) / 256
-
-#%% Plot images
-
-pl.figure(1, figsize=(6.4, 3))
-pl.subplot(1, 2, 1)
-pl.imshow(I1)
-pl.axis('off')
-pl.title('Image 1')
-
-pl.subplot(1, 2, 2)
-pl.imshow(I2)
-pl.axis('off')
-pl.title('Image 2')
-pl.tight_layout()
-
-
-#%% Image conversion and dataset generation
-
-def im2mat(I):
- """Converts and image to matrix (one pixel per line)"""
- return I.reshape((I.shape[0] * I.shape[1], I.shape[2]))
-
-
-def mat2im(X, shape):
- """Converts back a matrix to an image"""
- return X.reshape(shape)
-
-
-X1 = im2mat(I1)
-X2 = im2mat(I2)
-
-# training samples
-nb = 1000
-idx1 = np.random.randint(X1.shape[0], size=(nb,))
-idx2 = np.random.randint(X2.shape[0], size=(nb,))
-
-xs = X1[idx1, :]
-xt = X2[idx2, :]
-
-#%% Plot image distributions
-
-
-pl.figure(2, figsize=(6.4, 5))
-
-pl.subplot(1, 2, 1)
-pl.scatter(xs[:, 0], xs[:, 2], c=xs)
-pl.axis([0, 1, 0, 1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 1')
-
-pl.subplot(1, 2, 2)
-pl.scatter(xt[:, 0], xt[:, 2], c=xt)
-pl.axis([0, 1, 0, 1])
-pl.xlabel('Red')
-pl.ylabel('Blue')
-pl.title('Image 2')
-pl.tight_layout()
-
-
-#%% domain adaptation between images
-
-def minmax(I):
- return np.clip(I, 0, 1)
-
-
-# LP problem
-da_emd = ot.da.OTDA() # init class
-da_emd.fit(xs, xt) # fit distributions
-
-X1t = da_emd.predict(X1) # out of sample
-I1t = minmax(mat2im(X1t, I1.shape))
-
-# sinkhorn regularization
-lambd = 1e-1
-da_entrop = ot.da.OTDA_sinkhorn()
-da_entrop.fit(xs, xt, reg=lambd)
-
-X1te = da_entrop.predict(X1)
-I1te = minmax(mat2im(X1te, I1.shape))
-
-# linear mapping estimation
-eta = 1e-8 # quadratic regularization for regression
-mu = 1e0 # weight of the OT linear term
-bias = True # estimate a bias
-
-ot_mapping = ot.da.OTDA_mapping_linear()
-ot_mapping.fit(xs, xt, mu=mu, eta=eta, bias=bias, numItermax=20, verbose=True)
-
-X1tl = ot_mapping.predict(X1) # use the estimated mapping
-I1tl = minmax(mat2im(X1tl, I1.shape))
-
-# nonlinear mapping estimation
-eta = 1e-2 # quadratic regularization for regression
-mu = 1e0 # weight of the OT linear term
-bias = False # estimate a bias
-sigma = 1 # sigma bandwidth fot gaussian kernel
-
-
-ot_mapping_kernel = ot.da.OTDA_mapping_kernel()
-ot_mapping_kernel.fit(
- xs, xt, mu=mu, eta=eta, sigma=sigma, bias=bias, numItermax=10, verbose=True)
-
-X1tn = ot_mapping_kernel.predict(X1) # use the estimated mapping
-I1tn = minmax(mat2im(X1tn, I1.shape))
-
-#%% plot images
-
-pl.figure(2, figsize=(8, 4))
-
-pl.subplot(2, 3, 1)
-pl.imshow(I1)
-pl.axis('off')
-pl.title('Im. 1')
-
-pl.subplot(2, 3, 2)
-pl.imshow(I2)
-pl.axis('off')
-pl.title('Im. 2')
-
-pl.subplot(2, 3, 3)
-pl.imshow(I1t)
-pl.axis('off')
-pl.title('Im. 1 Interp LP')
-
-pl.subplot(2, 3, 4)
-pl.imshow(I1te)
-pl.axis('off')
-pl.title('Im. 1 Interp Entrop')
-
-pl.subplot(2, 3, 5)
-pl.imshow(I1tl)
-pl.axis('off')
-pl.title('Im. 1 Linear mapping')
-
-pl.subplot(2, 3, 6)
-pl.imshow(I1tn)
-pl.axis('off')
-pl.title('Im. 1 nonlinear mapping')
-pl.tight_layout()
-
-pl.show()