Diffstat (limited to 'docs/source/auto_examples/plot_otda_mapping.ipynb')
-rw-r--r--  docs/source/auto_examples/plot_otda_mapping.ipynb  126
1 files changed, 126 insertions, 0 deletions
diff --git a/docs/source/auto_examples/plot_otda_mapping.ipynb b/docs/source/auto_examples/plot_otda_mapping.ipynb
new file mode 100644
index 0000000..898466d
--- /dev/null
+++ b/docs/source/auto_examples/plot_otda_mapping.ipynb
@@ -0,0 +1,126 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "%matplotlib inline"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n# OT mapping estimation for domain adaptation\n\n\nThis example presents how to use MappingTransport to estimate at the same\ntime both the coupling transport and approximate the transport map with either\na linear or a kernelized mapping as introduced in [8].\n\n[8] M. Perrot, N. Courty, R. Flamary, A. Habrard,\n \"Mapping estimation for discrete optimal transport\",\n Neural Information Processing Systems (NIPS), 2016.\n\n"
+ ]
+ },
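Editorial note: for orientation, the markdown cell above can be read alongside the joint objective of [8]. The following is only a rough sketch with simplified notation; the exact regularization terms used by ot.da.MappingTransport may differ in detail.

    \min_{\gamma \in \Pi(\mu_s, \mu_t),\; L}\;
        \langle \gamma, M \rangle_F
        + \mu\, \big\| L(X_s) - n_s\, \gamma X_t \big\|_F^2
        + \eta\, R(L)

Here M is the pairwise cost matrix between source and target samples, n_s \gamma X_t are the barycentric images of the source points under the coupling \gamma, and R(L) is a regularizer on the map (for instance \|L - I\|_F^2 in the linear case, an RKHS norm in the kernel case). The parameters mu and eta appearing in the notebook cells below weight the map-fitting term and the regularizer, respectively.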
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "# Authors: Remi Flamary <remi.flamary@unice.fr>\n# Stanislas Chambon <stan.chambon@gmail.com>\n#\n# License: MIT License\n\nimport numpy as np\nimport matplotlib.pylab as pl\nimport ot"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Generate data\n-------------\n\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "n_source_samples = 100\nn_target_samples = 100\ntheta = 2 * np.pi / 20\nnoise_level = 0.1\n\nXs, ys = ot.datasets.make_data_classif(\n 'gaussrot', n_source_samples, nz=noise_level)\nXs_new, _ = ot.datasets.make_data_classif(\n 'gaussrot', n_source_samples, nz=noise_level)\nXt, yt = ot.datasets.make_data_classif(\n 'gaussrot', n_target_samples, theta=theta, nz=noise_level)\n\n# one of the target mode changes its variance (no linear mapping)\nXt[yt == 2] *= 3\nXt = Xt + 4"
+ ]
+ },
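Editorial note: as a quick, hypothetical sanity check on the data-generation cell above (not part of the committed notebook), one can regenerate the target cloud and verify that the rescaled mode really is more spread out. It assumes 'gaussrot' yields 2-D samples with integer labels including class 2, which the yt == 2 indexing above suggests.

# Hypothetical sanity check on the generated target data (editorial sketch).
import numpy as np
import ot

Xt, yt = ot.datasets.make_data_classif('gaussrot', 100, theta=2 * np.pi / 20, nz=0.1)
Xt[yt == 2] *= 3   # inflate one target mode, as in the cell above
Xt = Xt + 4

print(Xt.shape)    # expected: (100, 2)
for c in np.unique(yt):
    # the inflated mode should show roughly three times the per-class spread
    print(int(c), Xt[yt == c].std(axis=0))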
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Plot data\n---------\n\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "pl.figure(1, (10, 5))\npl.clf()\npl.scatter(Xs[:, 0], Xs[:, 1], c=ys, marker='+', label='Source samples')\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o', label='Target samples')\npl.legend(loc=0)\npl.title('Source and target distributions')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Instantiate the different transport algorithms and fit them\n-----------------------------------------------------------\n\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "# MappingTransport with linear kernel\not_mapping_linear = ot.da.MappingTransport(\n kernel=\"linear\", mu=1e0, eta=1e-8, bias=True,\n max_iter=20, verbose=True)\n\not_mapping_linear.fit(Xs=Xs, Xt=Xt)\n\n# for original source samples, transform applies barycentric mapping\ntransp_Xs_linear = ot_mapping_linear.transform(Xs=Xs)\n\n# for out of source samples, transform applies the linear mapping\ntransp_Xs_linear_new = ot_mapping_linear.transform(Xs=Xs_new)\n\n\n# MappingTransport with gaussian kernel\not_mapping_gaussian = ot.da.MappingTransport(\n kernel=\"gaussian\", eta=1e-5, mu=1e-1, bias=True, sigma=1,\n max_iter=10, verbose=True)\not_mapping_gaussian.fit(Xs=Xs, Xt=Xt)\n\n# for original source samples, transform applies barycentric mapping\ntransp_Xs_gaussian = ot_mapping_gaussian.transform(Xs=Xs)\n\n# for out of source samples, transform applies the gaussian mapping\ntransp_Xs_gaussian_new = ot_mapping_gaussian.transform(Xs=Xs_new)"
+ ]
+ },
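Editorial note: as a rough, hypothetical check on the fitted estimators above (again, not part of the committed notebook), one can compare how close each set of transported samples lands to the target cloud, e.g. via the mean distance to the nearest target point. The criterion is only illustrative and reuses the arrays produced by the previous cells.

# Hypothetical check reusing Xt and the transported samples defined above.
import numpy as np
import ot

def mean_nn_dist(X_mapped, X_target):
    # mean Euclidean distance from each mapped sample to its nearest target sample
    M = ot.dist(X_mapped, X_target)   # squared Euclidean costs by default
    return np.sqrt(M.min(axis=1)).mean()

for name, X_mapped in [('bary (linear) ', transp_Xs_linear),
                       ('estim (linear)', transp_Xs_linear_new),
                       ('bary (kernel) ', transp_Xs_gaussian),
                       ('estim (kernel)', transp_Xs_gaussian_new)]:
    print(name, mean_nn_dist(X_mapped, Xt))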
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Plot transported samples\n------------------------\n\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "pl.figure(2)\npl.clf()\npl.subplot(2, 2, 1)\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',\n label='Target samples', alpha=.2)\npl.scatter(transp_Xs_linear[:, 0], transp_Xs_linear[:, 1], c=ys, marker='+',\n label='Mapped source samples')\npl.title(\"Bary. mapping (linear)\")\npl.legend(loc=0)\n\npl.subplot(2, 2, 2)\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',\n label='Target samples', alpha=.2)\npl.scatter(transp_Xs_linear_new[:, 0], transp_Xs_linear_new[:, 1],\n c=ys, marker='+', label='Learned mapping')\npl.title(\"Estim. mapping (linear)\")\n\npl.subplot(2, 2, 3)\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',\n label='Target samples', alpha=.2)\npl.scatter(transp_Xs_gaussian[:, 0], transp_Xs_gaussian[:, 1], c=ys,\n marker='+', label='barycentric mapping')\npl.title(\"Bary. mapping (kernel)\")\n\npl.subplot(2, 2, 4)\npl.scatter(Xt[:, 0], Xt[:, 1], c=yt, marker='o',\n label='Target samples', alpha=.2)\npl.scatter(transp_Xs_gaussian_new[:, 0], transp_Xs_gaussian_new[:, 1], c=ys,\n marker='+', label='Learned mapping')\npl.title(\"Estim. mapping (kernel)\")\npl.tight_layout()\n\npl.show()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\ No newline at end of file
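Editorial note: since the example is about domain adaptation, a natural follow-up (hypothetical, not part of the committed notebook) is to train a simple classifier on the mapped source samples and score it on the labelled target samples. The 1-nearest-neighbour classifier and the scikit-learn dependency are assumptions made for this sketch; it reuses Xs, ys, Xt, yt and the transported arrays from the notebook cells above.

# Hypothetical domain adaptation follow-up (editorial sketch, assumes scikit-learn).
from sklearn.neighbors import KNeighborsClassifier

def target_accuracy(X_train, y_train, X_test, y_test):
    # fit a 1-NN classifier on (mapped) source data and score it on target data
    clf = KNeighborsClassifier(n_neighbors=1).fit(X_train, y_train)
    return clf.score(X_test, y_test)

print('no adaptation ', target_accuracy(Xs, ys, Xt, yt))
print('bary (linear) ', target_accuracy(transp_Xs_linear, ys, Xt, yt))
print('bary (kernel) ', target_accuracy(transp_Xs_gaussian, ys, Xt, yt))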