diff options
Diffstat (limited to 'src')
-rw-r--r-- | src/cmake/modules/GUDHI_options.cmake | 11 | ||||
-rw-r--r-- | src/common/doc/installation.h | 2 | ||||
-rw-r--r-- | src/python/CMakeLists.txt | 4 | ||||
-rw-r--r-- | src/python/doc/datasets.inc (renamed from src/python/doc/datasets_generators.inc) | 4 | ||||
-rw-r--r-- | src/python/doc/datasets.rst (renamed from src/python/doc/datasets_generators.rst) | 27 | ||||
-rw-r--r-- | src/python/doc/img/bunny.png | bin | 0 -> 48040 bytes | |||
-rw-r--r-- | src/python/doc/index.rst | 6 | ||||
-rw-r--r-- | src/python/gudhi/datasets/remote.py | 206 | ||||
-rw-r--r-- | src/python/test/test_remote_datasets.py | 119 |
9 files changed, 365 insertions, 14 deletions
diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake index 3cd0a489..c75b72f5 100644 --- a/src/cmake/modules/GUDHI_options.cmake +++ b/src/cmake/modules/GUDHI_options.cmake @@ -1,5 +1,6 @@ -option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF) -option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON) -option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON) -option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON) +option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF) +option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installation" OFF) +option(WITH_GUDHI_REMOTE_TEST "Activate/deactivate datasets fetching test which uses the Internet" OFF) +option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) +option(WITH_GUDHI_TEST "Activate/deactivate tests compilation and installation" ON) +option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON) diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index 67d026bd..afbad14d 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -40,6 +40,8 @@ make \endverbatim * `make test` is using <a href="https://cmake.org/cmake/help/latest/manual/ctest.1.html">Ctest</a> (CMake test driver * program). If some of the tests are failing, please send us the result of the following command: * \verbatim ctest --output-on-failure \endverbatim + * Testing the datasets fetching feature requires an Internet connection and is disabled by default. 
If you want to include this test, set WITH_GUDHI_REMOTE_TEST to ON when building in the previous step (note that this test is included in the python module): + * \verbatim cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON -DWITH_GUDHI_PYTHON=ON .. \endverbatim * * \subsection documentationgeneration Documentation * To generate the documentation, <a target="_blank" href="http://www.doxygen.org/">Doxygen</a> is required. diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index e31af02e..63a9bbea 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -595,6 +595,10 @@ if(PYTHONINTERP_FOUND) add_gudhi_py_test(test_dtm_rips_complex) endif() + # Fetch remote datasets + if(WITH_GUDHI_REMOTE_TEST) + add_gudhi_py_test(test_remote_datasets) + endif() # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") diff --git a/src/python/doc/datasets_generators.inc b/src/python/doc/datasets.inc index 8d169275..95a87678 100644 --- a/src/python/doc/datasets_generators.inc +++ b/src/python/doc/datasets.inc @@ -2,7 +2,7 @@ :widths: 30 40 30 +-----------------------------------+--------------------------------------------+--------------------------------------------------------------------------------------+ - | .. figure:: | Datasets generators (points). | :Authors: Hind Montassif | + | .. figure:: | Datasets either generated or fetched. 
| :Authors: Hind Montassif | | img/sphere_3d.png | | | | | | :Since: GUDHI 3.5.0 | | | | | @@ -10,5 +10,5 @@ | | | | | | | :Requires: `CGAL <installation.html#cgal>`_ | +-----------------------------------+--------------------------------------------+--------------------------------------------------------------------------------------+ - | * :doc:`datasets_generators` | + | * :doc:`datasets` | +-----------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/doc/datasets_generators.rst b/src/python/doc/datasets.rst index 260c3882..62b7dca0 100644 --- a/src/python/doc/datasets_generators.rst +++ b/src/python/doc/datasets.rst @@ -3,12 +3,14 @@ .. To get rid of WARNING: document isn't included in any toctree -=========================== -Datasets generators manual -=========================== +================ +Datasets manual +================ -We provide the generation of different customizable datasets to use as inputs for Gudhi complexes and data structures. +Datasets generators +=================== +We provide the generation of different customizable datasets to use as inputs for Gudhi complexes and data structures. Points generators ------------------ @@ -103,3 +105,20 @@ Example .. autofunction:: gudhi.datasets.generators.points.torus + + +Fetching datasets +================= + +We provide some ready-to-use datasets that are not available by default when getting GUDHI, and need to be fetched explicitly. + +.. figure:: ./img/bunny.png + :figclass: align-center + + 3D Stanford bunny with 35947 vertices. + + +.. 
automodule:: gudhi.datasets.remote + :members: + :special-members: + :show-inheritance: diff --git a/src/python/doc/img/bunny.png b/src/python/doc/img/bunny.png Binary files differnew file mode 100644 index 00000000..769aa530 --- /dev/null +++ b/src/python/doc/img/bunny.png diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst index 2d7921ae..35f4ba46 100644 --- a/src/python/doc/index.rst +++ b/src/python/doc/index.rst @@ -92,7 +92,7 @@ Clustering .. include:: clustering.inc -Datasets generators -******************* +Datasets +******** -.. include:: datasets_generators.inc +.. include:: datasets.inc diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py new file mode 100644 index 00000000..8b3baef4 --- /dev/null +++ b/src/python/gudhi/datasets/remote.py @@ -0,0 +1,206 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. +# Author(s): Hind Montassif +# +# Copyright (C) 2021 Inria +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + +from os.path import join, exists, expanduser +from os import makedirs, remove + +from urllib.request import urlretrieve +import hashlib +import shutil + +import numpy as np + +def get_data_home(data_home = None): + """ + Return the path of the remote datasets directory. + This folder is used to store remotely fetched datasets. + By default the datasets directory is set to a folder named 'gudhi_data' in the user home folder. + Alternatively, it can be set by giving an explicit folder path. The '~' symbol is expanded to the user home folder. + If the folder does not already exist, it is automatically created. + + Parameters + ---------- + data_home : string + The path to remote datasets directory. Default is `None`, meaning that the data home directory will be set to "~/gudhi_data". 
+ + Returns + ------- + data_home: string + The path to remote datasets directory. + """ + if data_home is None: + data_home = join("~", "gudhi_data") + data_home = expanduser(data_home) + makedirs(data_home, exist_ok=True) + return data_home + + +def clear_data_home(data_home = None): + """ + Delete the data home cache directory and all its content. + + Parameters + ---------- + data_home : string, default is None. + The path to remote datasets directory. If `None`, the default directory to be removed is set to "~/gudhi_data". + """ + data_home = get_data_home(data_home) + shutil.rmtree(data_home) + +def _checksum_sha256(file_path): + """ + Compute the file checksum using sha256. + + Parameters + ---------- + file_path: string + Full path of the created file. + + Returns + ------- + The hex digest of file_path. + """ + sha256_hash = hashlib.sha256() + chunk_size = 4096 + with open(file_path,"rb") as f: + # Read and update hash string value in blocks of 4K + while True: + buffer = f.read(chunk_size) + if not buffer: + break + sha256_hash.update(buffer) + return sha256_hash.hexdigest() + +def _fetch_remote(url, filename, dirname, file_checksum = None, accept_license = False): + """ + Fetch the wanted dataset from the given url and save it in file_path. + + Parameters + ---------- + url : string + The url to fetch the dataset from. + filename : string + The name to give to downloaded file. + dirname : string + The directory to save the file to. + file_checksum : string + The file checksum using sha256 to check against the one computed on the downloaded file. + Default is 'None', which means the checksum is not checked. + accept_license : boolean + Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False. + + Returns + ------- + file_path: string + Full path of the created file. + + Raises + ------ + IOError + If the computed SHA256 checksum of file does not match the one given by the user. 
+ """ + + file_path = join(dirname, filename) + + # Get the file + urlretrieve(url, file_path) + + if file_checksum is not None: + checksum = _checksum_sha256(file_path) + if file_checksum != checksum: + # Remove file and raise error + remove(file_path) + raise IOError("{} has a SHA256 checksum : {}, " + "different from expected : {}. " + "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, file_checksum)) + + # Print license terms unless accept_license is set to True + if not accept_license: + license_file = join(dirname, "LICENSE") + if exists(license_file) and (file_path != license_file): + with open(license_file, 'r') as f: + print(f.read()) + + return file_path + +def _get_archive_and_dir(dirname, filename, label): + if dirname is None: + dirname = join(get_data_home(dirname), label) + makedirs(dirname, exist_ok=True) + else: + dirname = get_data_home(dirname) + + archive_path = join(dirname, filename) + + return archive_path, dirname + +def fetch_spiral_2d(filename = "spiral_2d.npy", dirname = None): + """ + Fetch spiral_2d dataset remotely. + + Parameters + ---------- + filename : string + The name to give to downloaded file. Default is "spiral_2d.npy". + dirname : string + The directory to save the file to. Default is None, meaning that the downloaded file will be put in "~/gudhi_data/points/spiral_2d". + + Returns + ------- + points: numpy array + Array of shape (114562, 2). 
+ """ + file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy" + file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf' + + archive_path, dirname = _get_archive_and_dir(dirname, filename, "points/spiral_2d") + + if not exists(archive_path): + file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum) + + return np.load(file_path_pkl, mmap_mode='r') + else: + return np.load(archive_path, mmap_mode='r') + +def fetch_bunny(filename = "bunny.npy", dirname = None, accept_license = False): + """ + Fetch Stanford bunny dataset remotely and its LICENSE file. + This dataset contains 35947 vertices. + + Parameters + ---------- + filename : string + The name to give to downloaded file. Default is "bunny.npy". + dirname : string + The directory to save the file to. Default is None, meaning that the downloaded files will be put in "~/gudhi_data/points/bunny". + accept_license : boolean + Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False. + + Returns + ------- + points: numpy array + Array of shape (35947, 3). 
+ """ + + file_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy" + file_checksum = '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b' + license_url = "https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE" + license_checksum = 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a' + + archive_path, dirname = _get_archive_and_dir(dirname, filename, "points/bunny") + + if not exists(archive_path): + license_path = _fetch_remote(license_url, "LICENSE", dirname, license_checksum) + file_path_pkl = _fetch_remote(file_url, filename, dirname, file_checksum, accept_license) + + return np.load(file_path_pkl, mmap_mode='r') + else: + return np.load(archive_path, mmap_mode='r') diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py new file mode 100644 index 00000000..c44ac22b --- /dev/null +++ b/src/python/test/test_remote_datasets.py @@ -0,0 +1,119 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. 
+# Author(s): Hind Montassif +# +# Copyright (C) 2021 Inria +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + +from gudhi.datasets import remote + +import re +import shutil +import io +import sys +import pytest + +from os.path import isfile, isdir, expanduser +from os import makedirs + +def _check_dir_file_names(path_file_dw, filename, dirname): + assert isfile(path_file_dw) + + names_dw = re.split(r' |/|\\', path_file_dw) + # Case where inner directories are created in "test_gudhi_data/"; e.g: "test_gudhi_data/bunny" + if len(names_dw) >= 3: + for i in range(len(names_dw)-1): + assert re.split(r' |/|\\', dirname)[i] == names_dw[i] + assert filename == names_dw[i+1] + else: + assert dirname == names_dw[0] + assert filename == names_dw[1] + +def _check_fetch_output(url, filename, dirname = "test_gudhi_data", file_checksum = None): + makedirs(dirname, exist_ok=True) + path_file_dw = remote._fetch_remote(url, filename, dirname, file_checksum) + _check_dir_file_names(path_file_dw, filename, dirname) + +def _get_bunny_license_print(accept_license = False): + capturedOutput = io.StringIO() + # Redirect stdout + sys.stdout = capturedOutput + + makedirs("test_gudhi_data/bunny", exist_ok=True) + + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/bunny.npy", "bunny.npy", "test_gudhi_data/bunny", + '13f7842ebb4b45370e50641ff28c88685703efa5faab14edf0bb7d113a965e1b', accept_license) + # Reset redirect + sys.stdout = sys.__stdout__ + return capturedOutput + +def test_fetch_remote_datasets(): + # Test fetch with a wrong checksum + with pytest.raises(OSError): + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy", file_checksum = 'XXXXXXXXXX') + + # Test files download from given urls with checksums provided + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy", + 
file_checksum = '88312ffd6df2e2cb2bde9c0e1f962d7d644c6f58dc369c7b377b298dacdc4eaf') + + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') + + # Test files download from given urls without checksums + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d/spiral_2d.npy", "spiral_2d.npy") + + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + + # Test printing existing LICENSE file when fetching bunny.npy with accept_license = False (default) + # Fetch LICENSE file + makedirs("test_gudhi_data/bunny", exist_ok=True) + remote._fetch_remote("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/bunny/LICENSE", "LICENSE", "test_gudhi_data/bunny", + 'b763dbe1b2fc6015d05cbf7bcc686412a2eb100a1f2220296e3b4a644c69633a') + with open("test_gudhi_data/bunny/LICENSE") as f: + assert f.read().rstrip("\n") == _get_bunny_license_print().getvalue().rstrip("\n") + + # Test not printing bunny.npy LICENSE when accept_license = True + assert "" == _get_bunny_license_print(accept_license = True).getvalue() + + # Remove "test_gudhi_data" directory and all its content + shutil.rmtree("test_gudhi_data") + +def test_fetch_remote_datasets_wrapped(): + # Check if gudhi_data default dir exists already + to_be_removed = not isdir(expanduser("~/gudhi_data")) + # Test fetch_spiral_2d and fetch_bunny wrapping functions (twice, to test case of already fetched files) + for i in range(2): + spiral_2d_arr = remote.fetch_spiral_2d() + assert spiral_2d_arr.shape == (114562, 2) + + bunny_arr = remote.fetch_bunny() + assert bunny_arr.shape == (35947, 3) + + # Check that default dir was created + assert isdir(expanduser("~/gudhi_data")) + + # Test fetch_spiral_2d and fetch_bunny wrapping functions 
with data directory different from default + spiral_2d_arr = remote.fetch_spiral_2d(dirname = "./another_fetch_folder_for_test") + assert spiral_2d_arr.shape == (114562, 2) + + bunny_arr = remote.fetch_bunny(dirname = "./another_fetch_folder_for_test") + assert bunny_arr.shape == (35947, 3) + + assert isdir(expanduser("./another_fetch_folder_for_test")) + + # Remove test folders + del spiral_2d_arr + del bunny_arr + if to_be_removed: + shutil.rmtree(expanduser("~/gudhi_data")) + shutil.rmtree(expanduser("./another_fetch_folder_for_test")) + +def test_data_home(): + # Test get_data_home and clear_data_home on new empty folder + empty_data_home = remote.get_data_home(data_home="empty_folder_for_test") + assert isdir(empty_data_home) + + remote.clear_data_home(data_home=empty_data_home) + assert not isdir(empty_data_home) |