diff options
-rw-r--r-- | .appveyor.yml | 2 | ||||
-rw-r--r-- | .circleci/config.yml | 2 | ||||
-rw-r--r-- | .github/for_maintainers/tests_strategy.md | 12 | ||||
-rw-r--r-- | azure-pipelines.yml | 2 | ||||
-rw-r--r-- | src/cmake/modules/GUDHI_modules.cmake | 11 | ||||
-rw-r--r-- | src/common/doc/installation.h | 2 | ||||
-rw-r--r-- | src/python/CMakeLists.txt | 4 | ||||
-rw-r--r-- | src/python/gudhi/datasets/remote.py | 136 | ||||
-rw-r--r-- | src/python/test/test_remote_datasets.py | 79 |
9 files changed, 236 insertions, 14 deletions
diff --git a/.appveyor.yml b/.appveyor.yml index 33458a28..af280955 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -26,7 +26,7 @@ environment: CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF - target: Python - CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON + CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DWITH_GUDHI_REMOTE_TEST=ON #cache: diff --git a/.circleci/config.yml b/.circleci/config.yml index f6a875dd..90737006 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -61,7 +61,7 @@ jobs: cmake -DUSER_VERSION_DIR=version .. make user_version cd version - cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 . + cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_REMOTE_TEST=ON . 
cd python python3 setup.py build_ext --inplace make sphinx diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md index 9c181740..2bba3f42 100644 --- a/.github/for_maintainers/tests_strategy.md +++ b/.github/for_maintainers/tests_strategy.md @@ -8,13 +8,13 @@ The aim is to help maintainers to anticipate third parties modifications, update ### Linux -As all the third parties are already installed (thanks to docker), the compilations has been seperated by categories to be parallelized: +As all the third parties are already installed (thanks to docker), the compilations has been separated by categories to be parallelized: * examples (C++) * tests (C++) * utils (C++) * doxygen (C++ documentation that is available in the artefacts) -* python (including documentation and code coverage that are available in the artefacts) +* python (including documentation and code coverage that are available in the artefacts; here the WITH_GUDHI_REMOTE_TEST option is enabled which adds datasets fetching test) (cf. `.circleci/config.yml`) @@ -39,12 +39,12 @@ docker push gudhi/ci_for_gudhi_wo_cgal:latest ### Windows -The compilations has been seperated by categories to be parallelized, but I don't know why builds are not run in parallel: +The compilations has been separated by categories to be parallelized, but I don't know why builds are not run in parallel: * examples (C++) * tests (C++) * utils (C++) -* python +* python (here the WITH_GUDHI_REMOTE_TEST option is enabled which adds datasets fetching test) Doxygen (C++) is not tested. (cf. 
`.appveyor.yml`) @@ -54,12 +54,12 @@ In case of installation issue, check in [vcpkg issues](https://github.com/micros ### OSx -The compilations has been seperated by categories to be parallelized: +The compilations has been separated by categories to be parallelized: * examples (C++) * tests (C++) * utils (C++) -* python +* python (here the WITH_GUDHI_REMOTE_TEST option is enabled which adds datasets fetching test) * Doxygen (C++) (cf. `azure-pipelines.yml`) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a96323fd..25f61ce7 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -31,7 +31,7 @@ jobs: source activate gudhi_build_env mkdir build cd build - cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. + cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_REMOTE_TEST=ON .. 
make -j 4 make doxygen ctest -j 4 --output-on-failure # -E sphinx remove sphinx build as it fails diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake index ccaf1ac5..cbed6351 100644 --- a/src/cmake/modules/GUDHI_modules.cmake +++ b/src/cmake/modules/GUDHI_modules.cmake @@ -17,11 +17,12 @@ function(add_gudhi_module file_path) endfunction(add_gudhi_module) -option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF) -option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF) -option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON) -option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON) -option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON) +option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF) +option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation" OFF) +option(WITH_GUDHI_REMOTE_TEST "Activate/deactivate datasets fetching test which uses the Internet" OFF) +option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON) +option(WITH_GUDHI_TEST "Activate/deactivate tests compilation" ON) +option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation" ON) if (WITH_GUDHI_BENCHMARK) set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};benchmark") diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h index ef668dfb..6c42fbb4 100644 --- a/src/common/doc/installation.h +++ b/src/common/doc/installation.h @@ -40,6 +40,8 @@ make \endverbatim * `make test` is using <a href="https://cmake.org/cmake/help/latest/manual/ctest.1.html">Ctest</a> (CMake test driver * program). 
If some of the tests are failing, please send us the result of the following command: * \verbatim ctest --output-on-failure \endverbatim + * Testing fetching datasets feature requires the use of the internet and is disabled by default. If you want to include this test, set WITH_GUDHI_REMOTE_TEST to ON when building in the previous step (note that this test is included in the python module): + * \verbatim cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON -DWITH_GUDHI_PYTHON=ON .. \endverbatim + * + * \subsection documentationgeneration Documentation + * To generate the documentation, <a target="_blank" href="http://www.doxygen.org/">Doxygen</a> is required. diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 1314b444..33711880 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -571,6 +571,10 @@ if(PYTHONINTERP_FOUND) add_gudhi_py_test(test_dtm_rips_complex) endif() + # Fetch remote datasets + if(WITH_GUDHI_REMOTE_TEST) + add_gudhi_py_test(test_remote_datasets) + endif() # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") diff --git a/src/python/gudhi/datasets/remote.py b/src/python/gudhi/datasets/remote.py new file mode 100644 index 00000000..7e8f9ce7 --- /dev/null +++ b/src/python/gudhi/datasets/remote.py @@ -0,0 +1,136 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. +# Author(s): Hind Montassif +# +# Copyright (C) 2021 Inria +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + +import hashlib + +from os.path import join, exists +from os import makedirs + +from urllib.request import urlretrieve + + +def _checksum_sha256(file_path): + """ + Compute the file checksum using sha256 + + Parameters + ---------- + file_path: string + Full path of the created file. 
+ + Returns + ------- + The hex digest of file_path + """ + sha256_hash = hashlib.sha256() + chunk_size = 4096 + with open(file_path,"rb") as f: + # Read and update hash string value in blocks of 4K + while True: + buffer = f.read(chunk_size) + if not buffer: + break + sha256_hash.update(buffer) + return sha256_hash.hexdigest() + +def fetch(url, filename, dirname = "remote_datasets", file_checksum = None, accept_license = False): + """ + Fetch the wanted dataset from the given url and save it in file_path + + Parameters + ---------- + url : string + The url to fetch the dataset from. + filename : string + The name to give to downloaded file. + dirname : string + The directory to save the file to. Default is "remote_datasets". + file_checksum : string + The file checksum using sha256 to check against the one computed on the downloaded file. + Default is 'None'. + accept_license : boolean + Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False + + Returns + ------- + file_path: string + Full path of the created file. + """ + + file_path = join(dirname, filename) + + # Check for an already existing file at file_path + if not exists(file_path): + # Create directory if not existing + if not exists(dirname): + makedirs(dirname) + + # Get the file + urlretrieve(url, file_path) + + if file_checksum is not None: + checksum = _checksum_sha256(file_path) + if file_checksum != checksum: + raise IOError("{} has a SHA256 checksum : {}, " + "different from expected : {}." 
+ "The file may be corrupted or the given url may be wrong !".format(file_path, checksum, file_checksum)) + + # Print license terms unless accept_license is set to True + if not accept_license: + license_file = join(dirname, "LICENSE") + if exists(license_file) and (file_path != license_file): + with open(license_file, 'r') as f: + print(f.read()) + + return file_path + +def fetch_spiral_2d(filename = "spiral_2d.csv", dirname = "remote_datasets"): + """ + Fetch spiral_2d.csv remotely + + Parameters + ---------- + filename : string + The name to give to downloaded file. Default is "spiral_2d.csv" + dirname : string + The directory to save the file to. Default is "remote_datasets". + + Returns + ------- + file_path: string + Full path of the created file. + """ + return fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", filename, dirname, + '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + +def fetch_bunny(filename = "bunny.off", dirname = "remote_datasets/bunny", accept_license = False): + """ + Fetch bunny.off remotely and its LICENSE file + + Parameters + ---------- + filename : string + The name to give to downloaded file. Default is "bunny.off" + dirname : string + The directory to save the file to. Default is "remote_datasets/bunny". + accept_license : boolean + Flag to specify if user accepts the file LICENSE and prevents from printing the corresponding license terms. + Default is False + + Returns + ------- + files_paths: list of strings + Full paths of the created file and its LICENSE. 
+ """ + + return [fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/LICENSE", "LICENSE", dirname, + 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956'), + fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/bunny.off", filename, dirname, + '11852d5e73e2d4bd7b86a2c5cc8a5884d0fbb72539493e8cec100ea922b19f5b', accept_license)] diff --git a/src/python/test/test_remote_datasets.py b/src/python/test/test_remote_datasets.py new file mode 100644 index 00000000..e777abc6 --- /dev/null +++ b/src/python/test/test_remote_datasets.py @@ -0,0 +1,79 @@ +# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. +# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. +# Author(s): Hind Montassif +# +# Copyright (C) 2021 Inria +# +# Modification(s): +# - YYYY/MM Author: Description of the modification + + +from gudhi.datasets import remote +import re +import os.path +import io +import sys +import pytest + +def _check_dir_file_names(path_file_dw, filename, dirname): + assert os.path.isfile(path_file_dw) + + names_dw = re.split(r' |/|\\', path_file_dw) + # Case where inner directories are created in "remote_datasets/"; e.g: "remote_datasets/bunny" + if len(names_dw) >= 3: + for i in range(len(names_dw)-1): + assert re.split(r' |/|\\', dirname)[i] == names_dw[i] + assert filename == names_dw[i+1] + else: + assert dirname == names_dw[0] + assert filename == names_dw[1] + +def _check_fetch_output(url, filename, dirname = "remote_datasets", file_checksum = None): + path_file_dw = remote.fetch(url, filename, dirname, file_checksum) + _check_dir_file_names(path_file_dw, filename, dirname) + +def _get_bunny_license_print(accept_license = False): + capturedOutput = io.StringIO() + # Redirect stdout + sys.stdout = capturedOutput + remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/bunny.off", "bunny.off", 
"remote_datasets/bunny", + '11852d5e73e2d4bd7b86a2c5cc8a5884d0fbb72539493e8cec100ea922b19f5b', accept_license) + # Reset redirect + sys.stdout = sys.__stdout__ + return capturedOutput + +def test_fetch_remote_datasets(): + # Test fetch with a wrong checksum + with pytest.raises(OSError): + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", file_checksum = 'XXXXXXXXXX') + + # Test files download from given urls with checksums provided + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv", + file_checksum = '37530355d980d957c4ec06b18c775f90a91e446107d06c6201c9b4000b077f38') + + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off", + file_checksum = '32f96d2cafb1177f0dd5e0a019b6ff5658e14a619a7815ae55ad0fc5e8bd3f88') + + # Test files download from given urls without checksums + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/spiral_2d.csv", "spiral_2d.csv") + + _check_fetch_output("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points/sphere3D_pts_on_grid.off", "sphere3D_pts_on_grid.off") + + # Test spiral_2d.csv wrapping function + path_file_dw = remote.fetch_spiral_2d() + _check_dir_file_names(path_file_dw, 'spiral_2d.csv', 'remote_datasets') + + # Test printing existing LICENSE file when fetching bunny.off with accept_license = False (default) + # Fetch LICENSE file + remote.fetch("https://raw.githubusercontent.com/GUDHI/gudhi-data/main/points//bunny/LICENSE", "LICENSE", "remote_datasets/bunny", + 'aeb1bad319b7d74fa0b8076358182f9c6b1284c67cc07dc67cbc9bc73025d956') + with open("remote_datasets/bunny/LICENSE") as f: + assert f.read() == _get_bunny_license_print().getvalue().rstrip("\n") + + # Test not printing bunny.off LICENSE when accept_license = True + assert "" == _get_bunny_license_print(accept_license = 
True).getvalue() + + # Test fetch_bunny wrapping function + path_file_dw = remote.fetch_bunny() + _check_dir_file_names(path_file_dw[0], 'LICENSE', 'remote_datasets/bunny') + _check_dir_file_names(path_file_dw[1], 'bunny.off', 'remote_datasets/bunny') |