summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorVincent Rouvreau <vincent.rouvreau@inria.fr>2022-01-24 10:58:26 +0100
committerVincent Rouvreau <vincent.rouvreau@inria.fr>2022-01-24 10:58:26 +0100
commitbbf6587030d09186002d41327243646aa5e6ced5 (patch)
tree70f6bacde1608f6eb6dabf0d869dc4dcf1359518
parent6b6a2ca097e38ca8d1601e764c8f3a6eecfd19e7 (diff)
parent4d27d023c89d6c6ad10b3923c3cdbd9ed5952711 (diff)
Merge master
-rw-r--r--.appveyor.yml80
-rw-r--r--.github/for_maintainers/tests_strategy.md10
-rw-r--r--.github/next_release.md4
-rw-r--r--CMakeLists.txt4
-rw-r--r--README.md3
-rw-r--r--azure-pipelines.yml75
-rw-r--r--src/CMakeLists.txt4
-rw-r--r--src/cmake/modules/GUDHI_modules.cmake6
-rw-r--r--src/cmake/modules/GUDHI_options.cmake5
-rw-r--r--src/cmake/modules/GUDHI_third_party_libraries.cmake177
-rw-r--r--src/python/CMakeLists.txt5
-rw-r--r--src/python/gudhi/representations/vector_methods.py159
-rwxr-xr-xsrc/python/test/test_betti_curve_representations.py59
-rwxr-xr-xsrc/python/test/test_representations.py3
14 files changed, 360 insertions, 234 deletions
diff --git a/.appveyor.yml b/.appveyor.yml
deleted file mode 100644
index 33458a28..00000000
--- a/.appveyor.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-image:
- - Visual Studio 2019
-
-build:
- parallel: true
- verbosity: detailed
-
-configuration:
- - Release
-
-environment:
- # update the vcpkg cache even if build fails
- # APPVEYOR_SAVE_CACHE_ON_ERROR: true
- PYTHON: "C:\\Python39-x64"
- PYTHONPATH: "C:\\Python39-x64\\lib\\site-packages"
- CMAKE_VCPKG_FLAGS: -DVCPKG_TARGET_TRIPLET=x64-windows -DCMAKE_TOOLCHAIN_FILE=c:\Tools\vcpkg\scripts\buildsystems\vcpkg.cmake
-
- matrix:
- - target: Examples
- CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF
-
- - target: UnitaryTests
- CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF
-
- - target: Utilities
- CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF
-
- - target: Python
- CMAKE_FLAGS: -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON
-
-
-#cache:
-# - c:\Tools\vcpkg\installed
-# - '%LOCALAPPDATA%\pip\Cache'
-
-init:
- - echo %target%
-
-install:
- - git submodule update --init
- - vcpkg update
- - vcpkg remove --outdated
- - vcpkg upgrade --no-dry-run
- - vcpkg install boost-filesystem:x64-windows boost-test:x64-windows boost-program-options:x64-windows tbb:x64-windows eigen3:x64-windows cgal:x64-windows
- - dir "C:\Tools\vcpkg\installed\x64-windows\bin\"
- - vcpkg integrate install
- - CALL "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat" amd64
- - "set PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
- - python --version
- - pip --version
- - python -m pip install --upgrade pip
- - python -m pip install --upgrade setuptools
- - python -m pip install -r ext\gudhi-deploy\build-requirements.txt
- # No PyKeOps on windows, let's workaround this one.
- - for /F "tokens=*" %%A in (ext\gudhi-deploy\test-requirements.txt) do python -m pip install %%A
- - dir "c:\python39-x64\lib\site-packages"
- - dir "%LOCALAPPDATA%\pip\Cache"
- - python -c "from scipy import spatial; print(spatial.cKDTree)"
-
-build_script:
- - mkdir build
- - cd build
- - cmake -G "Visual Studio 16 2019" -A x64 -DCMAKE_BUILD_TYPE=Release %CMAKE_FLAGS% %CMAKE_VCPKG_FLAGS% ..
- - if [%target%]==[Python] (
- cd src\python &
- dir . &
- type setup.py &
- copy "C:\Tools\vcpkg\installed\x64-windows\bin\mpfr-6.dll" ".\gudhi\" &
- copy "C:\Tools\vcpkg\installed\x64-windows\bin\gmp.dll" ".\gudhi\" &
- copy "C:\Tools\vcpkg\installed\x64-windows\bin\tbb.dll" ".\gudhi\" &
- copy "C:\Tools\vcpkg\installed\x64-windows\bin\tbbmalloc.dll" ".\gudhi\" &
- python setup.py build_ext --inplace &
- SET PYTHONPATH=%CD%;%PYTHONPATH% &
- echo %PYTHONPATH% &
- ctest -j 1 --output-on-failure -C Release
- ) else (
- dir . &
- MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64 &
- ctest -j 1 --output-on-failure -C Release -E diff_files
- )
diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md
index 9c181740..c25acf9b 100644
--- a/.github/for_maintainers/tests_strategy.md
+++ b/.github/for_maintainers/tests_strategy.md
@@ -39,22 +39,24 @@ docker push gudhi/ci_for_gudhi_wo_cgal:latest
### Windows
-The compilations has been seperated by categories to be parallelized, but I don't know why builds are not run in parallel:
+The compilations are not parallelized, as installation time (about 30 minutes) is too long compared to
+build and test timings (about 30 minutes). Builds and tests include:
* examples (C++)
* tests (C++)
* utils (C++)
* python
-Doxygen (C++) is not tested.
-(cf. `.appveyor.yml`)
+Doxygen (C++) is not generated.
+(cf. `azure-pipelines.yml`)
C++ third-party installation is done thanks to [vcpkg](https://github.com/microsoft/vcpkg/).
In case of installation issue, check in [vcpkg issues](https://github.com/microsoft/vcpkg/issues).
### OSx
-The compilations has been seperated by categories to be parallelized:
+The compilations are not parallelized, but they should be, as installation time (about 4 minutes) is
+negligible compared to build and test timings (about 30 minutes). Builds and tests include:
* examples (C++)
* tests (C++)
diff --git a/.github/next_release.md b/.github/next_release.md
index 5a8eadb7..211ae117 100644
--- a/.github/next_release.md
+++ b/.github/next_release.md
@@ -6,8 +6,8 @@ We are now using GitHub to develop the GUDHI library, do not hesitate to [fork t
Below is a list of changes made since GUDHI 3.5.0:
-- [Module](link)
- - ...
+- [Representations](https://gudhi.inria.fr/python/latest/representations.html#gudhi.representations.vector_methods.BettiCurve)
+ - A more flexible Betti curve class capable of computing exact curves
- [Python installation](link)
- Python >= 3.5 and cython >= 0.27 are now required.
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d0cf6a25..ac877eea 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -2,9 +2,9 @@ cmake_minimum_required(VERSION 3.5)
project(GUDHIdev)
-include(CMakeGUDHIVersion.txt)
-
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/")
+include(CMakeGUDHIVersion.txt)
+include(GUDHI_options)
# Reset cache
set(GUDHI_MODULES "" CACHE INTERNAL "GUDHI_MODULES")
diff --git a/README.md b/README.md
index 279953e1..664483ee 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,6 @@
-[![OSx on Azure](https://dev.azure.com/GUDHI/gudhi-devel/_apis/build/status/GUDHI.gudhi-devel?branchName=master)](https://dev.azure.com/GUDHI/gudhi-devel/_build/latest?definitionId=1&branchName=master)
+[![OSx and Win on Azure](https://dev.azure.com/GUDHI/gudhi-devel/_apis/build/status/GUDHI.gudhi-devel?branchName=master)](https://dev.azure.com/GUDHI/gudhi-devel/_build/latest?definitionId=1&branchName=master)
[![Linux on CircleCI](https://circleci.com/gh/GUDHI/gudhi-devel/tree/master.svg?style=svg)](https://circleci.com/gh/GUDHI/gudhi-devel/tree/master)
-[![Win on Appveyor](https://ci.appveyor.com/api/projects/status/976j2uut8xgalvx2/branch/master?svg=true)](https://ci.appveyor.com/project/GUDHI/gudhi-devel/branch/master)
[![Anaconda Cloud](https://anaconda.org/conda-forge/gudhi/badges/version.svg)](https://anaconda.org/conda-forge/gudhi)
[![Anaconda downloads](https://anaconda.org/conda-forge/gudhi/badges/downloads.svg)](https://anaconda.org/conda-forge/gudhi)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index a96323fd..21664244 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -1,26 +1,26 @@
jobs:
- - job: 'Test'
- displayName: "Build and test"
+ - job: 'OSx'
+ displayName: "Build and test OSx"
timeoutInMinutes: 0
cancelTimeoutInMinutes: 60
pool:
vmImage: macOS-10.15
variables:
- pythonVersion: '3.6'
+ pythonVersion: '3.7'
cmakeBuildType: Release
steps:
- - bash: echo "##vso[task.prependpath]$CONDA/bin"
- displayName: Add conda to PATH
-
- - bash: sudo conda create --yes --quiet --name gudhi_build_env
- displayName: Create Anaconda environment
-
+ # Use a specific Python version
+ - task: UsePythonVersion@0
+ displayName: Use Python $(pythonVersion)
+ inputs:
+ versionSpec: $(pythonVersion)
+ addToPath: true
+ architecture: 'x64'
+
- bash: |
- source activate gudhi_build_env
git submodule update --init
- sudo conda install --yes --quiet --name gudhi_build_env python=$(pythonVersion)
python -m pip install --user -r ext/gudhi-deploy/build-requirements.txt
python -m pip install --user -r ext/gudhi-deploy/test-requirements.txt
python -m pip uninstall -y pykeops
@@ -28,11 +28,56 @@ jobs:
brew install graphviz doxygen boost eigen gmp mpfr tbb cgal || true
displayName: 'Install build dependencies'
- bash: |
- source activate gudhi_build_env
mkdir build
cd build
- cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 ..
- make -j 4
+ cmake -DCMAKE_BUILD_TYPE:STRING=$(cmakeBuildType) -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=ON ..
+ make
make doxygen
- ctest -j 4 --output-on-failure # -E sphinx remove sphinx build as it fails
+ ctest --output-on-failure
displayName: 'Build, test and documentation generation'
+
+ - job: 'Windows'
+ displayName: "Build and test Windows"
+ timeoutInMinutes: 0
+ cancelTimeoutInMinutes: 60
+ pool:
+ vmImage: windows-latest
+ variables:
+ pythonVersion: '3.7'
+ cmakeVcpkgFlags: -DVCPKG_TARGET_TRIPLET=x64-windows -DCMAKE_TOOLCHAIN_FILE=c:\vcpkg\scripts\buildsystems\vcpkg.cmake
+ cmakeFlags: -DWITH_GUDHI_EXAMPLE=ON -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_UTILITIES=ON -DWITH_GUDHI_PYTHON=OFF
+
+ steps:
+ # Use a specific Python version
+ - task: UsePythonVersion@0
+ displayName: Use Python $(pythonVersion)
+ inputs:
+ versionSpec: $(pythonVersion)
+ addToPath: true
+ architecture: 'x64'
+
+ - script: |
+ git submodule update --init
+ python -m pip install --user -r ext/gudhi-deploy/build-requirements.txt
+ # No PyKeOps on windows, let's workaround this one.
+ for /F "tokens=*" %%A in (ext\gudhi-deploy\test-requirements.txt) do python -m pip install %%A
+ vcpkg install boost-filesystem:x64-windows boost-test:x64-windows boost-program-options:x64-windows tbb:x64-windows eigen3:x64-windows cgal:x64-windows
+ displayName: 'Install build dependencies'
+ - script: |
+ call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" amd64
+ mkdir build
+ cd build
+ cmake -G "Visual Studio 16 2019" -A x64 -DCMAKE_BUILD_TYPE=Release $(cmakeVcpkgFlags) $(cmakeFlags) ..
+ MSBuild GUDHIdev.sln /m /p:Configuration=Release /p:Platform=x64
+ ctest --output-on-failure -C Release -E diff_files
+ cmake -DWITH_GUDHI_PYTHON=ON .
+ cd src\python
+ copy "C:\vcpkg\installed\x64-windows\bin\mpfr-6.dll" ".\gudhi\"
+ copy "C:\vcpkg\installed\x64-windows\bin\gmp.dll" ".\gudhi\"
+ copy "C:\vcpkg\installed\x64-windows\bin\tbb.dll" ".\gudhi\"
+ copy "C:\vcpkg\installed\x64-windows\bin\tbbmalloc.dll" ".\gudhi\"
+ python setup.py build_ext --inplace
+ SET PYTHONPATH=%CD%;%PYTHONPATH%
+ echo %PYTHONPATH%
+ ctest --output-on-failure -C Release
+ displayName: 'Build and test'
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 8f6a1ccc..8023e04c 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -2,9 +2,9 @@ cmake_minimum_required(VERSION 3.5)
project(GUDHI)
-include(CMakeGUDHIVersion.txt)
-
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
+include(CMakeGUDHIVersion.txt)
+include(GUDHI_options)
set(GUDHI_MODULES "" CACHE INTERNAL "GUDHI_MODULES")
set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES")
diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake
index ccaf1ac5..13248f7e 100644
--- a/src/cmake/modules/GUDHI_modules.cmake
+++ b/src/cmake/modules/GUDHI_modules.cmake
@@ -17,12 +17,6 @@ function(add_gudhi_module file_path)
endfunction(add_gudhi_module)
-option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF)
-option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF)
-option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON)
-option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON)
-option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON)
-
if (WITH_GUDHI_BENCHMARK)
set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};benchmark")
endif()
diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake
new file mode 100644
index 00000000..3cd0a489
--- /dev/null
+++ b/src/cmake/modules/GUDHI_options.cmake
@@ -0,0 +1,5 @@
+option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF)
+option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF)
+option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON)
+option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON)
+option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON)
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index 706dea58..c8aa1665 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -6,7 +6,7 @@ find_package(Boost 1.56.0 QUIET OPTIONAL_COMPONENTS filesystem unit_test_framewo
if(NOT Boost_VERSION)
message(FATAL_ERROR "NOTICE: This program requires Boost and will not be compiled.")
endif(NOT Boost_VERSION)
-INCLUDE_DIRECTORIES(${Boost_INCLUDE_DIRS})
+include_directories(${Boost_INCLUDE_DIRS})
message(STATUS "boost include dirs:" ${Boost_INCLUDE_DIRS})
message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
@@ -92,91 +92,92 @@ add_definitions( -DBOOST_ALL_DYN_LINK )
# problem on Mac with boost_system and boost_thread
add_definitions( -DBOOST_SYSTEM_NO_DEPRECATED )
-# Find the correct Python interpreter.
-# Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
-find_package( PythonInterp )
-
-# find_python_module tries to import module in Python interpreter and to retrieve its version number
-# returns ${PYTHON_MODULE_NAME_UP}_VERSION and ${PYTHON_MODULE_NAME_UP}_FOUND
-function( find_python_module PYTHON_MODULE_NAME )
- string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
- execute_process(
- COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}; print(${PYTHON_MODULE_NAME}.__version__)"
- RESULT_VARIABLE PYTHON_MODULE_RESULT
- OUTPUT_VARIABLE PYTHON_MODULE_VERSION
- ERROR_VARIABLE PYTHON_MODULE_ERROR)
- if(PYTHON_MODULE_RESULT EQUAL 0)
- # Remove all carriage returns as it can be multiline
- string(REGEX REPLACE "\n" " " PYTHON_MODULE_VERSION "${PYTHON_MODULE_VERSION}")
- message ("++ Python module ${PYTHON_MODULE_NAME} - Version ${PYTHON_MODULE_VERSION} found")
-
- set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE)
- set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
- else()
- message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
- - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
- - PYTHON_MODULE_VERSION = ${PYTHON_MODULE_VERSION}
- - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
- unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE)
- set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
- endif()
-endfunction( find_python_module )
-
-# For modules that do not define module.__version__
-function( find_python_module_no_version PYTHON_MODULE_NAME )
- string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
- execute_process(
- COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}"
- RESULT_VARIABLE PYTHON_MODULE_RESULT
- ERROR_VARIABLE PYTHON_MODULE_ERROR)
- if(PYTHON_MODULE_RESULT EQUAL 0)
- # Remove carriage return
- message ("++ Python module ${PYTHON_MODULE_NAME} found")
- set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
- else()
- message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
- - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
- - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
- set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+if (WITH_GUDHI_PYTHON)
+ # Find the correct Python interpreter.
+ # Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
+ find_package( PythonInterp )
+
+ # find_python_module tries to import module in Python interpreter and to retrieve its version number
+ # returns ${PYTHON_MODULE_NAME_UP}_VERSION and ${PYTHON_MODULE_NAME_UP}_FOUND
+ function( find_python_module PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}; print(${PYTHON_MODULE_NAME}.__version__)"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ OUTPUT_VARIABLE PYTHON_MODULE_VERSION
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+ # Remove all carriage returns as it can be multiline
+ string(REGEX REPLACE "\n" " " PYTHON_MODULE_VERSION "${PYTHON_MODULE_VERSION}")
+ message ("++ Python module ${PYTHON_MODULE_NAME} - Version ${PYTHON_MODULE_VERSION} found")
+
+ set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
+ - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
+ - PYTHON_MODULE_VERSION = ${PYTHON_MODULE_VERSION}
+ - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
+ unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+ endfunction( find_python_module )
+
+ # For modules that do not define module.__version__
+ function( find_python_module_no_version PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+ # Remove carriage return
+ message ("++ Python module ${PYTHON_MODULE_NAME} found")
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
+ - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
+ - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+ endfunction( find_python_module_no_version )
+
+ if( PYTHONINTERP_FOUND )
+ find_python_module("cython")
+ find_python_module("pytest")
+ find_python_module("matplotlib")
+ find_python_module("numpy")
+ find_python_module("scipy")
+ find_python_module("sphinx")
+ find_python_module("sklearn")
+ find_python_module("ot")
+ find_python_module("pybind11")
+ find_python_module("torch")
+ find_python_module("pykeops")
+ find_python_module("eagerpy")
+ find_python_module_no_version("hnswlib")
+ find_python_module("tensorflow")
+ find_python_module("sphinx_paramlinks")
+ find_python_module_no_version("python_docs_theme")
endif()
-endfunction( find_python_module_no_version )
-
-if( PYTHONINTERP_FOUND )
- find_python_module("cython")
- find_python_module("pytest")
- find_python_module("matplotlib")
- find_python_module("numpy")
- find_python_module("scipy")
- find_python_module("sphinx")
- find_python_module("sklearn")
- find_python_module("ot")
- find_python_module("pybind11")
- find_python_module("torch")
- find_python_module("pykeops")
- find_python_module("eagerpy")
- find_python_module_no_version("hnswlib")
- find_python_module("tensorflow")
- find_python_module("sphinx_paramlinks")
- find_python_module_no_version("python_docs_theme")
-endif()
-
-if(NOT GUDHI_PYTHON_PATH)
- message(FATAL_ERROR "ERROR: GUDHI_PYTHON_PATH is not valid.")
-endif(NOT GUDHI_PYTHON_PATH)
-
-option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. Usefull when setting rpath is not allowed" ON)
-
-if(PYTHONINTERP_FOUND AND CYTHON_FOUND)
- if(SPHINX_FOUND)
- # Documentation generation is available through sphinx
- find_program( SPHINX_PATH sphinx-build )
-
- if(NOT SPHINX_PATH)
- if(PYTHON_VERSION_MAJOR EQUAL 3)
- # In Python3, just hack sphinx-build if it does not exist
- set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m" "sphinx.cmd.build")
- endif(PYTHON_VERSION_MAJOR EQUAL 3)
- endif(NOT SPHINX_PATH)
- endif(SPHINX_FOUND)
-endif(PYTHONINTERP_FOUND AND CYTHON_FOUND)
-
+
+ if(NOT GUDHI_PYTHON_PATH)
+ message(FATAL_ERROR "ERROR: GUDHI_PYTHON_PATH is not valid.")
+ endif(NOT GUDHI_PYTHON_PATH)
+
+ option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. Usefull when setting rpath is not allowed" ON)
+
+ if(PYTHONINTERP_FOUND AND CYTHON_FOUND)
+ if(SPHINX_FOUND)
+ # Documentation generation is available through sphinx
+ find_program( SPHINX_PATH sphinx-build )
+
+ if(NOT SPHINX_PATH)
+ if(PYTHON_VERSION_MAJOR EQUAL 3)
+ # In Python3, just hack sphinx-build if it does not exist
+ set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m" "sphinx.cmd.build")
+ endif(PYTHON_VERSION_MAJOR EQUAL 3)
+ endif(NOT SPHINX_PATH)
+ endif(SPHINX_FOUND)
+ endif(PYTHONINTERP_FOUND AND CYTHON_FOUND)
+endif (WITH_GUDHI_PYTHON)
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 26a4e4b6..12ffcd85 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -548,6 +548,11 @@ if(PYTHONINTERP_FOUND)
add_gudhi_py_test(test_representations)
endif()
+ # Betti curves
+ if(SKLEARN_FOUND AND SCIPY_FOUND)
+ add_gudhi_py_test(test_betti_curve_representations)
+ endif()
+
# Time Delay
add_gudhi_py_test(test_time_delay)
diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py
index e883b5dd..f8078d03 100644
--- a/src/python/gudhi/representations/vector_methods.py
+++ b/src/python/gudhi/representations/vector_methods.py
@@ -1,15 +1,17 @@
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Mathieu Carrière, Martin Royer
+# Author(s): Mathieu Carrière, Martin Royer, Gard Spreemann
#
# Copyright (C) 2018-2020 Inria
#
# Modification(s):
# - 2020/06 Martin: ATOL integration
+# - 2020/12 Gard: A more flexible Betti curve class capable of computing exact curves.
# - 2021/11 Vincent Rouvreau: factorize _automatic_sample_range
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
+from sklearn.exceptions import NotFittedError
from sklearn.preprocessing import MinMaxScaler, MaxAbsScaler
from sklearn.neighbors import DistanceMetric
from sklearn.metrics import pairwise
@@ -306,67 +308,162 @@ class Silhouette(BaseEstimator, TransformerMixin):
"""
return self.fit_transform([diag])[0,:]
+
class BettiCurve(BaseEstimator, TransformerMixin):
"""
- This is a class for computing Betti curves from a list of persistence diagrams. A Betti curve is a 1D piecewise-constant function obtained from the rank function. It is sampled evenly on a given range and the vector of samples is returned. See https://www.researchgate.net/publication/316604237_Time_Series_Classification_via_Topological_Data_Analysis for more details.
+ Compute Betti curves from persistence diagrams. There are several modes of operation: with a given resolution (with or without a sample_range), with a predefined grid, and with none of the previous. With a predefined grid, the class computes the Betti numbers at those grid points. Without a predefined grid, if the resolution is set to None, it can be fit to a list of persistence diagrams and produce a grid that consists of (at least) the filtration values at which at least one of those persistence diagrams changes Betti numbers, and then compute the Betti numbers at those grid points. In the latter mode, the exact Betti curve is computed for the entire real line. Otherwise, if the resolution is given, the Betti curve is obtained by sampling evenly using either the given sample_range or based on the persistence diagrams.
"""
- def __init__(self, resolution=100, sample_range=[np.nan, np.nan]):
+
+ def __init__(self, resolution=100, sample_range=[np.nan, np.nan], predefined_grid=None):
"""
Constructor for the BettiCurve class.
Parameters:
resolution (int): number of sample for the piecewise-constant function (default 100).
sample_range ([double, double]): minimum and maximum of the piecewise-constant function domain, of the form [x_min, x_max] (default [numpy.nan, numpy.nan]). It is the interval on which samples will be drawn evenly. If one of the values is numpy.nan, it can be computed from the persistence diagrams with the fit() method.
+ predefined_grid (1d array or None, default=None): Predefined filtration grid points at which to compute the Betti curves. Must be strictly ordered. Infinities are ok. If None (default), and resolution is given, the grid will be uniform from x_min to x_max in 'resolution' steps, otherwise a grid will be computed that captures all changes in Betti numbers in the provided data.
+
+ Attributes:
+ grid_ (1d array): The grid on which the Betti numbers are computed. If predefined_grid was specified, `grid_` will always be that grid, independently of data. If not, the grid is fitted to capture all filtration values at which the Betti numbers change.
+
+ Examples
+ --------
+ If pd is a persistence diagram and xs is a nonempty grid of finite values such that xs[0] >= pd.min(), then the results of:
+
+ >>> bc = BettiCurve(predefined_grid=xs) # doctest: +SKIP
+ >>> result = bc(pd) # doctest: +SKIP
+
+ and
+
+ >>> from scipy.interpolate import interp1d # doctest: +SKIP
+ >>> bc = BettiCurve(resolution=None, predefined_grid=None) # doctest: +SKIP
+ >>> bettis = bc.fit_transform([pd]) # doctest: +SKIP
+ >>> interp = interp1d(bc.grid_, bettis[0, :], kind="previous", fill_value="extrapolate") # doctest: +SKIP
+ >>> result = np.array(interp(xs), dtype=int) # doctest: +SKIP
+
+ are the same.
"""
- self.resolution, self.sample_range = resolution, sample_range
- def fit(self, X, y=None):
+ if (predefined_grid is not None) and (not isinstance(predefined_grid, np.ndarray)):
+ raise ValueError("Expected predefined_grid as array or None.")
+
+ self.predefined_grid = predefined_grid
+ self.resolution = resolution
+ self.sample_range = sample_range
+
+ def is_fitted(self):
+ return hasattr(self, "grid_")
+
+ def fit(self, X, y = None):
"""
- Fit the BettiCurve class on a list of persistence diagrams: if any of the values in **sample_range** is numpy.nan, replace it with the corresponding value computed on the given list of persistence diagrams.
+ Fit the BettiCurve class on a list of persistence diagrams: if any of the values in **sample_range** is numpy.nan, replace it with the corresponding value computed on the given list of persistence diagrams. When no predefined grid is provided and resolution set to None, compute a filtration grid that captures all changes in Betti numbers for all the given persistence diagrams.
Parameters:
- X (list of n x 2 numpy arrays): input persistence diagrams.
- y (n x 1 array): persistence diagram labels (unused).
+ X (list of 2d arrays): Persistence diagrams.
+ y (None): Ignored.
"""
- self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
+
+ if self.predefined_grid is None:
+ if self.resolution is None: # Flexible/exact version
+ events = np.unique(np.concatenate([pd.flatten() for pd in X] + [[-np.inf]], axis=0))
+ self.grid_ = np.array(events)
+ else:
+ self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
+ self.grid_ = np.linspace(self.sample_range[0], self.sample_range[1], self.resolution)
+ else:
+ self.grid_ = self.predefined_grid # Get the predefined grid from user
+
return self
def transform(self, X):
"""
- Compute the Betti curve for each persistence diagram individually and concatenate the results.
+ Compute Betti curves.
Parameters:
- X (list of n x 2 numpy arrays): input persistence diagrams.
-
+ X (list of 2d arrays): Persistence diagrams.
+
Returns:
- numpy array with shape (number of diagrams) x (**resolution**): output Betti curves.
+ `len(X).len(self.grid_)` array of ints: Betti numbers of the given persistence diagrams at the grid points given in `self.grid_`
"""
- Xfit = []
- x_values = np.linspace(self.sample_range[0], self.sample_range[1], self.resolution)
- step_x = x_values[1] - x_values[0]
- for diagram in X:
- diagram_int = np.clip(np.ceil((diagram[:,:2] - self.sample_range[0]) / step_x), 0, self.resolution).astype(int)
- bc = np.zeros(self.resolution)
- for interval in diagram_int:
- bc[interval[0]:interval[1]] += 1
- Xfit.append(np.reshape(bc,[1,-1]))
+ if not self.is_fitted():
+ raise NotFittedError("Not fitted.")
- Xfit = np.concatenate(Xfit, 0)
+ if not X:
+ X = [np.zeros((0, 2))]
+
+ N = len(X)
- return Xfit
+ events = np.concatenate([pd.flatten(order="F") for pd in X], axis=0)
+ sorting = np.argsort(events)
+ offsets = np.zeros(1 + N, dtype=int)
+ for i in range(0, N):
+ offsets[i+1] = offsets[i] + 2*X[i].shape[0]
+ starts = offsets[0:N]
+ ends = offsets[1:N + 1] - 1
- def __call__(self, diag):
+ bettis = [[0] for i in range(0, N)]
+
+ i = 0
+ for x in self.grid_:
+ while i < len(sorting) and events[sorting[i]] <= x:
+ j = np.searchsorted(ends, sorting[i])
+ delta = 1 if sorting[i] - starts[j] < len(X[j]) else -1
+ bettis[j][-1] += delta
+ i += 1
+ for k in range(0, N):
+ bettis[k].append(bettis[k][-1])
+
+ return np.array(bettis, dtype=int)[:, 0:-1]
+
+ def fit_transform(self, X):
+ """
+ The result is the same as fit(X) followed by transform(X), but potentially faster.
"""
- Apply BettiCurve on a single persistence diagram and outputs the result.
- Parameters:
- diag (n x 2 numpy array): input persistence diagram.
+ if self.predefined_grid is None and self.resolution is None:
+ if not X:
+ X = [np.zeros((0, 2))]
- Returns:
- numpy array with shape (**resolution**): output Betti curve.
+ N = len(X)
+
+ events = np.concatenate([pd.flatten(order="F") for pd in X], axis=0)
+ sorting = np.argsort(events)
+ offsets = np.zeros(1 + N, dtype=int)
+ for i in range(0, N):
+ offsets[i+1] = offsets[i] + 2*X[i].shape[0]
+ starts = offsets[0:N]
+ ends = offsets[1:N + 1] - 1
+
+ xs = [-np.inf]
+ bettis = [[0] for i in range(0, N)]
+
+ for i in sorting:
+ j = np.searchsorted(ends, i)
+ delta = 1 if i - starts[j] < len(X[j]) else -1
+ if events[i] == xs[-1]:
+ bettis[j][-1] += delta
+ else:
+ xs.append(events[i])
+ for k in range(0, j):
+ bettis[k].append(bettis[k][-1])
+ bettis[j].append(bettis[j][-1] + delta)
+ for k in range(j+1, N):
+ bettis[k].append(bettis[k][-1])
+
+ self.grid_ = np.array(xs)
+ return np.array(bettis, dtype=int)
+
+ else:
+ return self.fit(X).transform(X)
+
+ def __call__(self, diag):
"""
- return self.fit_transform([diag])[0,:]
+ Shorthand for transform on a single persistence diagram.
+ """
+ return self.fit_transform([diag])[0, :]
+
+
class Entropy(BaseEstimator, TransformerMixin):
"""
diff --git a/src/python/test/test_betti_curve_representations.py b/src/python/test/test_betti_curve_representations.py
new file mode 100755
index 00000000..6a45da4d
--- /dev/null
+++ b/src/python/test/test_betti_curve_representations.py
@@ -0,0 +1,59 @@
+import numpy as np
+import scipy.interpolate
+import pytest
+
+from gudhi.representations.vector_methods import BettiCurve
+
+def test_betti_curve_is_irregular_betti_curve_followed_by_interpolation():
+ m = 10
+ n = 1000
+ pinf = 0.05
+ pzero = 0.05
+ res = 100
+
+ pds = []
+ for i in range(0, m):
+ pd = np.zeros((n, 2))
+ pd[:, 0] = np.random.uniform(0, 10, n)
+ pd[:, 1] = np.random.uniform(pd[:, 0], 10, n)
+ pd[np.random.uniform(0, 1, n) < pzero, 0] = 0
+ pd[np.random.uniform(0, 1, n) < pinf, 1] = np.inf
+ pds.append(pd)
+
+ bc = BettiCurve(resolution=None, predefined_grid=None)
+ bc.fit(pds)
+ bettis = bc.transform(pds)
+
+ bc2 = BettiCurve(resolution=None, predefined_grid=None)
+ bettis2 = bc2.fit_transform(pds)
+ assert((bc2.grid_ == bc.grid_).all())
+ assert((bettis2 == bettis).all())
+
+ for i in range(0, m):
+ grid = np.linspace(pds[i][np.isfinite(pds[i])].min(), pds[i][np.isfinite(pds[i])].max() + 1, res)
+ bc_gridded = BettiCurve(predefined_grid=grid)
+ bc_gridded.fit([])
+ bettis_gridded = bc_gridded(pds[i])
+
+ interp = scipy.interpolate.interp1d(bc.grid_, bettis[i, :], kind="previous", fill_value="extrapolate")
+ bettis_interp = np.array(interp(grid), dtype=int)
+ assert((bettis_interp == bettis_gridded).all())
+
+
+def test_empty_with_predefined_grid():
+ random_grid = np.sort(np.random.uniform(0, 1, 100))
+ bc = BettiCurve(predefined_grid=random_grid)
+ bettis = bc.fit_transform([])
+ assert((bc.grid_ == random_grid).all())
+ assert((bettis == 0).all())
+
+
+def test_empty():
+ bc = BettiCurve(resolution=None, predefined_grid=None)
+ bettis = bc.fit_transform([])
+ assert(bc.grid_ == [-np.inf])
+ assert((bettis == 0).all())
+
+def test_wrong_value_of_predefined_grid():
+ with pytest.raises(ValueError):
+ BettiCurve(predefined_grid=[1, 2, 3])
diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py
index 93461f1e..d219ce7a 100755
--- a/src/python/test/test_representations.py
+++ b/src/python/test/test_representations.py
@@ -105,7 +105,6 @@ def test_dummy_atol():
from gudhi.representations.vector_methods import BettiCurve
-
def test_infinity():
a = np.array([[1.0, 8.0], [2.0, np.inf], [3.0, 4.0]])
c = BettiCurve(20, [0.0, 10.0])(a)
@@ -113,7 +112,6 @@ def test_infinity():
assert c[7] == 3
assert c[9] == 2
-
def test_preprocessing_empty_diagrams():
empty_diag = np.empty(shape = [0, 2])
assert not np.any(BirthPersistenceTransform()(empty_diag))
@@ -169,3 +167,4 @@ def test_kernel_empty_diagrams():
# PersistenceScaleSpaceKernel(kernel_approx=RBFSampler(gamma=1./2, n_components=100000).fit(np.ones([1,2])))(empty_diag, empty_diag)
# PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1.)(empty_diag, empty_diag)
# PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1., kernel_approx=RBFSampler(gamma=1./2, n_components=100000).fit(np.ones([1,2])))(empty_diag, empty_diag)
+