diff options
42 files changed, 986 insertions, 453 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml index e2df5c87..ef22fbea 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,6 +10,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test examples command: | mkdir build @@ -24,6 +29,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test unitary tests command: | mkdir build @@ -38,6 +48,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test utilities command: | mkdir build @@ -52,10 +67,13 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test python module. Generates and tests the python documentation command: | - git submodule init - git submodule update mkdir build cd build cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version .. @@ -64,6 +82,7 @@ jobs: cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_REMOTE_TEST=ON . cd python python3 setup.py build_ext --inplace + ctest --output-on-failure make sphinx cp -R sphinx /tmp/sphinx python3 setup.py install @@ -83,10 +102,13 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Generates the C++ documentation with doxygen command: | - git submodule init - git submodule update mkdir build cd build cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version .. 
@@ -104,6 +126,26 @@ jobs: path: /tmp/doxygen destination: doxygen + bibliography: + docker: + - image: gudhi/doxygen_for_gudhi:latest + steps: + - checkout + - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: + name: Test the LaTeX bibliography files + command: | + mkdir build + cd build + cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version .. + cd biblio/test + latexmk -pdf -interaction=nonstopmode test_biblio.tex + latexmk -pdf -interaction=nonstopmode test_gudhi_citation.tex + ### With all third parties, except CGAL and Eigen @@ -114,6 +156,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test examples without cgal and eigen command: | mkdir build @@ -128,6 +175,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test unitary tests without cgal and eigen command: | mkdir build @@ -142,6 +194,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test utilities without cgal and eigen command: | mkdir build @@ -156,10 +213,13 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test python module without cgal and eigen command: | - git submodule init - git submodule update mkdir build cd build cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. 
@@ -176,6 +236,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test examples without cgal command: | mkdir build @@ -190,6 +255,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test unitary tests without cgal command: | mkdir build @@ -204,6 +274,11 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test utilities without cgal command: | mkdir build @@ -218,10 +293,13 @@ jobs: steps: - checkout - run: + name: Checkout submodules + command: | + git submodule sync + git submodule update --init + - run: name: Build and test python module without cgal command: | - git submodule init - git submodule update mkdir build cd build cmake -DCMAKE_BUILD_TYPE=Release -DEIGEN3_INCLUDE_DIR=/eigen-3.3.9 -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 .. @@ -246,3 +324,4 @@ workflows: - utils - python - doxygen + - bibliography diff --git a/.github/how_to_use_github_to_contribute_to_gudhi.md b/.github/how_to_use_github_to_contribute_to_gudhi.md index 738c1ce9..f72bb9d6 100644 --- a/.github/how_to_use_github_to_contribute_to_gudhi.md +++ b/.github/how_to_use_github_to_contribute_to_gudhi.md @@ -17,7 +17,7 @@ You can see your fork at https://github.com/LOGIN/gudhi-devel ## Create a local clone on your computer ```bash -git clone https://github.com/LOGIN/gudhi-devel.git +git clone --recurse-submodules https://github.com/LOGIN/gudhi-devel.git ``` This creates a directory gudhi-devel, which you are free to move around or rename. 
For the following, change to that directory: @@ -25,16 +25,14 @@ This creates a directory gudhi-devel, which you are free to move around or renam cd gudhi-devel ``` -When you clone the repository, you also need to download the *submodules*. - ## Submodules -Hera, used for Wasserstein distance, is available on an external git repository. To download it: +When you clone the repository, you also need to download the *submodules*. This is done automatically thanks to `--recurse-submodules`. +If you forgot this option, you can still download them with ```bash git submodule update --init ``` -[gudhi-deploy](https://github.com/GUDHI/gudhi-deploy) is used for Continuous Integration python -requirements and will also be downloaded by the above command. +The submodules appear in the `ext/` subdirectory. There are currently 2, [Hera](https://github.com/anigmetov/hera) for distances between persistence diagrams, and [gudhi-deploy](https://github.com/GUDHI/gudhi-deploy) for Continuous Integration. ## Configuring a remote for a fork ```bash @@ -68,6 +66,11 @@ It is safe, it will not mess with your files. git submodule sync git submodule update --init ``` +You can configure `git` to do this automatically with +```bash +git config submodule.recurse true +``` +(add `--global` if you want it to apply to other projects as well) ## Create a branch, based on the current master ```bash diff --git a/.github/next_release.md b/.github/next_release.md index d5fcef1c..929a7ce6 100644 --- a/.github/next_release.md +++ b/.github/next_release.md @@ -9,6 +9,9 @@ Below is a list of changes made since GUDHI 3.6.0: - [Module](link) - ... +- [Simplex tree](https://gudhi.inria.fr/python/latest/simplex_tree_ref.html) + - New functions to initialize from a matrix or insert batches of simplices of the same dimension. + - [Rips complex](https://gudhi.inria.fr/python/latest/rips_complex_user.html) - Construction now rejects positional arguments, you need to specify `points=X`. 
diff --git a/.github/workflows/pip-build-linux.yml b/.github/workflows/pip-build-linux.yml index 11b6271d..bc4f999e 100644 --- a/.github/workflows/pip-build-linux.yml +++ b/.github/workflows/pip-build-linux.yml @@ -12,16 +12,16 @@ jobs: - uses: actions/checkout@v3 with: submodules: true - - name: Build wheel for Python 3.10 + - name: Build wheel for Python 3.11 run: | - mkdir build_310 - cd build_310 - cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON310/bin/python .. + mkdir build_311 + cd build_311 + cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON311/bin/python .. cd src/python - $PYTHON310/bin/python setup.py bdist_wheel + $PYTHON311/bin/python setup.py bdist_wheel auditwheel repair dist/*.whl - - name: Install and test wheel for Python 3.10 + - name: Install and test wheel for Python 3.11 run: | - $PYTHON310/bin/python -m pip install --user pytest build_310/src/python/dist/*.whl - $PYTHON310/bin/python -c "import gudhi; print(gudhi.__version__)" - $PYTHON310/bin/python -m pytest src/python/test/test_alpha_complex.py + $PYTHON311/bin/python -m pip install --user pytest build_311/src/python/dist/*.whl + $PYTHON311/bin/python -c "import gudhi; print(gudhi.__version__)" + $PYTHON311/bin/python -m pytest src/python/test/test_alpha_complex.py diff --git a/.github/workflows/pip-build-osx.yml b/.github/workflows/pip-build-osx.yml index 59e94ca5..a438124a 100644 --- a/.github/workflows/pip-build-osx.yml +++ b/.github/workflows/pip-build-osx.yml @@ -2,13 +2,18 @@ name: pip build osx on: [push, pull_request] +env: + MACOSX_DEPLOYMENT_TARGET: 10.14 + _PYTHON_HOST_PLATFORM: macosx-10.14-universal2 + ARCHFLAGS: "-arch arm64 -arch x86_64" + jobs: build: runs-on: macos-latest strategy: max-parallel: 4 matrix: - python-version: ['3.10'] + python-version: ['3.11'] name: Build wheels for Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 @@ -24,14 +29,21 @@ jobs: brew install boost eigen gmp mpfr cgal || true python -m pip install 
--user -r ext/gudhi-deploy/build-requirements.txt python -m pip install --user twine delocate + ./scripts/build_osx_universal_gmpfr.sh + # Now the universal libraries are in $PWD/deps-uni/lib - name: Build python wheel run: | + export GMP_LIB_DIR=$PWD/deps-uni/lib + export GMPXX_LIB_DIR=$PWD/deps-uni/lib + export MPFR_LIB_DIR=$PWD/deps-uni/lib python --version mkdir build cd build cmake -DCMAKE_BUILD_TYPE=Release -DPython_ADDITIONAL_VERSIONS=3 .. cd src/python python setup.py bdist_wheel + export PATH="$PATH:`python -m site --user-base`/bin" + delocate-wheel --require-archs universal2 -v dist/*.whl - name: Install and test python wheel run: | python -m pip install --user pytest build/src/python/dist/*.whl diff --git a/.github/workflows/pip-build-windows.yml b/.github/workflows/pip-build-windows.yml index b3d75706..50bdfe2c 100644 --- a/.github/workflows/pip-build-windows.yml +++ b/.github/workflows/pip-build-windows.yml @@ -8,7 +8,7 @@ jobs: strategy: max-parallel: 4 matrix: - python-version: ['3.10'] + python-version: ['3.11'] name: Build wheels for Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/pip-packaging-linux.yml b/.github/workflows/pip-packaging-linux.yml index 285cfa00..14b1cf7a 100644 --- a/.github/workflows/pip-packaging-linux.yml +++ b/.github/workflows/pip-packaging-linux.yml @@ -79,6 +79,19 @@ jobs: $PYTHON310/bin/python -m pip install --user pytest build_310/src/python/dist/*.whl $PYTHON310/bin/python -c "import gudhi; print(gudhi.__version__)" $PYTHON310/bin/python -m pytest src/python/test/test_alpha_complex.py + - name: Build wheel for Python 3.11 + run: | + mkdir build_311 + cd build_311 + cmake -DCMAKE_BUILD_TYPE=Release -DPYTHON_EXECUTABLE=$PYTHON311/bin/python .. 
+ cd src/python + $PYTHON311/bin/python setup.py bdist_wheel + auditwheel repair dist/*.whl + - name: Install and test wheel for Python 3.11 + run: | + $PYTHON311/bin/python -m pip install --user pytest build_311/src/python/dist/*.whl + $PYTHON311/bin/python -c "import gudhi; print(gudhi.__version__)" + $PYTHON311/bin/python -m pytest src/python/test/test_alpha_complex.py - name: Publish on PyPi env: TWINE_USERNAME: __token__ @@ -89,3 +102,4 @@ jobs: $PYTHON36/bin/python -m twine upload build_38/src/python/wheelhouse/* $PYTHON36/bin/python -m twine upload build_39/src/python/wheelhouse/* $PYTHON36/bin/python -m twine upload build_310/src/python/wheelhouse/* + $PYTHON36/bin/python -m twine upload build_311/src/python/wheelhouse/* diff --git a/.github/workflows/pip-packaging-osx.yml b/.github/workflows/pip-packaging-osx.yml index 3ae840c6..9ddbcfce 100644 --- a/.github/workflows/pip-packaging-osx.yml +++ b/.github/workflows/pip-packaging-osx.yml @@ -4,13 +4,18 @@ on: release: types: [published] +env: + MACOSX_DEPLOYMENT_TARGET: 10.15 + _PYTHON_HOST_PLATFORM: macosx-10.15-universal2 + ARCHFLAGS: "-arch arm64 -arch x86_64" + jobs: build: runs-on: macos-latest strategy: max-parallel: 4 matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] name: Build wheels for Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 @@ -26,8 +31,13 @@ jobs: brew install boost eigen gmp mpfr cgal || true python -m pip install --user -r ext/gudhi-deploy/build-requirements.txt python -m pip install --user twine delocate + ./scripts/build_osx_universal_gmpfr.sh + # Now the universal libs are in $PWD/deps-uni/lib - name: Build python wheel run: | + export GMP_LIB_DIR=$PWD/deps-uni/lib + export GMPXX_LIB_DIR=$PWD/deps-uni/lib + export MPFR_LIB_DIR=$PWD/deps-uni/lib python --version mkdir build cd build @@ -45,6 +55,7 @@ jobs: TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | mkdir wheelhouse - 
/Users/runner/.local/bin/delocate-listdeps build/src/python/dist/* - /Users/runner/.local/bin/delocate-wheel --require-archs x86_64 -w wheelhouse build/src/python/dist/* - python -m twine upload wheelhouse/*
\ No newline at end of file + export PATH="$PATH:`python -m site --user-base`/bin" + delocate-listdeps build/src/python/dist/* + delocate-wheel --require-archs universal2 -w wheelhouse build/src/python/dist/* + python -m twine upload wheelhouse/* diff --git a/.github/workflows/pip-packaging-windows.yml b/.github/workflows/pip-packaging-windows.yml index 6f544499..df0db9a5 100644 --- a/.github/workflows/pip-packaging-windows.yml +++ b/.github/workflows/pip-packaging-windows.yml @@ -10,7 +10,7 @@ jobs: strategy: max-parallel: 4 matrix: - python-version: ['3.7', '3.8', '3.9', '3.10'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] name: Build wheels for Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index 0a3ef43d..d8472ad0 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -1090,7 +1090,7 @@ language={English} @ARTICLE{Reininghaus_Huber_ALL_PSSK, author = {J. Reininghaus and S. Huber and U. Bauer and R. Kwitt}, title = {A Stable Multi-Scale Kernel for Topological Machine Learning.}, - journal = {Proc. 2015 IEEE Conf. Comp. Vision & Pat. Rec. (CVPR '15)}, + journal = {Proc. 2015 IEEE Conf. Comp. Vision \& Pat. Rec. 
(CVPR '15)}, year = {2015} } diff --git a/biblio/how_to_cite_gudhi.bib.in b/biblio/how_to_cite_gudhi.bib.in index 579dbf41..02c09dea 100644 --- a/biblio/how_to_cite_gudhi.bib.in +++ b/biblio/how_to_cite_gudhi.bib.in @@ -1,168 +1,262 @@ @book{gudhi:urm -, title = "{GUDHI} User and Reference Manual" -, author = "{The GUDHI Project}" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, year = @GUDHI_VERSION_YEAR@ -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/" +, title = {GUDHI User and Reference Manual} +, author = {The GUDHI Project} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, year = {@GUDHI_VERSION_YEAR@} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/} } -@incollection{gudhi:FilteredComplexes -, author = "Cl\'ement Maria" -, title = "Filtered Complexes" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__simplex__tree.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:CubicalComplex +, author = {Pawel Dlotko} +, title = {Cubical complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cubical__complex.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:PersistentCohomology -, author = "Cl\'ement Maria" -, title = "Persistent Cohomology" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__persistent__cohomology.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:FilteredComplexes +, author = {Cl{\'{e}}ment Maria} +, title = {Filtered Complexes} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = 
{https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__simplex__tree.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:Contraction -, author = "David Salinas" -, title = "Contraction" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__contr.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:ToplexMap +, author = {Fran{\c{c}}ois Godi} +, title = {Toplex map} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__toplex__map.html} +, year = {@GUDHI_VERSION_YEAR@} } @incollection{gudhi:SkeletonBlocker -, author = "David Salinas" -, title = "Skeleton-Blocker" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__skbl.html" -, year = @GUDHI_VERSION_YEAR@ +, author = {David Salinas} +, title = {Skeleton-Blocker} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__skbl.html} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:Contraction +, author = {David Salinas} +, title = {Contraction} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__contr.html} +, year = {@GUDHI_VERSION_YEAR@} } @incollection{gudhi:AlphaComplex -, author = "Vincent Rouvreau" -, title = "Alpha complex" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__alpha__complex.html" -, year = @GUDHI_VERSION_YEAR@ +, author = 
{Vincent Rouvreau} +, title = {Alpha complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__alpha__complex.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:CubicalComplex -, author = "Pawel Dlotko" -, title = "Cubical complex" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cubical__complex.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:CechComplex +, author = {Vincent Rouvreau and Hind Montassif} +, title = {{\v{C}}ech complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cech__complex.html} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:RipsComplex +, author = {Cl{\'{e}}ment Maria and Pawel Dlotko and Vincent Rouvreau and Marc Glisse} +, title = {Rips complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__rips__complex.html} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:Collapse +, author = {Siddharth Pritam and Marc Glisse} +, title = {Edge collapse} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__edge__collapse.html} +, year = {@GUDHI_VERSION_YEAR@} } @incollection{gudhi:WitnessComplex -, author = "Siargey Kachanovich" -, title = "Witness complex" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__witness__complex.html" -, 
year = @GUDHI_VERSION_YEAR@ +, author = {Siargey Kachanovich} +, title = {Witness complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__witness__complex.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:SubSampling -, author = "Cl\'ement Jamin and Siargey Kachanovich" -, title = "Subsampling" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__subsampling.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:CoverComplex +, author = {Mathieu Carri{\`{e}}re} +, title = {Cover complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cover__complex.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:SpatialSearching -, author = "Cl\'ement Jamin" -, title = "Spatial searching" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__spatial__searching.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:CoxeterTriangulation +, author = {Siargey Kachanovich} +, title = {Coxeter triangulation} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cover__complex.html} +, year = {@GUDHI_VERSION_YEAR@} } @incollection{gudhi:TangentialComplex -, author = "Cl\'ement Jamin" -, title = "Tangential complex" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = 
"https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__tangential__complex.html" -, year = @GUDHI_VERSION_YEAR@ +, author = {Cl{\'{e}}ment Jamin} +, title = {Tangential complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__tangential__complex.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:RipsComplex -, author = "Cl\'ement Maria and Pawel Dlotko and Vincent Rouvreau and Marc Glisse" -, title = "Rips complex" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__rips__complex.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:PersistentCohomology +, author = {Cl{\'{e}}ment Maria} +, title = {Persistent Cohomology} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__persistent__cohomology.html} +, year = {@GUDHI_VERSION_YEAR@} } @incollection{gudhi:BottleneckDistance -, author = "Fran{{\c{c}}ois Godi" -, title = "Bottleneck distance" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__bottleneck__distance.html" -, year = @GUDHI_VERSION_YEAR@ +, author = {Fran{\c{c}}ois Godi} +, title = {Bottleneck distance} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__bottleneck__distance.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:cython -, author = "Vincent Rouvreau" -, title = "Cython interface" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User 
and Reference Manual" -, url = "https://gudhi.inria.fr/python/@GUDHI_VERSION@/" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:PersistenceRepresentations +, author = {Pawel Dlotko} +, title = {Persistence representations} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group___persistence__representations.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:CoverComplex -, author = "Mathieu Carri\`ere" -, title = "Cover complex" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cover__complex.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:SubSampling +, author = {Cl{\'{e}}ment Jamin and Siargey Kachanovich} +, title = {Subsampling} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__subsampling.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:PersistenceRepresentations -, author = "Pawel Dlotko" -, title = "Persistence representations" -, publisher = "{GUDHI Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group___persistence__representations.html" -, year = @GUDHI_VERSION_YEAR@ +@incollection{gudhi:SpatialSearching +, author = {Cl{\'{e}}ment Jamin} +, title = {Spatial searching} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__spatial__searching.html} +, year = {@GUDHI_VERSION_YEAR@} } -@incollection{gudhi:Collapse -, author = "Siddharth Pritam and Marc Glisse" -, title = "Edge collapse" -, publisher = "{GUDHI 
Editorial Board}" -, edition = "{@GUDHI_VERSION@}" -, booktitle = "{GUDHI} User and Reference Manual" -, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__edge__collapse.html" -, year = @GUDHI_VERSION_YEAR@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Python specific gudhi modules +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +@incollection{gudhi:WeightedRipsComplex +, author = {Rapha{\"{e}}l Tinarrage and Yuichi Ike and Masatoshi Takenouchi} +, title = {Weighted Rips Complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/rips_complex_user.html#weighted-rips-complex} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:DTMRipsComplex +, author = {Yuichi Ike} +, title = {DTM Rips Complex} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/rips_complex_user.html#dtm-rips-complex} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:WassersteinDistance +, author = {Th{\'{e}}o Lacombe and Marc Glisse} +, title = {Wasserstein distance} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/wasserstein_distance_user.html} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:PersistenceRepresentationsScikitlearnInterface +, author = {Mathieu Carri{\`{e}}re and Gard Spreemann and Wojciech Reise} +, title = {Persistence representations scikit-learn like interface} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/representations.html} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:Atol +, author = {Martin Royer} +, title = {Measure Vectorization for Automatic 
Topologically-Oriented Learning} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/representations.html#gudhi.representations.vector_methods.Atol} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:DistanceToMeasure +, author = {Marc Glisse} +, title = {Distance to measure} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/point_cloud.html#module-gudhi.point_cloud.knn} +, year = {@GUDHI_VERSION_YEAR@} +} + +@incollection{gudhi:PersistenceBasedClustering +, author = {Marc Glisse} +, title = {persistence-based clustering} +, publisher = {GUDHI Editorial Board} +, edition = {@GUDHI_VERSION@} +, booktitle = {GUDHI User and Reference Manual} +, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/clustering.html} +, year = {@GUDHI_VERSION_YEAR@} } diff --git a/biblio/test/test_biblio.tex b/biblio/test/test_biblio.tex new file mode 100644 index 00000000..97dee9ed --- /dev/null +++ b/biblio/test/test_biblio.tex @@ -0,0 +1,7 @@ +\documentclass{article} +\usepackage{hyperref} +\bibliographystyle{plainurl} +\begin{document} +\nocite{*} +\bibliography{../bibliography} +\end{document}
\ No newline at end of file diff --git a/biblio/test/test_gudhi_citation.tex b/biblio/test/test_gudhi_citation.tex new file mode 100644 index 00000000..5fb2d33d --- /dev/null +++ b/biblio/test/test_gudhi_citation.tex @@ -0,0 +1,7 @@ +\documentclass{article} +\usepackage{hyperref} +\bibliographystyle{plainurl} +\begin{document} +\nocite{*} +\bibliography{../how_to_cite_gudhi} +\end{document}
\ No newline at end of file diff --git a/ext/hera b/ext/hera -Subproject b528c4067a8aac346eb307d3c23b82d5953cfe2 +Subproject 8bfdd4bd32f005c18b5c75c502b987de552d6e4 diff --git a/scripts/build_osx_universal_gmpfr.sh b/scripts/build_osx_universal_gmpfr.sh new file mode 100755 index 00000000..3dafa3ce --- /dev/null +++ b/scripts/build_osx_universal_gmpfr.sh @@ -0,0 +1,47 @@ +#!/bin/bash +set -e + +# In the working directory, creates deps-uni/lib/* +# Assumes that the user has enough rights to run brew fetch + +# Downloading +mkdir deps-amd64 +cd deps-amd64 +tar xf "`brew fetch --bottle-tag=big_sur gmp | sed -ne 's/^Downloaded to: //p'`" +tar xf "`brew fetch --bottle-tag=big_sur mpfr | sed -ne 's/^Downloaded to: //p'`" +cd .. +mkdir deps-arm64 +cd deps-arm64 +tar xf "`brew fetch --bottle-tag=arm64_big_sur gmp | sed -ne 's/^Downloaded to: //p'`" +tar xf "`brew fetch --bottle-tag=arm64_big_sur mpfr | sed -ne 's/^Downloaded to: //p'`" +cd .. + +# Merging +mkdir -p deps-uni/lib +GMP1=deps-amd64/gmp/*/lib/libgmp.*.dylib +GMP=`basename $GMP1` +GMPXX1=deps-amd64/gmp/*/lib/libgmpxx.*.dylib +GMPXX=`basename $GMPXX1` +MPFR1=deps-amd64/mpfr/*/lib/libmpfr.*.dylib +MPFR=`basename $MPFR1` +lipo -create $GMP1 deps-arm64/gmp/*/lib/$GMP -output deps-uni/lib/$GMP +lipo -create $GMPXX1 deps-arm64/gmp/*/lib/$GMPXX -output deps-uni/lib/$GMPXX +lipo -create $MPFR1 deps-arm64/mpfr/*/lib/$MPFR -output deps-uni/lib/$MPFR + +# Necessary even for libs created by lipo +install_name_tool -id $PWD/deps-uni/lib/$GMP deps-uni/lib/$GMP +install_name_tool -id $PWD/deps-uni/lib/$GMPXX deps-uni/lib/$GMPXX +install_name_tool -id $PWD/deps-uni/lib/$MPFR deps-uni/lib/$MPFR +# Also fix dependencies +BADGMP=`otool -L deps-uni/lib/$MPFR|sed -ne 's/[[:space:]]*\(.*libgmp\..*dylib\).*/\1/p'` +install_name_tool -change $BADGMP $PWD/deps-uni/lib/$GMP deps-uni/lib/$MPFR +BADGMP=`otool -L deps-uni/lib/$GMPXX|sed -ne 's/[[:space:]]*\(.*libgmp\..*dylib\).*/\1/p'` +install_name_tool -change $BADGMP 
$PWD/deps-uni/lib/$GMP deps-uni/lib/$GMPXX + +ln -s $GMP deps-uni/lib/libgmp.dylib +ln -s $GMPXX deps-uni/lib/libgmpxx.dylib +ln -s $MPFR deps-uni/lib/libmpfr.dylib + +# Debug +ls -l deps-uni/lib +otool -L deps-uni/lib/*.*.dylib diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h index 595fb64b..73093c07 100644 --- a/src/Cech_complex/doc/Intro_cech_complex.h +++ b/src/Cech_complex/doc/Intro_cech_complex.h @@ -17,7 +17,7 @@ namespace cech_complex { /** \defgroup cech_complex Čech complex * - * \author Vincent Rouvreau + * \author Vincent Rouvreau, Hind Montassif * * @{ * diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h index 625f7c9c..dbdf5e93 100644 --- a/src/Cech_complex/include/gudhi/Cech_complex.h +++ b/src/Cech_complex/include/gudhi/Cech_complex.h @@ -1,11 +1,12 @@ /* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT. * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details. - * Author(s): Vincent Rouvreau + * Author(s): Vincent Rouvreau, Hind Montassif * * Copyright (C) 2018 Inria * * Modification(s): * - YYYY/MM Author: Description of the modification + * - 2022/02 Hind Montassif : Replace MiniBall with Sphere_circumradius */ #ifndef CECH_COMPLEX_H_ diff --git a/src/Cech_complex/utilities/cechcomplex.md b/src/Cech_complex/utilities/cechcomplex.md index 0e82674d..54c4e88d 100644 --- a/src/Cech_complex/utilities/cechcomplex.md +++ b/src/Cech_complex/utilities/cechcomplex.md @@ -36,14 +36,14 @@ where * `-h [ --help ]` Produce help message * `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output. -* `-r [ --max-edge-length ]` (default = inf) Maximal length of an edge for the Čech complex construction. +* `-r [ --max-radius ]` (default = inf) Maximal radius for the Čech complex construction. 
* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the ÄŒech complex we want to compute. * `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology. * `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. * `-e [ --exact ]` for the exact computation version. * `-f [ --fast ]` for the fast computation version. -Beware: this program may use a lot of RAM and take a lot of time if `max-edge-length` is set to a large value. +Beware: this program may use a lot of RAM and take a lot of time if `max-radius` is set to a large value. **Example 1 with Z/2Z coefficients** diff --git a/src/Doxyfile.in b/src/Doxyfile.in index 1ec190d9..d5664a49 100644 --- a/src/Doxyfile.in +++ b/src/Doxyfile.in @@ -700,7 +700,6 @@ LAYOUT_FILE = # search path. See also \cite for info how to create references. CITE_BIB_FILES = @CMAKE_SOURCE_DIR@/biblio/bibliography.bib \ - @CMAKE_SOURCE_DIR@/biblio/how_to_cite_cgal.bib \ @CMAKE_SOURCE_DIR@/biblio/how_to_cite_gudhi.bib #--------------------------------------------------------------------------- diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt index 4b0f4677..9faf1f3b 100644 --- a/src/Nerve_GIC/example/CMakeLists.txt +++ b/src/Nerve_GIC/example/CMakeLists.txt @@ -1,25 +1,21 @@ project(Nerve_GIC_examples) -if (NOT CGAL_VERSION VERSION_LESS 4.11.0) +add_executable ( CoordGIC CoordGIC.cpp ) +add_executable ( FuncGIC FuncGIC.cpp ) - add_executable ( CoordGIC CoordGIC.cpp ) - add_executable ( FuncGIC FuncGIC.cpp ) +if (TBB_FOUND) + target_link_libraries(CoordGIC ${TBB_LIBRARIES}) + target_link_libraries(FuncGIC ${TBB_LIBRARIES}) +endif() - if (TBB_FOUND) - target_link_libraries(CoordGIC ${TBB_LIBRARIES}) - target_link_libraries(FuncGIC ${TBB_LIBRARIES}) - endif() +# Copy files for not to pollute sources when testing +file(COPY 
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - # Copy files for not to pollute sources when testing - file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC> + "${CMAKE_CURRENT_BINARY_DIR}/tore3D_1307.off" "0") - add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC> - "${CMAKE_CURRENT_BINARY_DIR}/tore3D_1307.off" "0") - - add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC> - "${CMAKE_CURRENT_BINARY_DIR}/lucky_cat.off" - "${CMAKE_CURRENT_BINARY_DIR}/lucky_cat_PCA1") - -endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) +add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC> + "${CMAKE_CURRENT_BINARY_DIR}/lucky_cat.off" + "${CMAKE_CURRENT_BINARY_DIR}/lucky_cat_PCA1") diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h index 1b1f9323..047fba61 100644 --- a/src/Nerve_GIC/include/gudhi/GIC.h +++ b/src/Nerve_GIC/include/gudhi/GIC.h @@ -17,6 +17,14 @@ #include <mutex> #endif +#if __has_include(<CGAL/version.h>) +# define GUDHI_GIC_USE_CGAL 1 +# include <gudhi/Bottleneck.h> +#elif __has_include(<hera/bottleneck.h>) +# define GUDHI_GIC_USE_HERA 1 +# include <hera/bottleneck.h> +#endif + #include <gudhi/Debug_utils.h> #include <gudhi/graph_simplicial_complex.h> #include <gudhi/reader_utils.h> @@ -25,7 +33,6 @@ #include <gudhi/Points_off_io.h> #include 
<gudhi/distance_functions.h> #include <gudhi/Persistent_cohomology.h> -#include <gudhi/Bottleneck.h> #include <boost/config.hpp> #include <boost/graph/graph_traits.hpp> @@ -35,8 +42,6 @@ #include <boost/graph/subgraph.hpp> #include <boost/graph/graph_utility.hpp> -#include <CGAL/version.h> // for CGAL_VERSION_NR - #include <iostream> #include <vector> #include <map> @@ -1228,7 +1233,14 @@ class Cover_complex { Cboot.set_cover_from_function(); Cboot.find_simplices(); Cboot.compute_PD(); +#ifdef GUDHI_GIC_USE_CGAL double db = Gudhi::persistence_diagram::bottleneck_distance(this->PD, Cboot.PD); +#elif defined GUDHI_GIC_USE_HERA + double db = hera::bottleneckDistExact(this->PD, Cboot.PD); +#else + double db; + throw std::logic_error("This function requires CGAL or Hera for the bottleneck distance."); +#endif if (verbose) std::clog << db << std::endl; distribution.push_back(db); } diff --git a/src/Nerve_GIC/test/CMakeLists.txt b/src/Nerve_GIC/test/CMakeLists.txt index 567bf43f..e012a178 100644 --- a/src/Nerve_GIC/test/CMakeLists.txt +++ b/src/Nerve_GIC/test/CMakeLists.txt @@ -1,15 +1,12 @@ project(Graph_induced_complex_tests) -if (NOT CGAL_VERSION VERSION_LESS 4.11.0) - include(GUDHI_boost_test) +include(GUDHI_boost_test) - add_executable ( Nerve_GIC_test_unit test_GIC.cpp ) - if (TBB_FOUND) - target_link_libraries(Nerve_GIC_test_unit ${TBB_LIBRARIES}) - endif() +add_executable ( Nerve_GIC_test_unit test_GIC.cpp ) +if (TBB_FOUND) + target_link_libraries(Nerve_GIC_test_unit ${TBB_LIBRARIES}) +endif() - file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - gudhi_add_boost_test(Nerve_GIC_test_unit) - -endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) +gudhi_add_boost_test(Nerve_GIC_test_unit) diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt index 65a08d9a..4521a992 100644 --- a/src/Nerve_GIC/utilities/CMakeLists.txt +++ b/src/Nerve_GIC/utilities/CMakeLists.txt @@ -1,27 
+1,23 @@ project(Nerve_GIC_examples) -if (NOT CGAL_VERSION VERSION_LESS 4.11.0) +add_executable ( Nerve Nerve.cpp ) +add_executable ( VoronoiGIC VoronoiGIC.cpp ) - add_executable ( Nerve Nerve.cpp ) - add_executable ( VoronoiGIC VoronoiGIC.cpp ) +if (TBB_FOUND) + target_link_libraries(Nerve ${TBB_LIBRARIES}) + target_link_libraries(VoronoiGIC ${TBB_LIBRARIES}) +endif() - if (TBB_FOUND) - target_link_libraries(Nerve ${TBB_LIBRARIES}) - target_link_libraries(VoronoiGIC ${TBB_LIBRARIES}) - endif() +file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +# Copy files for not to pollute sources when testing +file(COPY "${CMAKE_SOURCE_DIR}/data/points/human.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - # Copy files for not to pollute sources when testing - file(COPY "${CMAKE_SOURCE_DIR}/data/points/human.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +add_test(NAME Nerve_GIC_utilities_nerve COMMAND $<TARGET_FILE:Nerve> + "human.off" "2" "10" "0.3") - add_test(NAME Nerve_GIC_utilities_nerve COMMAND $<TARGET_FILE:Nerve> - "human.off" "2" "10" "0.3") +add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $<TARGET_FILE:VoronoiGIC> + "human.off" "100") - add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $<TARGET_FILE:VoronoiGIC> - "human.off" "100") - - install(TARGETS Nerve DESTINATION bin) - install(TARGETS VoronoiGIC DESTINATION bin) - install(FILES KeplerMapperVisuFromTxtFile.py km.py km.py.COPYRIGHT DESTINATION bin) - -endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) +install(TARGETS Nerve DESTINATION bin) +install(TARGETS VoronoiGIC DESTINATION bin) +install(FILES KeplerMapperVisuFromTxtFile.py km.py km.py.COPYRIGHT DESTINATION bin) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 9059219c..4177a0b8 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ 
b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -24,6 +24,8 @@ #include <boost/iterator/transform_iterator.hpp> #include <boost/graph/adjacency_list.hpp> #include <boost/range/adaptor/reversed.hpp> +#include <boost/range/adaptor/transformed.hpp> +#include <boost/range/size.hpp> #include <boost/container/static_vector.hpp> #ifdef GUDHI_USE_TBB @@ -702,10 +704,10 @@ class Simplex_tree { return true; } - private: - /** \brief Inserts a simplex represented by a vector of vertex. - * @param[in] simplex vector of Vertex_handles, representing the vertices of the new simplex. The vector must be - * sorted by increasing vertex handle order. + protected: + /** \brief Inserts a simplex represented by a range of vertex. + * @param[in] simplex range of Vertex_handles, representing the vertices of the new simplex. The range must be + * sorted by increasing vertex handle order, and not empty. * @param[in] filtration the filtration value assigned to the new simplex. * @return If the new simplex is inserted successfully (i.e. it was not in the * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned @@ -717,12 +719,13 @@ class Simplex_tree { * null_simplex. 
* */ - std::pair<Simplex_handle, bool> insert_vertex_vector(const std::vector<Vertex_handle>& simplex, + template <class RandomVertexHandleRange = std::initializer_list<Vertex_handle>> + std::pair<Simplex_handle, bool> insert_simplex_raw(const RandomVertexHandleRange& simplex, Filtration_value filtration) { Siblings * curr_sib = &root_; std::pair<Simplex_handle, bool> res_insert; auto vi = simplex.begin(); - for (; vi != simplex.end() - 1; ++vi) { + for (; vi != std::prev(simplex.end()); ++vi) { GUDHI_CHECK(*vi != null_vertex(), "cannot use the dummy null_vertex() as a real vertex"); res_insert = curr_sib->members_.emplace(*vi, Node(curr_sib, filtration)); if (!(has_children(res_insert.first))) { @@ -743,9 +746,10 @@ class Simplex_tree { return std::pair<Simplex_handle, bool>(null_simplex(), false); } // otherwise the insertion has succeeded - size is a size_type - if (static_cast<int>(simplex.size()) - 1 > dimension_) { + int dim = static_cast<int>(boost::size(simplex)) - 1; + if (dim > dimension_) { // Update dimension if needed - dimension_ = static_cast<int>(simplex.size()) - 1; + dimension_ = dim; } return res_insert; } @@ -786,7 +790,7 @@ class Simplex_tree { // Copy before sorting std::vector<Vertex_handle> copy(first, last); std::sort(std::begin(copy), std::end(copy)); - return insert_vertex_vector(copy, filtration); + return insert_simplex_raw(copy, filtration); } /** \brief Insert a N-simplex and all his subfaces, from a N-simplex represented by a range of @@ -1119,16 +1123,12 @@ class Simplex_tree { dimension_ = 1; } - root_.members_.reserve(num_vertices(skel_graph)); + root_.members_.reserve(num_vertices(skel_graph)); // probably useless in most cases + auto verts = vertices(skel_graph) | boost::adaptors::transformed([&](auto v){ + return Dit_value_t(v, Node(&root_, get(vertex_filtration_t(), skel_graph, v))); }); + root_.members_.insert(boost::begin(verts), boost::end(verts)); + // This automatically sorts the vertices, the graph concept doesn't 
guarantee the order in which we iterate. - typename boost::graph_traits<OneSkeletonGraph>::vertex_iterator v_it, - v_it_end; - for (std::tie(v_it, v_it_end) = vertices(skel_graph); v_it != v_it_end; - ++v_it) { - root_.members_.emplace_hint( - root_.members_.end(), *v_it, - Node(&root_, get(vertex_filtration_t(), skel_graph, *v_it))); - } std::pair<typename boost::graph_traits<OneSkeletonGraph>::edge_iterator, typename boost::graph_traits<OneSkeletonGraph>::edge_iterator> boost_edges = edges(skel_graph); // boost_edges.first is the equivalent to boost_edges.begin() @@ -1137,7 +1137,7 @@ class Simplex_tree { auto edge = *(boost_edges.first); auto u = source(edge, skel_graph); auto v = target(edge, skel_graph); - if (u == v) throw "Self-loops are not simplicial"; + if (u == v) throw std::invalid_argument("Self-loops are not simplicial"); // We cannot skip edges with the wrong orientation and expect them to // come a second time with the right orientation, that does not always // happen in practice. emplace() should be a NOP when an element with the @@ -1156,6 +1156,21 @@ class Simplex_tree { } } + /** \brief Inserts several vertices. + * @param[in] vertices A range of Vertex_handle + * @param[in] filt filtration value of the new vertices (the same for all) + * + * This may be faster than inserting the vertices one by one, especially in a random order. + * The complex does not need to be empty before calling this function. However, if a vertex is + * already present, its filtration value is not modified, unlike with other insertion functions. 
*/ + template <class VertexRange> + void insert_batch_vertices(VertexRange const& vertices, Filtration_value filt = 0) { + auto verts = vertices | boost::adaptors::transformed([&](auto v){ + return Dit_value_t(v, Node(&root_, filt)); }); + root_.members_.insert(boost::begin(verts), boost::end(verts)); + if (dimension_ < 0 && !root_.members_.empty()) dimension_ = 0; + } + /** \brief Expands the Simplex_tree containing only its one skeleton * until dimension max_dim. * @@ -1598,7 +1613,7 @@ class Simplex_tree { Simplex_tree st_copy = *this; // Add point for coning the simplicial complex - this->insert_simplex({maxvert}, -3); + this->insert_simplex_raw({maxvert}, -3); // For each simplex std::vector<Vertex_handle> vr; diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp index 79bb5a93..ebcc406c 100644 --- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp @@ -1038,3 +1038,17 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_boundaries_and_opposite_vertex_iterat BOOST_CHECK(opposite_vertices.size() == 0); } } + +BOOST_AUTO_TEST_CASE(batch_vertices) { + typedef Simplex_tree<> typeST; + std::clog << "********************************************************************" << std::endl; + std::clog << "TEST BATCH VERTEX INSERTION" << std::endl; + typeST st; + st.insert_simplex_and_subfaces({3}, 1.5); + std::vector verts { 2, 3, 5, 6 }; + st.insert_batch_vertices(verts); + BOOST_CHECK(st.num_vertices() == 4); + BOOST_CHECK(st.num_simplices() == 4); + BOOST_CHECK(st.filtration(st.find({2})) == 0.); + BOOST_CHECK(st.filtration(st.find({3})) == 1.5); +} diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h index 56a24af0..ab203ca5 100644 --- a/src/Tangential_complex/include/gudhi/Tangential_complex.h +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -56,6 +56,7 @@ #include 
<string> #include <cstddef> // for std::size_t #include <optional> +#include <numeric> // for std::iota #ifdef GUDHI_USE_TBB #include <tbb/parallel_for.h> @@ -345,10 +346,11 @@ class Tangential_complex { m_stars.resize(m_points.size()); m_squared_star_spheres_radii_incl_margin.resize(m_points.size(), FT(-1)); #ifdef GUDHI_TC_PERTURB_POSITION - if (m_points.empty()) + if (m_points.empty()) { m_translations.clear(); - else + } else { m_translations.resize(m_points.size(), m_k.construct_vector_d_object()(m_ambient_dim)); + } #if defined(GUDHI_USE_TBB) delete[] m_p_perturb_mutexes; m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()]; @@ -623,6 +625,11 @@ class Tangential_complex { int max_dim = -1; + // Ordered vertices to be inserted first by the create_complex method to avoid quadratic complexity. + std::vector<typename Simplex_tree_::Vertex_handle> vertices(m_points.size()); + std::iota(vertices.begin(), vertices.end(), 0); + tree.insert_batch_vertices(vertices); + // For each triangulation for (std::size_t idx = 0; idx < m_points.size(); ++idx) { // For each cell of the star diff --git a/src/cmake/modules/GUDHI_submodules.cmake b/src/cmake/modules/GUDHI_submodules.cmake index 78b045bd..9ede852d 100644 --- a/src/cmake/modules/GUDHI_submodules.cmake +++ b/src/cmake/modules/GUDHI_submodules.cmake @@ -1,5 +1,5 @@ # For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera. -set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include) -set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h") -set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include) -set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h")
\ No newline at end of file +set(HERA_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/include) +set(HERA_INCLUDE_DIR ${HERA_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find hera/{wasserstein.h,bottleneck.h}") +# since everything is cleanly under include/hera/, there is no harm always including it +include_directories(${HERA_INCLUDE_DIR}) diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake index 4487ad86..b9bf1414 100644 --- a/src/cmake/modules/GUDHI_user_version_target.cmake +++ b/src/cmake/modules/GUDHI_user_version_target.cmake @@ -18,14 +18,17 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E string(TIMESTAMP GUDHI_VERSION_YEAR "%Y") configure_file(${CMAKE_SOURCE_DIR}/biblio/how_to_cite_gudhi.bib.in "${CMAKE_CURRENT_BINARY_DIR}/biblio/how_to_cite_gudhi.bib" @ONLY) file(COPY "${CMAKE_SOURCE_DIR}/biblio/bibliography.bib" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/biblio/") +file(COPY "${CMAKE_SOURCE_DIR}/biblio/test" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/biblio") # append cgal citation inside bibliography - sphinx cannot deal with more than one bib file file(READ "${CMAKE_SOURCE_DIR}/biblio/how_to_cite_cgal.bib" CGAL_CITATION_CONTENT) file(APPEND "${CMAKE_CURRENT_BINARY_DIR}/biblio/bibliography.bib" "${CGAL_CITATION_CONTENT}") -# Copy biblio directory for user version +# Copy biblio files for user version add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy_directory ${CMAKE_CURRENT_BINARY_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio) + copy ${CMAKE_CURRENT_BINARY_DIR}/biblio/bibliography.bib ${GUDHI_USER_VERSION_DIR}/biblio/bibliography.bib) +add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E + copy ${CMAKE_CURRENT_BINARY_DIR}/biblio/how_to_cite_gudhi.bib ${GUDHI_USER_VERSION_DIR}/biblio/how_to_cite_gudhi.bib) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy 
${CMAKE_SOURCE_DIR}/README.md ${GUDHI_USER_VERSION_DIR}/README.md) @@ -60,10 +63,9 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI) -if(HERA_WASSERSTEIN_INCLUDE_DIR STREQUAL HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR OR - HERA_BOTTLENECK_INCLUDE_DIR STREQUAL HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR) +if(HERA_INCLUDE_DIR STREQUAL HERA_INTERNAL_INCLUDE_DIR) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E - copy_directory ${CMAKE_SOURCE_DIR}/ext/hera ${GUDHI_USER_VERSION_DIR}/ext/hera) + copy_directory ${CMAKE_SOURCE_DIR}/ext/hera/include ${GUDHI_USER_VERSION_DIR}/ext/hera/include) endif() set(GUDHI_DIRECTORIES "doc;example;concept;utilities") diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md index ce903405..9b7c2853 100644 --- a/src/common/doc/main_page.md +++ b/src/common/doc/main_page.md @@ -178,7 +178,7 @@ The set of all simplices is filtered by the radius of their minimal enclosing ball. 
</td> <td width="15%"> - <b>Author:</b> Vincent Rouvreau<br> + <b>Author:</b> Vincent Rouvreau, Hind Montassif<br> <b>Introduced in:</b> GUDHI 2.2.0<br> <b>Copyright:</b> MIT [(LGPL v3)](../../licensing/)<br> <b>Requires:</b> \ref cgal diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt index 32ec13bd..39e2acd4 100644 --- a/src/python/CMakeLists.txt +++ b/src/python/CMakeLists.txt @@ -44,7 +44,7 @@ function( add_gudhi_debug_info DEBUG_INFO ) endfunction( add_gudhi_debug_info ) if(PYTHONINTERP_FOUND) - if(PYBIND11_FOUND AND CYTHON_FOUND) + if(NUMPY_FOUND AND PYBIND11_FOUND AND CYTHON_FOUND) add_gudhi_debug_info("Pybind11 version ${PYBIND11_VERSION}") # PyBind11 modules set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'bottleneck', ") @@ -163,10 +163,10 @@ if(PYTHONINTERP_FOUND) set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'clustering/_tomato', ") set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'hera/wasserstein', ") set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'hera/bottleneck', ") + set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") if (NOT CGAL_VERSION VERSION_LESS 4.11.0) set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'datasets/generators/_points', ") set(GUDHI_PYBIND11_MODULES "${GUDHI_PYBIND11_MODULES}'bottleneck', ") - set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ") endif () if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0) set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'subsampling', ") @@ -432,38 +432,38 @@ if(PYTHONINTERP_FOUND) ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py") add_gudhi_py_test(test_bottleneck_distance) + endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) - # Cover complex - file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY 
${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - add_test(NAME cover_complex_nerve_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py" - -f human.off -c 2 -r 10 -g 0.3) + # Cover complex + file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + add_test(NAME cover_complex_nerve_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py" + -f human.off -c 2 -r 10 -g 0.3) - add_test(NAME cover_complex_coordinate_gic_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py" - -f human.off -c 0 -v) + add_test(NAME cover_complex_coordinate_gic_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py" + -f human.off -c 0 -v) - add_test(NAME cover_complex_functional_gic_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py" - -o lucky_cat.off - -f lucky_cat_PCA1 -v) + add_test(NAME cover_complex_functional_gic_example_py_test + WORKING_DIRECTORY 
${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py" + -o lucky_cat.off + -f lucky_cat_PCA1 -v) - add_test(NAME cover_complex_voronoi_gic_example_py_test - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" - ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py" - -f human.off -n 700 -v) + add_test(NAME cover_complex_voronoi_gic_example_py_test + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}" + ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py" + -f human.off -n 700 -v) - add_gudhi_py_test(test_cover_complex) - endif (NOT CGAL_VERSION VERSION_LESS 4.11.0) + add_gudhi_py_test(test_cover_complex) if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) # Alpha @@ -623,10 +623,10 @@ if(PYTHONINTERP_FOUND) # Set missing or not modules set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES") - else(PYBIND11_FOUND AND CYTHON_FOUND) - message("++ Python module will not be compiled because cython and/or pybind11 was/were not found") + else(NUMPY_FOUND AND PYBIND11_FOUND AND CYTHON_FOUND) + message("++ Python module will not be compiled because numpy and/or cython and/or pybind11 was/were not found") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") - endif(PYBIND11_FOUND AND CYTHON_FOUND) + endif(NUMPY_FOUND AND PYBIND11_FOUND AND CYTHON_FOUND) else(PYTHONINTERP_FOUND) message("++ Python module will not be compiled because no Python interpreter was found") set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES") diff --git a/src/python/doc/representations_sum.inc b/src/python/doc/representations_sum.inc index 4298aea9..9515f044 100644 --- 
a/src/python/doc/representations_sum.inc +++ b/src/python/doc/representations_sum.inc @@ -1,14 +1,14 @@ .. table:: :widths: 30 40 30 - +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+ - | .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière, Martin Royer | - | img/sklearn-tda.png | diagrams, compatible with scikit-learn. | | - | | | :Since: GUDHI 3.1.0 | - | | | | - | | | :License: MIT | - | | | | - | | | :Requires: `Scikit-learn <installation.html#scikit-learn>`_ | - +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+ - | * :doc:`representations` | - +------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------+ + +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------------------+ + | .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière, Martin Royer, Gard Spreemann, Wojciech Reise | + | img/sklearn-tda.png | diagrams, compatible with scikit-learn. 
| | + | | | :Since: GUDHI 3.1.0 | + | | | | + | | | :License: MIT | + | | | | + | | | :Requires: `Scikit-learn <installation.html#scikit-learn>`_ | + +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------------------+ + | * :doc:`representations` | + +------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/src/python/gudhi/hera/bottleneck.cc b/src/python/gudhi/hera/bottleneck.cc index 0cb562ce..ec461f7c 100644 --- a/src/python/gudhi/hera/bottleneck.cc +++ b/src/python/gudhi/hera/bottleneck.cc @@ -16,7 +16,7 @@ using py::ssize_t; #endif -#include <bottleneck.h> // Hera +#include <hera/bottleneck.h> // Hera double bottleneck_distance(Dgm d1, Dgm d2, double delta) { diff --git a/src/python/gudhi/hera/wasserstein.cc b/src/python/gudhi/hera/wasserstein.cc index fa0cf8aa..3516352e 100644 --- a/src/python/gudhi/hera/wasserstein.cc +++ b/src/python/gudhi/hera/wasserstein.cc @@ -8,10 +8,16 @@ * - YYYY/MM Author: Description of the modification */ -#include <wasserstein.h> // Hera - #include <pybind11_diagram_utils.h> +#ifdef _MSC_VER +// https://github.com/grey-narn/hera/issues/3 +// ssize_t is a non-standard type (well, posix) +using py::ssize_t; +#endif + +#include <hera/wasserstein.h> // Hera + double wasserstein_distance( Dgm d1, Dgm d2, double wasserstein_power, double internal_p, diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py index 9e6db960..e1402aea 100644 --- a/src/python/gudhi/representations/vector_methods.py +++ b/src/python/gudhi/representations/vector_methods.py @@ -13,8 +13,13 @@ import numpy as np from sklearn.base import BaseEstimator, TransformerMixin from sklearn.exceptions import 
NotFittedError from sklearn.preprocessing import MinMaxScaler, MaxAbsScaler -from sklearn.neighbors import DistanceMetric from sklearn.metrics import pairwise +try: + # New location since 1.0 + from sklearn.metrics import DistanceMetric +except ImportError: + # Will be removed in 1.3 + from sklearn.neighbors import DistanceMetric from .preprocessing import DiagramScaler, BirthPersistenceTransform diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd index 5642f82d..5309c6fa 100644 --- a/src/python/gudhi/simplex_tree.pxd +++ b/src/python/gudhi/simplex_tree.pxd @@ -56,6 +56,8 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi": int upper_bound_dimension() nogil bool find_simplex(vector[int] simplex) nogil bool insert(vector[int] simplex, double filtration) nogil + void insert_matrix(double* filtrations, int n, int stride0, int stride1, double max_filtration) nogil except + + void insert_batch_vertices(vector[int] v, double f) nogil except + vector[pair[vector[int], double]] get_star(vector[int] simplex) nogil vector[pair[vector[int], double]] get_cofaces(vector[int] simplex, int dimension) nogil void expansion(int max_dim) nogil except + diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx index 05bfe22e..4cf176f5 100644 --- a/src/python/gudhi/simplex_tree.pyx +++ b/src/python/gudhi/simplex_tree.pyx @@ -8,14 +8,24 @@ # - YYYY/MM Author: Description of the modification from cython.operator import dereference, preincrement -from libc.stdint cimport intptr_t +from libc.stdint cimport intptr_t, int32_t, int64_t import numpy as np cimport gudhi.simplex_tree +cimport cython +from numpy.math cimport INFINITY __author__ = "Vincent Rouvreau" __copyright__ = "Copyright (C) 2016 Inria" __license__ = "MIT" +ctypedef fused some_int: + int32_t + int64_t + +ctypedef fused some_float: + float + double + cdef bool callback(vector[int] simplex, void *blocker_func): return (<object>blocker_func)(simplex) @@ -228,6 
+238,91 @@ cdef class SimplexTree: """ return self.get_ptr().insert(simplex, <double>filtration) + @staticmethod + @cython.boundscheck(False) + def create_from_array(filtrations, double max_filtration=INFINITY): + """Creates a new, empty complex and inserts vertices and edges. The vertices are numbered from 0 to n-1, and + the filtration values are encoded in the array, with the diagonal representing the vertices. It is the + caller's responsibility to ensure that this defines a filtration, which can be achieved with either:: + + filtrations[np.diag_indices_from(filtrations)] = filtrations.min(axis=1) + + or:: + + diag = filtrations.diagonal() + filtrations = np.fmax(np.fmax(filtrations, diag[:, None]), diag[None, :]) + + :param filtrations: the filtration values of the vertices and edges to insert. The matrix is assumed to be symmetric. + :type filtrations: numpy.ndarray of shape (n,n) + :param max_filtration: only insert vertices and edges with filtration values no larger than max_filtration + :type max_filtration: float + :returns: the new complex + :rtype: SimplexTree + """ + # TODO: document which half of the matrix is actually read? + filtrations = np.asanyarray(filtrations, dtype=float) + cdef double[:,:] F = filtrations + ret = SimplexTree() + cdef int n = F.shape[0] + assert n == F.shape[1], 'create_from_array() expects a square array' + with nogil: + ret.get_ptr().insert_matrix(&F[0,0], n, F.strides[0], F.strides[1], max_filtration) + return ret + + def insert_edges_from_coo_matrix(self, edges): + """Inserts edges given by a sparse matrix in `COOrdinate format + <https://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.coo_matrix.html>`_. + If an edge is repeated, the smallest filtration value is used. Missing entries are not inserted. 
+ Diagonal entries are currently interpreted as vertices, although we do not guarantee this behavior + in the future, and this is only useful if you want to insert vertices with a smaller filtration value + than the smallest edge containing it, since vertices are implicitly inserted together with the edges. + + :param edges: the edges to insert and their filtration values. + :type edges: scipy.sparse.coo_matrix of shape (n,n) + + .. seealso:: :func:`insert_batch` + """ + # Without this, it could be slow if we end up inserting vertices in a bad order (flat_map). + self.get_ptr().insert_batch_vertices(np.unique(np.stack((edges.row, edges.col))), INFINITY) + # TODO: optimize this? + for edge in zip(edges.row, edges.col, edges.data): + self.get_ptr().insert((edge[0], edge[1]), edge[2]) + + @cython.boundscheck(False) + @cython.wraparound(False) + def insert_batch(self, some_int[:,:] vertex_array, some_float[:] filtrations): + """Inserts k-simplices given by a sparse array in a format similar + to `torch.sparse <https://pytorch.org/docs/stable/sparse.html>`_. + The n-th simplex has vertices `vertex_array[0,n]`, ..., + `vertex_array[k,n]` and filtration value `filtrations[n]`. + If a simplex is repeated, the smallest filtration value is used. + Simplices with a repeated vertex are currently interpreted as lower + dimensional simplices, but we do not guarantee this behavior in the + future. Any time a simplex is inserted, its faces are inserted as well + if needed to preserve a simplicial complex. + + :param vertex_array: the k-simplices to insert. + :type vertex_array: numpy.array of shape (k+1,n) + :param filtrations: the filtration values. 
+ :type filtrations: numpy.array of shape (n,) + """ + cdef vector[int] vertices = np.unique(vertex_array) + cdef Py_ssize_t k = vertex_array.shape[0] + cdef Py_ssize_t n = vertex_array.shape[1] + assert filtrations.shape[0] == n, 'inconsistent sizes for vertex_array and filtrations' + cdef Py_ssize_t i + cdef Py_ssize_t j + cdef vector[int] v + with nogil: + # Without this, it could be slow if we end up inserting vertices in a bad order (flat_map). + # NaN currently does the wrong thing + self.get_ptr().insert_batch_vertices(vertices, INFINITY) + for i in range(n): + for j in range(k): + v.push_back(vertex_array[j, i]) + self.get_ptr().insert(v, filtrations[i]) + v.clear() + def get_simplices(self): """This function returns a generator with simplices and their given filtration values. @@ -376,7 +471,7 @@ cdef class SimplexTree: """ return self.get_ptr().prune_above_filtration(filtration) - def expansion(self, max_dim): + def expansion(self, max_dimension): """Expands the simplex tree containing only its one skeleton until dimension max_dim. @@ -390,10 +485,10 @@ cdef class SimplexTree: The simplex tree must contain no simplex of dimension bigger than 1 when calling the method. - :param max_dim: The maximal dimension. - :type max_dim: int + :param max_dimension: The maximal dimension. 
+ :type max_dimension: int """ - cdef int maxdim = max_dim + cdef int maxdim = max_dimension with nogil: self.get_ptr().expansion(maxdim) diff --git a/src/python/include/Alpha_complex_factory.h b/src/python/include/Alpha_complex_factory.h index 3d20aa8f..41eb72c1 100644 --- a/src/python/include/Alpha_complex_factory.h +++ b/src/python/include/Alpha_complex_factory.h @@ -106,7 +106,7 @@ class Exact_alpha_complex_dD final : public Abstract_alpha_complex { return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, exact_version_, default_filtration_value); } - virtual std::size_t num_vertices() const { + virtual std::size_t num_vertices() const override { return alpha_complex_.num_vertices(); } @@ -141,7 +141,7 @@ class Inexact_alpha_complex_dD final : public Abstract_alpha_complex { return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, false, default_filtration_value); } - virtual std::size_t num_vertices() const { + virtual std::size_t num_vertices() const override { return alpha_complex_.num_vertices(); } diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h index 3848c5ad..0317ea39 100644 --- a/src/python/include/Simplex_tree_interface.h +++ b/src/python/include/Simplex_tree_interface.h @@ -40,6 +40,8 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> { using Complex_simplex_iterator = typename Base::Complex_simplex_iterator; using Extended_filtration_data = typename Base::Extended_filtration_data; using Boundary_simplex_iterator = typename Base::Boundary_simplex_iterator; + using Siblings = typename Base::Siblings; + using Node = typename Base::Node; typedef bool (*blocker_func_t)(Simplex simplex, void *user_data); public: @@ -62,6 +64,30 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> { return (result.second); } + void insert_matrix(double* filtrations, int n, int stride0, int stride1, double max_filtration) { + // We could delegate to 
insert_graph, but wrapping the matrix in a graph interface is too much work, + // and this is a bit more efficient. + auto& rm = this->root()->members_; + for(int i=0; i<n; ++i) { + char* p = reinterpret_cast<char*>(filtrations) + i * stride0; + double fv = *reinterpret_cast<double*>(p + i * stride1); + if(fv > max_filtration) continue; + auto sh = rm.emplace_hint(rm.end(), i, Node(this->root(), fv)); + Siblings* children = nullptr; + // Should we make a first pass to count the number of edges so we can reserve the right space? + for(int j=i+1; j<n; ++j) { + double fe = *reinterpret_cast<double*>(p + j * stride1); + if(fe > max_filtration) continue; + if(!children) { + children = new Siblings(this->root(), i); + sh->second.assign_children(children); + } + children->members().emplace_hint(children->members().end(), j, Node(children, fe)); + } + } + + } + // Do not interface this function, only used in alpha complex interface for complex creation bool insert_simplex(const Simplex& simplex, Filtration_value filtration = 0) { Insertion_result result = Base::insert_simplex(simplex, filtration); diff --git a/src/python/setup.py.in b/src/python/setup.py.in index 2c67c2c5..6eb0db42 100644 --- a/src/python/setup.py.in +++ b/src/python/setup.py.in @@ -48,10 +48,6 @@ ext_modules = cythonize(ext_modules, compiler_directives={'language_level': '3'} for module in pybind11_modules: my_include_dirs = include_dirs + [pybind11.get_include(False), pybind11.get_include(True)] - if module == 'hera/wasserstein': - my_include_dirs = ['@HERA_WASSERSTEIN_INCLUDE_DIR@'] + my_include_dirs - elif module == 'hera/bottleneck': - my_include_dirs = ['@HERA_BOTTLENECK_INCLUDE_DIR@'] + my_include_dirs ext_modules.append(Extension( 'gudhi.' 
+ module.replace('/', '.'), sources = [source_dir + module + '.cc'], diff --git a/src/python/test/test_persistence_graphical_tools.py b/src/python/test/test_persistence_graphical_tools.py index c19836b7..0e2ac3f8 100644 --- a/src/python/test/test_persistence_graphical_tools.py +++ b/src/python/test/test_persistence_graphical_tools.py @@ -12,6 +12,7 @@ import gudhi as gd import numpy as np import matplotlib as plt import pytest +import warnings def test_array_handler(): @@ -71,13 +72,13 @@ def test_limit_to_max_intervals(): (0, (0.0, 0.106382)), ] # check no warnings if max_intervals equals to the diagrams number - with pytest.warns(None) as record: + with warnings.catch_warnings(): + warnings.simplefilter("error") truncated_diags = gd.persistence_graphical_tools._limit_to_max_intervals( diags, 10, key=lambda life_time: life_time[1][1] - life_time[1][0] ) # check diagrams are not sorted assert truncated_diags == diags - assert len(record) == 0 # check warning if max_intervals lower than the diagrams number with pytest.warns(UserWarning) as record: diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py index 54bafed5..2ccbfbf5 100755 --- a/src/python/test/test_simplex_tree.py +++ b/src/python/test/test_simplex_tree.py @@ -249,6 +249,7 @@ def test_make_filtration_non_decreasing(): assert st.filtration([3, 4]) == 2.0 assert st.filtration([4, 5]) == 2.0 + def test_extend_filtration(): # Inserted simplex: @@ -257,86 +258,87 @@ def test_extend_filtration(): # / \ / # o o # /2\ /3 - # o o - # 1 0 - - st = SimplexTree() - st.insert([0,2]) - st.insert([1,2]) - st.insert([0,3]) - st.insert([2,5]) - st.insert([3,4]) - st.insert([3,5]) - st.assign_filtration([0], 1.) - st.assign_filtration([1], 2.) - st.assign_filtration([2], 3.) - st.assign_filtration([3], 4.) - st.assign_filtration([4], 5.) - st.assign_filtration([5], 6.) 
- - assert list(st.get_filtration()) == [ - ([0, 2], 0.0), - ([1, 2], 0.0), - ([0, 3], 0.0), - ([3, 4], 0.0), - ([2, 5], 0.0), - ([3, 5], 0.0), - ([0], 1.0), - ([1], 2.0), - ([2], 3.0), - ([3], 4.0), - ([4], 5.0), - ([5], 6.0) + # o o + # 1 0 + + st = SimplexTree() + st.insert([0, 2]) + st.insert([1, 2]) + st.insert([0, 3]) + st.insert([2, 5]) + st.insert([3, 4]) + st.insert([3, 5]) + st.assign_filtration([0], 1.0) + st.assign_filtration([1], 2.0) + st.assign_filtration([2], 3.0) + st.assign_filtration([3], 4.0) + st.assign_filtration([4], 5.0) + st.assign_filtration([5], 6.0) + + assert list(st.get_filtration()) == [ + ([0, 2], 0.0), + ([1, 2], 0.0), + ([0, 3], 0.0), + ([3, 4], 0.0), + ([2, 5], 0.0), + ([3, 5], 0.0), + ([0], 1.0), + ([1], 2.0), + ([2], 3.0), + ([3], 4.0), + ([4], 5.0), + ([5], 6.0), ] - + st.extend_filtration() - - assert list(st.get_filtration()) == [ - ([6], -3.0), - ([0], -2.0), - ([1], -1.8), - ([2], -1.6), - ([0, 2], -1.6), - ([1, 2], -1.6), - ([3], -1.4), - ([0, 3], -1.4), - ([4], -1.2), - ([3, 4], -1.2), - ([5], -1.0), - ([2, 5], -1.0), - ([3, 5], -1.0), - ([5, 6], 1.0), - ([4, 6], 1.2), - ([3, 6], 1.4), + + assert list(st.get_filtration()) == [ + ([6], -3.0), + ([0], -2.0), + ([1], -1.8), + ([2], -1.6), + ([0, 2], -1.6), + ([1, 2], -1.6), + ([3], -1.4), + ([0, 3], -1.4), + ([4], -1.2), + ([3, 4], -1.2), + ([5], -1.0), + ([2, 5], -1.0), + ([3, 5], -1.0), + ([5, 6], 1.0), + ([4, 6], 1.2), + ([3, 6], 1.4), ([3, 4, 6], 1.4), - ([3, 5, 6], 1.4), - ([2, 6], 1.6), - ([2, 5, 6], 1.6), - ([1, 6], 1.8), - ([1, 2, 6], 1.8), - ([0, 6], 2.0), - ([0, 2, 6], 2.0), - ([0, 3, 6], 2.0) + ([3, 5, 6], 1.4), + ([2, 6], 1.6), + ([2, 5, 6], 1.6), + ([1, 6], 1.8), + ([1, 2, 6], 1.8), + ([0, 6], 2.0), + ([0, 2, 6], 2.0), + ([0, 3, 6], 2.0), ] - dgms = st.extended_persistence(min_persistence=-1.) 
+ dgms = st.extended_persistence(min_persistence=-1.0) assert len(dgms) == 4 # Sort by (death-birth) descending - we are only interested in those with the longest life span for idx in range(4): - dgms[idx] = sorted(dgms[idx], key=lambda x:(-abs(x[1][0]-x[1][1]))) + dgms[idx] = sorted(dgms[idx], key=lambda x: (-abs(x[1][0] - x[1][1]))) + + assert dgms[0][0][1][0] == pytest.approx(2.0) + assert dgms[0][0][1][1] == pytest.approx(3.0) + assert dgms[1][0][1][0] == pytest.approx(5.0) + assert dgms[1][0][1][1] == pytest.approx(4.0) + assert dgms[2][0][1][0] == pytest.approx(1.0) + assert dgms[2][0][1][1] == pytest.approx(6.0) + assert dgms[3][0][1][0] == pytest.approx(6.0) + assert dgms[3][0][1][1] == pytest.approx(1.0) - assert dgms[0][0][1][0] == pytest.approx(2.) - assert dgms[0][0][1][1] == pytest.approx(3.) - assert dgms[1][0][1][0] == pytest.approx(5.) - assert dgms[1][0][1][1] == pytest.approx(4.) - assert dgms[2][0][1][0] == pytest.approx(1.) - assert dgms[2][0][1][1] == pytest.approx(6.) - assert dgms[3][0][1][0] == pytest.approx(6.) - assert dgms[3][0][1][1] == pytest.approx(1.) def test_simplices_iterator(): st = SimplexTree() - + assert st.insert([0, 1, 2], filtration=4.0) == True assert st.insert([2, 3, 4], filtration=2.0) == True @@ -346,9 +348,10 @@ def test_simplices_iterator(): print("filtration is: ", simplex[1]) assert st.filtration(simplex[0]) == simplex[1] + def test_collapse_edges(): st = SimplexTree() - + assert st.insert([0, 1], filtration=1.0) == True assert st.insert([1, 2], filtration=1.0) == True assert st.insert([2, 3], filtration=1.0) == True @@ -360,31 +363,33 @@ def test_collapse_edges(): st.collapse_edges() assert st.num_simplices() == 9 - assert st.find([0, 2]) == False # [1, 3] would be fine as well + assert st.find([0, 2]) == False # [1, 3] would be fine as well for simplex in st.get_skeleton(0): - assert simplex[1] == 1. + assert simplex[1] == 1.0 + def test_reset_filtration(): st = SimplexTree() - - assert st.insert([0, 1, 2], 3.) 
== True - assert st.insert([0, 3], 2.) == True - assert st.insert([3, 4, 5], 3.) == True - assert st.insert([0, 1, 6, 7], 4.) == True + + assert st.insert([0, 1, 2], 3.0) == True + assert st.insert([0, 3], 2.0) == True + assert st.insert([3, 4, 5], 3.0) == True + assert st.insert([0, 1, 6, 7], 4.0) == True # Guaranteed by construction for simplex in st.get_simplices(): - assert st.filtration(simplex[0]) >= 2. - + assert st.filtration(simplex[0]) >= 2.0 + # dimension until 5 even if simplex tree is of dimension 3 to test the limits for dimension in range(5, -1, -1): - st.reset_filtration(0., dimension) + st.reset_filtration(0.0, dimension) for simplex in st.get_skeleton(3): print(simplex) if len(simplex[0]) < (dimension) + 1: - assert st.filtration(simplex[0]) >= 2. + assert st.filtration(simplex[0]) >= 2.0 else: - assert st.filtration(simplex[0]) == 0. + assert st.filtration(simplex[0]) == 0.0 + def test_boundaries_iterator(): st = SimplexTree() @@ -400,16 +405,17 @@ def test_boundaries_iterator(): list(st.get_boundaries([])) with pytest.raises(RuntimeError): - list(st.get_boundaries([0, 4])) # (0, 4) does not exist + list(st.get_boundaries([0, 4])) # (0, 4) does not exist with pytest.raises(RuntimeError): - list(st.get_boundaries([6])) # (6) does not exist + list(st.get_boundaries([6])) # (6) does not exist + def test_persistence_intervals_in_dimension(): # Here is our triangulation of a 2-torus - taken from https://dioscuri-tda.org/Paris_TDA_Tutorial_2021.html # 0-----3-----4-----0 # | \ | \ | \ | \ | - # | \ | \ | \| \ | + # | \ | \ | \| \ | # 1-----8-----7-----1 # | \ | \ | \ | \ | # | \ | \ | \ | \ | @@ -418,50 +424,52 @@ def test_persistence_intervals_in_dimension(): # | \ | \ | \ | \ | # 0-----3-----4-----0 st = SimplexTree() - st.insert([0,1,8]) - st.insert([0,3,8]) - st.insert([3,7,8]) - st.insert([3,4,7]) - st.insert([1,4,7]) - st.insert([0,1,4]) - st.insert([1,2,5]) - st.insert([1,5,8]) - st.insert([5,6,8]) - st.insert([6,7,8]) - st.insert([2,6,7]) - 
st.insert([1,2,7]) - st.insert([0,2,3]) - st.insert([2,3,5]) - st.insert([3,4,5]) - st.insert([4,5,6]) - st.insert([0,4,6]) - st.insert([0,2,6]) + st.insert([0, 1, 8]) + st.insert([0, 3, 8]) + st.insert([3, 7, 8]) + st.insert([3, 4, 7]) + st.insert([1, 4, 7]) + st.insert([0, 1, 4]) + st.insert([1, 2, 5]) + st.insert([1, 5, 8]) + st.insert([5, 6, 8]) + st.insert([6, 7, 8]) + st.insert([2, 6, 7]) + st.insert([1, 2, 7]) + st.insert([0, 2, 3]) + st.insert([2, 3, 5]) + st.insert([3, 4, 5]) + st.insert([4, 5, 6]) + st.insert([0, 4, 6]) + st.insert([0, 2, 6]) st.compute_persistence(persistence_dim_max=True) - + H0 = st.persistence_intervals_in_dimension(0) - assert np.array_equal(H0, np.array([[ 0., float("inf")]])) + assert np.array_equal(H0, np.array([[0.0, float("inf")]])) H1 = st.persistence_intervals_in_dimension(1) - assert np.array_equal(H1, np.array([[ 0., float("inf")], [ 0., float("inf")]])) + assert np.array_equal(H1, np.array([[0.0, float("inf")], [0.0, float("inf")]])) H2 = st.persistence_intervals_in_dimension(2) - assert np.array_equal(H2, np.array([[ 0., float("inf")]])) + assert np.array_equal(H2, np.array([[0.0, float("inf")]])) # Test empty case assert st.persistence_intervals_in_dimension(3).shape == (0, 2) + def test_equality_operator(): st1 = SimplexTree() st2 = SimplexTree() assert st1 == st2 - st1.insert([1,2,3], 4.) + st1.insert([1, 2, 3], 4.0) assert st1 != st2 - st2.insert([1,2,3], 4.) + st2.insert([1, 2, 3], 4.0) assert st1 == st2 + def test_simplex_tree_deep_copy(): st = SimplexTree() - st.insert([1, 2, 3], 0.) + st.insert([1, 2, 3], 0.0) # compute persistence only on the original st.compute_persistence() @@ -480,14 +488,15 @@ def test_simplex_tree_deep_copy(): for a_splx in a_filt_list: assert a_splx in st_filt_list - + # test double free del st del st_copy + def test_simplex_tree_deep_copy_constructor(): st = SimplexTree() - st.insert([1, 2, 3], 0.) 
+ st.insert([1, 2, 3], 0.0) # compute persistence only on the original st.compute_persistence() @@ -506,56 +515,132 @@ def test_simplex_tree_deep_copy_constructor(): for a_splx in a_filt_list: assert a_splx in st_filt_list - + # test double free del st del st_copy + def test_simplex_tree_constructor_exception(): with pytest.raises(TypeError): - st = SimplexTree(other = "Construction from a string shall raise an exception") + st = SimplexTree(other="Construction from a string shall raise an exception") + + +def test_create_from_array(): + a = np.array([[1, 4, 13, 6], [4, 3, 11, 5], [13, 11, 10, 12], [6, 5, 12, 2]]) + st = SimplexTree.create_from_array(a, max_filtration=5.0) + assert list(st.get_filtration()) == [([0], 1.0), ([3], 2.0), ([1], 3.0), ([0, 1], 4.0), ([1, 3], 5.0)] + + +def test_insert_edges_from_coo_matrix(): + try: + from scipy.sparse import coo_matrix + from scipy.spatial import cKDTree + except ImportError: + print("Skipping, no SciPy") + return + + st = SimplexTree() + st.insert([1, 2, 7], 7) + row = np.array([2, 5, 3]) + col = np.array([1, 4, 6]) + dat = np.array([1, 2, 3]) + edges = coo_matrix((dat, (row, col))) + st.insert_edges_from_coo_matrix(edges) + assert list(st.get_filtration()) == [ + ([1], 1.0), + ([2], 1.0), + ([1, 2], 1.0), + ([4], 2.0), + ([5], 2.0), + ([4, 5], 2.0), + ([3], 3.0), + ([6], 3.0), + ([3, 6], 3.0), + ([7], 7.0), + ([1, 7], 7.0), + ([2, 7], 7.0), + ([1, 2, 7], 7.0), + ] + + pts = np.random.rand(100, 2) + tree = cKDTree(pts) + edges = tree.sparse_distance_matrix(tree, max_distance=0.15, output_type="coo_matrix") + st = SimplexTree() + st.insert_edges_from_coo_matrix(edges) + assert 100 < st.num_simplices() < 1000 + + +def test_insert_batch(): + st = SimplexTree() + # vertices + st.insert_batch(np.array([[6, 1, 5]]), np.array([-5.0, 2.0, -3.0])) + # triangles + st.insert_batch(np.array([[2, 10], [5, 0], [6, 11]]), np.array([4.0, 0.0])) + # edges + st.insert_batch(np.array([[1, 5], [2, 5]]), np.array([1.0, 3.0])) + + assert 
list(st.get_filtration()) == [ + ([6], -5.0), + ([5], -3.0), + ([0], 0.0), + ([10], 0.0), + ([0, 10], 0.0), + ([11], 0.0), + ([0, 11], 0.0), + ([10, 11], 0.0), + ([0, 10, 11], 0.0), + ([1], 1.0), + ([2], 1.0), + ([1, 2], 1.0), + ([2, 5], 4.0), + ([2, 6], 4.0), + ([5, 6], 4.0), + ([2, 5, 6], 4.0), + ] + def test_expansion_with_blocker(): - st=SimplexTree() - st.insert([0,1],0) - st.insert([0,2],1) - st.insert([0,3],2) - st.insert([1,2],3) - st.insert([1,3],4) - st.insert([2,3],5) - st.insert([2,4],6) - st.insert([3,6],7) - st.insert([4,5],8) - st.insert([4,6],9) - st.insert([5,6],10) - st.insert([6],10) + st = SimplexTree() + st.insert([0, 1], 0) + st.insert([0, 2], 1) + st.insert([0, 3], 2) + st.insert([1, 2], 3) + st.insert([1, 3], 4) + st.insert([2, 3], 5) + st.insert([2, 4], 6) + st.insert([3, 6], 7) + st.insert([4, 5], 8) + st.insert([4, 6], 9) + st.insert([5, 6], 10) + st.insert([6], 10) def blocker(simplex): try: # Block all simplices that contain vertex 6 simplex.index(6) - print(simplex, ' is blocked') + print(simplex, " is blocked") return True except ValueError: - print(simplex, ' is accepted') - st.assign_filtration(simplex, st.filtration(simplex) + 1.) + print(simplex, " is accepted") + st.assign_filtration(simplex, st.filtration(simplex) + 1.0) return False st.expansion_with_blocker(2, blocker) assert st.num_simplices() == 22 assert st.dimension() == 2 - assert st.find([4,5,6]) == False - assert st.filtration([0,1,2]) == 4. - assert st.filtration([0,1,3]) == 5. - assert st.filtration([0,2,3]) == 6. - assert st.filtration([1,2,3]) == 6. + assert st.find([4, 5, 6]) == False + assert st.filtration([0, 1, 2]) == 4.0 + assert st.filtration([0, 1, 3]) == 5.0 + assert st.filtration([0, 2, 3]) == 6.0 + assert st.filtration([1, 2, 3]) == 6.0 st.expansion_with_blocker(3, blocker) assert st.num_simplices() == 23 assert st.dimension() == 3 - assert st.find([4,5,6]) == False - assert st.filtration([0,1,2]) == 4. - assert st.filtration([0,1,3]) == 5. 
- assert st.filtration([0,2,3]) == 6. - assert st.filtration([1,2,3]) == 6. - assert st.filtration([0,1,2,3]) == 7. + assert st.find([4, 5, 6]) == False + assert st.filtration([0, 1, 2]) == 4.0 + assert st.filtration([0, 1, 3]) == 5.0 + assert st.filtration([0, 2, 3]) == 6.0 + assert st.filtration([1, 2, 3]) == 6.0 + assert st.filtration([0, 1, 2, 3]) == 7.0 diff --git a/src/python/test/test_wasserstein_distance.py b/src/python/test/test_wasserstein_distance.py index 3a004d77..a76b6ce7 100755 --- a/src/python/test/test_wasserstein_distance.py +++ b/src/python/test/test_wasserstein_distance.py @@ -90,10 +90,11 @@ def test_get_essential_parts(): def test_warn_infty(): - assert _warn_infty(matching=False)==np.inf - c, m = _warn_infty(matching=True) - assert (c == np.inf) - assert (m is None) + with pytest.warns(UserWarning): + assert _warn_infty(matching=False)==np.inf + c, m = _warn_infty(matching=True) + assert (c == np.inf) + assert (m is None) def _basic_wasserstein(wasserstein_distance, delta, test_infinity=True, test_matching=True): |