author    Vincent Rouvreau <vincent.rouvreau@inria.fr>  2022-01-19 08:11:03 +0100
committer Vincent Rouvreau <vincent.rouvreau@inria.fr>  2022-01-19 08:11:03 +0100
commit    f76dcd1fe954b77e1d8af368b7fe75355f944a80 (patch)
tree      a2508f0be2b2c8671665d82c150baf78c9380705 /src/python
parent    df575b8786b484a631ab4d298ce5d12199f3b5a7 (diff)
parent    de5aa9c891ef13c9fc2b2635bcd27ab873b0057b (diff)
Merge master
Diffstat (limited to 'src/python')
-rw-r--r--  src/python/CMakeLists.txt                        | 42
-rw-r--r--  src/python/doc/datasets_generators.rst           |  2
-rw-r--r--  src/python/gudhi/datasets/generators/_points.cc  | 11
-rw-r--r--  src/python/gudhi/datasets/generators/points.py   | 19
-rwxr-xr-x  src/python/test/test_dtm.py                      | 14
5 files changed, 51 insertions, 37 deletions
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 66e37848..77c6ab50 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -14,13 +14,16 @@ function( add_GUDHI_PYTHON_lib THE_LIB )
endif(EXISTS ${THE_LIB})
endfunction( add_GUDHI_PYTHON_lib )
-function( add_GUDHI_PYTHON_lib_dir THE_LIB_DIR )
- # deals when it is not set - error on windows
- if(EXISTS ${THE_LIB_DIR})
- set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${THE_LIB_DIR}', " PARENT_SCOPE)
- else()
- message("add_GUDHI_PYTHON_lib_dir - '${THE_LIB_DIR}' does not exist")
- endif()
+function( add_GUDHI_PYTHON_lib_dir)
+ # Argument may be a list (specifically on windows with release/debug paths)
+ foreach(THE_LIB_DIR IN LISTS ARGN)
+ # Handle the case where the directory variable is not set, which would otherwise error on Windows
+ if(EXISTS ${THE_LIB_DIR})
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${THE_LIB_DIR}', " PARENT_SCOPE)
+ else()
+ message("add_GUDHI_PYTHON_lib_dir - '${THE_LIB_DIR}' does not exist")
+ endif()
+ endforeach()
endfunction( add_GUDHI_PYTHON_lib_dir )
# THE_TEST is the python test file name (without .py extension) containing tests functions
@@ -178,6 +181,10 @@ if(PYTHONINTERP_FOUND)
endif ()
if(CGAL_FOUND)
+ if(NOT CGAL_VERSION VERSION_LESS 5.3.0)
+ # CGAL_HEADER_ONLY has been dropped for CGAL >= 5.3. Only the header-only version is supported.
+ set(CGAL_HEADER_ONLY True)
+ endif(NOT CGAL_VERSION VERSION_LESS 5.3.0)
# Add CGAL compilation args
if(CGAL_HEADER_ONLY)
add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}")
@@ -185,7 +192,7 @@ if(PYTHONINTERP_FOUND)
else(CGAL_HEADER_ONLY)
add_gudhi_debug_info("CGAL version ${CGAL_VERSION}")
add_GUDHI_PYTHON_lib("${CGAL_LIBRARY}")
- add_GUDHI_PYTHON_lib_dir("${CGAL_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${CGAL_LIBRARIES_DIR})
message("** Add CGAL ${CGAL_LIBRARIES_DIR}")
# If CGAL is not header only, CGAL library may link with boost system,
if(CMAKE_BUILD_TYPE MATCHES Debug)
@@ -193,7 +200,7 @@ if(PYTHONINTERP_FOUND)
else()
add_GUDHI_PYTHON_lib("${Boost_SYSTEM_LIBRARY_RELEASE}")
endif()
- add_GUDHI_PYTHON_lib_dir("${Boost_LIBRARY_DIRS}")
+ add_GUDHI_PYTHON_lib_dir(${Boost_LIBRARY_DIRS})
message("** Add Boost ${Boost_LIBRARY_DIRS}")
endif(CGAL_HEADER_ONLY)
# GMP and GMPXX are not required, but if present, CGAL will link with them.
@@ -205,13 +212,13 @@ if(PYTHONINTERP_FOUND)
get_filename_component(GMP_LIBRARIES_DIR ${GMP_LIBRARIES} PATH)
message("GMP_LIBRARIES_DIR from GMP_LIBRARIES set to ${GMP_LIBRARIES_DIR}")
endif(NOT GMP_LIBRARIES_DIR)
- add_GUDHI_PYTHON_lib_dir("${GMP_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${GMP_LIBRARIES_DIR})
message("** Add gmp ${GMP_LIBRARIES_DIR}")
if(GMPXX_FOUND)
add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
add_GUDHI_PYTHON_lib("${GMPXX_LIBRARIES}")
- add_GUDHI_PYTHON_lib_dir("${GMPXX_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${GMPXX_LIBRARIES_DIR})
message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}")
endif(GMPXX_FOUND)
endif(GMP_FOUND)
@@ -224,7 +231,7 @@ if(PYTHONINTERP_FOUND)
get_filename_component(MPFR_LIBRARIES_DIR ${MPFR_LIBRARIES} PATH)
message("MPFR_LIBRARIES_DIR from MPFR_LIBRARIES set to ${MPFR_LIBRARIES_DIR}")
endif(NOT MPFR_LIBRARIES_DIR)
- add_GUDHI_PYTHON_lib_dir("${MPFR_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${MPFR_LIBRARIES_DIR})
message("** Add mpfr ${MPFR_LIBRARIES_DIR}")
endif(MPFR_FOUND)
endif(CGAL_FOUND)
@@ -245,14 +252,14 @@ if(PYTHONINTERP_FOUND)
if (TBB_FOUND AND WITH_GUDHI_USE_TBB)
add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
- if(CMAKE_BUILD_TYPE MATCHES Debug)
+ if((CMAKE_BUILD_TYPE MATCHES Debug) AND TBB_DEBUG_LIBRARY)
add_GUDHI_PYTHON_lib("${TBB_DEBUG_LIBRARY}")
add_GUDHI_PYTHON_lib("${TBB_MALLOC_DEBUG_LIBRARY}")
else()
add_GUDHI_PYTHON_lib("${TBB_RELEASE_LIBRARY}")
add_GUDHI_PYTHON_lib("${TBB_MALLOC_RELEASE_LIBRARY}")
endif()
- add_GUDHI_PYTHON_lib_dir("${TBB_LIBRARY_DIRS}")
+ add_GUDHI_PYTHON_lib_dir(${TBB_LIBRARY_DIRS})
message("** Add tbb ${TBB_LIBRARY_DIRS}")
set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ")
endif()
@@ -292,7 +299,12 @@ if(PYTHONINTERP_FOUND)
add_custom_target(python ALL DEPENDS gudhi.so
COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests")
- set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}")
+ # Path separator management for windows
+ if (WIN32)
+ set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR};$ENV{PYTHONPATH}")
+ else(WIN32)
+ set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}")
+ endif(WIN32)
# Documentation generation is available through sphinx - requires all modules
# Make it first as sphinx test is by far the longest test which is nice when testing in parallel
if(SPHINX_PATH)
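
The WIN32 branch added above is needed because Windows separates PYTHONPATH entries with ';' while POSIX systems use ':'. Python exposes the same distinction as os.pathsep; a minimal sketch of the idea (illustration only, not part of this patch, with a hypothetical build directory):

    import os

    # os.pathsep is ";" on Windows and ":" elsewhere -- the same distinction
    # the CMake code above encodes with its WIN32 branch.
    build_dir = "/path/to/build/src/python"  # hypothetical build directory
    existing = os.environ.get("PYTHONPATH", "")
    os.environ["PYTHONPATH"] = build_dir + (os.pathsep + existing if existing else "")
    print(os.environ["PYTHONPATH"])
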
diff --git a/src/python/doc/datasets_generators.rst b/src/python/doc/datasets_generators.rst
index 6f36bce1..260c3882 100644
--- a/src/python/doc/datasets_generators.rst
+++ b/src/python/doc/datasets_generators.rst
@@ -42,7 +42,7 @@ Example
.. autofunction:: gudhi.datasets.generators.points.sphere
Points on a flat torus
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^
You can also generate points on a torus.
diff --git a/src/python/gudhi/datasets/generators/_points.cc b/src/python/gudhi/datasets/generators/_points.cc
index 70ce4925..82fea25b 100644
--- a/src/python/gudhi/datasets/generators/_points.cc
+++ b/src/python/gudhi/datasets/generators/_points.cc
@@ -96,7 +96,6 @@ PYBIND11_MODULE(_points, m) {
:type radius: float
:param sample: The sample type. Default and only available value is `"random"`.
:type sample: string
- :rtype: numpy array of float
:returns: the generated points on a sphere.
)pbdoc");
@@ -111,10 +110,12 @@ PYBIND11_MODULE(_points, m) {
:type dim: integer
:param sample: The sample type. Available values are: `"random"` and `"grid"`. Default value is `"random"`.
:type sample: string
- :rtype: numpy array of float.
- The shape of returned numpy array is :
- if sample is 'random' : (n_samples, 2*dim).
- if sample is 'grid' : (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
:returns: the generated points on a torus.
+
+ The shape of returned numpy array is:
+
+ If sample is 'random': (n_samples, 2*dim).
+
+ If sample is 'grid': (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
)pbdoc");
}
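
To make the documented 'grid' shape concrete, a small arithmetic sketch (illustration only, not part of this patch): requesting n_samples = 50 points on a 3-torus keeps only the largest perfect cube not exceeding 50.

    import math

    n_samples, dim = 50, 3                                 # hypothetical request
    n_kept = math.floor(n_samples ** (1.0 / dim)) ** dim   # floor(50**(1/3))**3 = 3**3 = 27
    shape = (n_kept, 2 * dim)                              # (27, 6), as described in the docstring
    print(shape)

Note that the Python implementation in the points.py hunk below adds .5 before taking the dim-th root, to guard against a perfect power being rounded down by floating-point error.
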
diff --git a/src/python/gudhi/datasets/generators/points.py b/src/python/gudhi/datasets/generators/points.py
index cf97777d..9bb2799d 100644
--- a/src/python/gudhi/datasets/generators/points.py
+++ b/src/python/gudhi/datasets/generators/points.py
@@ -19,15 +19,15 @@ def _generate_random_points_on_torus(n_samples, dim):
# Based on angles, construct points of size n_samples*dim on a circle and reshape the result in a n_samples*2*dim array
array_points = np.column_stack([np.cos(alpha), np.sin(alpha)]).reshape(-1, 2*dim)
-
+
return array_points
def _generate_grid_points_on_torus(n_samples, dim):
-
+
# Generate points on a dim-torus as a grid
n_samples_grid = int((n_samples+.5)**(1./dim)) # add .5 to avoid rounding down with numerical approximations
alpha = np.linspace(0, 2*np.pi, n_samples_grid, endpoint=False)
-
+
array_points = np.column_stack([np.cos(alpha), np.sin(alpha)])
array_points_idx = np.empty([n_samples_grid]*dim + [dim], dtype=int)
for i, x in enumerate(np.ix_(*([np.arange(n_samples_grid)]*dim))):
@@ -35,16 +35,19 @@ def _generate_grid_points_on_torus(n_samples, dim):
return array_points[array_points_idx].reshape(-1, 2*dim)
def torus(n_samples, dim, sample='random'):
- """
+ """
Generate points on a flat dim-torus in R^2dim either randomly or on a grid
-
+
:param n_samples: The number of points to be generated.
:param dim: The dimension of the torus on which points would be generated in R^2*dim.
:param sample: The sample type of the generated points. Can be 'random' or 'grid'.
:returns: numpy array containing the generated points on a torus.
- The shape of returned numpy array is:
- if sample is 'random' : (n_samples, 2*dim).
- if sample is 'grid' : (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
+
+ The shape of returned numpy array is:
+
+ If sample is 'random': (n_samples, 2*dim).
+
+ If sample is 'grid': (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
"""
if sample == 'random':
# Generate points randomly
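
A short usage sketch of the generator documented above (assuming a build in which gudhi.datasets.generators.points is importable):

    from gudhi.datasets.generators import points

    rand_pts = points.torus(n_samples=10, dim=2, sample='random')
    print(rand_pts.shape)   # (10, 4): n_samples points, each in R^(2*dim)

    grid_pts = points.torus(n_samples=10, dim=2, sample='grid')
    print(grid_pts.shape)   # (9, 4): floor(10**(1/2))**2 = 9 grid points
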
diff --git a/src/python/test/test_dtm.py b/src/python/test/test_dtm.py
index c29471cf..e46d616c 100755
--- a/src/python/test/test_dtm.py
+++ b/src/python/test/test_dtm.py
@@ -91,13 +91,11 @@ def test_density():
def test_dtm_overflow_warnings():
pts = numpy.array([[10., 100000000000000000000000000000.], [1000., 100000000000000000000000000.]])
- impl_warn = ["keops", "hnsw"]
with warnings.catch_warnings(record=True) as w:
- for impl in impl_warn:
- dtm = DistanceToMeasure(2, q=10000, implementation=impl)
- r = dtm.fit_transform(pts)
- assert len(w) == 2
- for i in range(len(w)):
- assert issubclass(w[i].category, RuntimeWarning)
- assert "Overflow" in str(w[i].message)
+ # TODO Test "keops" implementation as well when next version of pykeops (current is 1.5) is released (should fix the problem (cf. issue #543))
+ dtm = DistanceToMeasure(2, implementation="hnsw")
+ r = dtm.fit_transform(pts)
+ assert len(w) == 1
+ assert issubclass(w[0].category, RuntimeWarning)
+ assert "Overflow" in str(w[0].message)