author    Vincent Rouvreau <vincent.rouvreau@inria.fr>    2022-08-10 09:25:40 +0200
committer Vincent Rouvreau <vincent.rouvreau@inria.fr>    2022-08-10 09:25:40 +0200
commit    5fdb9e5e1ed77f7ad5a98c563fb9bfa09056271c (patch)
tree      9f4036e73e8083be95153af91ad761892bc1b8b2
parent    69198e9a00648aa5f8f38e1cef2c7bd6b7299dbb (diff)
parent    4f83706aa1263c04cb5e8763e1e8eb6c580bed3c (diff)
Merge branch 'master' into sklearn_cubical
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex_blocker.h        27
-rw-r--r--  src/Contraction/include/gudhi/Skeleton_blocker_contractor.h  21
-rw-r--r--  src/python/gudhi/representations/vector_methods.py           18
-rwxr-xr-x  src/python/test/test_representations.py                      21
4 files changed, 55 insertions(+), 32 deletions(-)
diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
index e7f548ba..3ea82826 100644
--- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
+++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
@@ -54,6 +54,7 @@ class Cech_blocker {
using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle;
using Filtration_value = typename SimplicialComplexForCech::Filtration_value;
+ using Simplex_key = typename SimplicialComplexForCech::Simplex_key;
template<class PointIterator>
Sphere get_sphere(PointIterator begin, PointIterator end) const {
@@ -70,7 +71,6 @@ class Cech_blocker {
* \return true if the simplex radius is greater than the Cech_complex max_radius*/
bool operator()(Simplex_handle sh) {
using Point_cloud = std::vector<Point_d>;
- CGAL::NT_converter<FT, Filtration_value> cast_to_fv;
Filtration_value radius = 0;
bool is_min_enclos_ball = false;
Point_cloud points;
@@ -78,10 +78,10 @@ class Cech_blocker {
// for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices
for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) {
- Sphere sph;
auto k = sc_ptr_->key(face_opposite_vertex.first);
+ Simplex_key sph_key;
if(k != sc_ptr_->null_key()) {
- sph = cc_ptr_->get_cache().at(k);
+ sph_key = k;
}
else {
for (auto vertex : sc_ptr_->simplex_vertex_range(face_opposite_vertex.first)) {
@@ -90,25 +90,22 @@ class Cech_blocker {
std::clog << "#(" << vertex << ")#";
#endif // DEBUG_TRACES
}
- sph = get_sphere(points.cbegin(), points.cend());
// Put edge sphere in cache
- sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size());
- cc_ptr_->get_cache().push_back(sph);
+ sph_key = cc_ptr_->get_cache().size();
+ sc_ptr_->assign_key(face_opposite_vertex.first, sph_key);
+ cc_ptr_->get_cache().push_back(get_sphere(points.cbegin(), points.cend()));
// Clear face points
points.clear();
}
// Check if the minimal enclosing ball of current face contains the extra point/opposite vertex
+ Sphere const& sph = cc_ptr_->get_cache()[sph_key];
if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) {
+ is_min_enclos_ball = true;
+ sc_ptr_->assign_key(sh, sph_key);
+ radius = sc_ptr_->filtration(face_opposite_vertex.first);
#ifdef DEBUG_TRACES
std::clog << "center: " << sph.first << ", radius: " << radius << std::endl;
#endif // DEBUG_TRACES
- is_min_enclos_ball = true;
-#if CGAL_VERSION_NR >= 1050000000
- if(exact_) CGAL::exact(sph.second);
-#endif
- radius = std::sqrt(cast_to_fv(sph.second));
- sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size());
- cc_ptr_->get_cache().push_back(sph);
break;
}
}
@@ -121,6 +118,7 @@ class Cech_blocker {
#if CGAL_VERSION_NR >= 1050000000
if(exact_) CGAL::exact(sph.second);
#endif
+ CGAL::NT_converter<FT, Filtration_value> cast_to_fv;
radius = std::sqrt(cast_to_fv(sph.second));
sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size());
@@ -130,7 +128,8 @@ class Cech_blocker {
#ifdef DEBUG_TRACES
if (radius > cc_ptr_->max_radius()) std::clog << "radius > max_radius => expansion is blocked\n";
#endif // DEBUG_TRACES
- sc_ptr_->assign_filtration(sh, radius);
+ // Check that the filtration to be assigned (radius) would be valid
+ if (radius > sc_ptr_->filtration(sh)) sc_ptr_->assign_filtration(sh, radius);
return (radius > cc_ptr_->max_radius());
}
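
The hunks above replace the old pattern of copying each face's sphere out of the cache (and pushing duplicates back in) with a cache indexed by simplex key: a face's minimal enclosing sphere is computed at most once, stored at index sph_key, and later looked up by that key; the NT_converter is now only constructed in the circumsphere branch that actually needs it. The last hunk also guards assign_filtration so a stored filtration value is only ever increased. A minimal Python sketch of the same bookkeeping follows; cache, key_of, compute_sphere and assign_filtration are illustrative names, not GUDHI API.

# Hypothetical sketch of the key-based sphere cache used above: `cache` plays
# the role of cc_ptr_->get_cache(), `key_of` of sc_ptr_->key()/assign_key().

cache = []      # cache[key] -> (center, squared_radius)
key_of = {}     # face -> its key, i.e. an index into `cache`

def cached_sphere(face, compute_sphere):
    """Return (key, sphere) for `face`, computing the sphere at most once."""
    key = key_of.get(face)
    if key is None:                      # same role as k == null_key()
        key = len(cache)                 # new key = current cache size
        key_of[face] = key               # assign_key(face, key)
        cache.append(compute_sphere(face))
    return key, cache[key]

def assign_filtration(filtrations, simplex, radius):
    """Only raise a filtration value, mirroring the new guard in the blocker."""
    if radius > filtrations.get(simplex, float("-inf")):
        filtrations[simplex] = radius
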
diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h
index 56b76318..6911ca2e 100644
--- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h
+++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h
@@ -171,8 +171,13 @@ typename GeometricSimplifiableComplex::Vertex_handle> {
Self const* algorithm_;
};
+#if CGAL_VERSION_NR < 1050500000
typedef CGAL::Modifiable_priority_queue<Edge_handle, Compare_cost, Undirected_edge_id> PQ;
- typedef typename PQ::handle pq_handle;
+#else
+ typedef CGAL::Modifiable_priority_queue<Edge_handle, Compare_cost, Undirected_edge_id, CGAL::CGAL_BOOST_PENDING_RELAXED_HEAP> PQ;
+#endif
+
+ typedef bool pq_handle;
// An Edge_data is associated with EVERY edge in the complex (collapsible or not).
@@ -196,7 +201,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> {
}
bool is_in_PQ() const {
- return PQHandle_ != PQ::null_handle();
+ return PQHandle_ != false;
}
void set_PQ_handle(pq_handle h) {
@@ -204,7 +209,7 @@ typename GeometricSimplifiableComplex::Vertex_handle> {
}
void reset_PQ_handle() {
- PQHandle_ = PQ::null_handle();
+ PQHandle_ = false;
}
private:
@@ -238,16 +243,22 @@ typename GeometricSimplifiableComplex::Vertex_handle> {
}
void insert_in_PQ(Edge_handle edge, Edge_data& data) {
- data.set_PQ_handle(heap_PQ_->push(edge));
+ heap_PQ_->push(edge);
+ data.set_PQ_handle(true);
++current_num_edges_heap_;
}
void update_in_PQ(Edge_handle edge, Edge_data& data) {
+#if CGAL_VERSION_NR < 1050500000
data.set_PQ_handle(heap_PQ_->update(edge, data.PQ_handle()));
+#else
+ heap_PQ_->update(edge);
+#endif
}
void remove_from_PQ(Edge_handle edge, Edge_data& data) {
- data.set_PQ_handle(heap_PQ_->erase(edge, data.PQ_handle()));
+ heap_PQ_->erase(edge);
+ data.set_PQ_handle(false);
--current_num_edges_heap_;
}
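
This file adapts the edge contractor to the Modifiable_priority_queue interface that the diff gates on CGAL_VERSION_NR 1050500000 (CGAL 5.5): the queue type takes an extra CGAL_BOOST_PENDING_RELAXED_HEAP tag, and push/update/erase are called with the edge itself rather than a stored handle, so pq_handle degenerates to a plain bool that only records whether the edge is currently queued. Below is a rough Python sketch of that "membership flag instead of handle" bookkeeping, using a lazy-deletion heap as a stand-in for the CGAL queue; EdgePQ and edge_data are illustrative, not part of GUDHI or CGAL.

import heapq
import itertools

# Illustrative stand-in for an element-addressed priority queue. Only the
# bookkeeping idea matters: each edge's data keeps a bool "in_pq" instead of
# a heap handle.

class EdgePQ:
    def __init__(self):
        self._heap = []                    # entries: [cost, tie_breaker, edge]
        self._live = {}                    # edge -> its live heap entry
        self._count = itertools.count()

    def push(self, edge, cost):
        entry = [cost, next(self._count), edge]
        self._live[edge] = entry
        heapq.heappush(self._heap, entry)

    def erase(self, edge):
        self._live.pop(edge)[2] = None     # lazy deletion: mark entry stale

    def update(self, edge, cost):
        self.erase(edge)
        self.push(edge, cost)

    def pop_min(self):
        while self._heap:
            cost, _, edge = heapq.heappop(self._heap)
            if edge is not None:           # skip stale entries
                del self._live[edge]
                return edge, cost
        return None

edge_data = {}                             # edge -> {"in_pq": bool}

def insert_in_pq(pq, edge, cost):          # mirrors insert_in_PQ above
    pq.push(edge, cost)
    edge_data.setdefault(edge, {})["in_pq"] = True

def remove_from_pq(pq, edge):              # mirrors remove_from_PQ above
    pq.erase(edge)
    edge_data[edge]["in_pq"] = False
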
diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py
index f8078d03..69ff5e1e 100644
--- a/src/python/gudhi/representations/vector_methods.py
+++ b/src/python/gudhi/representations/vector_methods.py
@@ -508,26 +508,20 @@ class Entropy(BaseEstimator, TransformerMixin):
new_X = BirthPersistenceTransform().fit_transform(X)
for i in range(num_diag):
- orig_diagram, diagram, num_pts_in_diag = X[i], new_X[i], X[i].shape[0]
- try:
- new_diagram = DiagramScaler(use=True, scalers=[([1], MaxAbsScaler())]).fit_transform([diagram])[0]
- except ValueError:
- # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507
- assert len(diagram) == 0
- new_diagram = np.empty(shape = [0, 2])
-
+ orig_diagram, new_diagram, num_pts_in_diag = X[i], new_X[i], X[i].shape[0]
+
+ p = new_diagram[:,1]
+ p = p/np.sum(p)
if self.mode == "scalar":
- ent = - np.sum( np.multiply(new_diagram[:,1], np.log(new_diagram[:,1])) )
+ ent = -np.dot(p, np.log(p))
Xfit.append(np.array([[ent]]))
-
else:
ent = np.zeros(self.resolution)
for j in range(num_pts_in_diag):
[px,py] = orig_diagram[j,:2]
min_idx = np.clip(np.ceil((px - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
- for k in range(min_idx, max_idx):
- ent[k] += (-1) * new_diagram[j,1] * np.log(new_diagram[j,1])
+ ent[min_idx:max_idx]-=p[j]*np.log(p[j])
if self.normalized:
ent = ent / np.linalg.norm(ent, ord=1)
Xfit.append(np.reshape(ent,[1,-1]))
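
The rewritten Entropy normalizes the persistence values into a probability vector p (summing to 1) before applying -sum(p_i * log(p_i)), instead of scaling with MaxAbsScaler, and the vector mode spreads each point's contribution -p_j * log(p_j) over the bins covering its lifetime in a single slice assignment. A small standalone sketch of the scalar formula the new code implements; persistence_entropy is just an illustration, not the gudhi.representations API.

import numpy as np

def persistence_entropy(diagram):
    """diagram: (n, 2) array of finite (birth, death) pairs."""
    lifetimes = diagram[:, 1] - diagram[:, 0]
    p = lifetimes / lifetimes.sum()       # probability vector, sums to 1
    return -np.dot(p, np.log(p))          # -sum(p_i * log(p_i))

# For [[0, 1], [0, 1], [0, 2]], p = [1/4, 1/4, 1/2] and the entropy is
# (3/2) * ln(2) ~= 1.0397.
print(persistence_entropy(np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 2.0]])))
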
diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py
index d219ce7a..4a455bb6 100755
--- a/src/python/test/test_representations.py
+++ b/src/python/test/test_representations.py
@@ -152,7 +152,26 @@ def test_vectorization_empty_diagrams():
scv = Entropy(mode="vector", normalized=False, resolution=random_resolution)(empty_diag)
assert not np.any(scv)
assert scv.shape[0] == random_resolution
-
+
+def test_entropy_miscalculation():
+ diag_ex = np.array([[0.0,1.0], [0.0,1.0], [0.0,2.0]])
+ def pe(pd):
+ l = pd[:,1] - pd[:,0]
+ l = l/sum(l)
+ return -np.dot(l, np.log(l))
+ sce = Entropy(mode="scalar")
+ assert [[pe(diag_ex)]] == sce.fit_transform([diag_ex])
+ sce = Entropy(mode="vector", resolution=4, normalized=False)
+ pef = [-1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2),
+ -1/4*np.log(1/4)-1/4*np.log(1/4)-1/2*np.log(1/2),
+ -1/2*np.log(1/2),
+ 0.0]
+ assert all(([pef] == sce.fit_transform([diag_ex]))[0])
+ sce = Entropy(mode="vector", resolution=4, normalized=True)
+ pefN = (sce.fit_transform([diag_ex]))[0]
+ area = np.linalg.norm(pefN, ord=1)
+ assert area==1
+
def test_kernel_empty_diagrams():
empty_diag = np.empty(shape = [0, 2])
assert SlicedWassersteinDistance(num_directions=100)(empty_diag, empty_diag) == 0.
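
For reference, the expected vector pef in test_entropy_miscalculation can be re-derived from the binning shown in the vector_methods.py hunk: with p = [1/4, 1/4, 1/2], the two points dying at 1 cover the first two bins and the point dying at 2 covers the first three, leaving the last bin empty. The sketch below recomputes it under the assumptions that the inferred sample range is [0, 2] and that step_x = (range width) / (resolution - 1); both are assumptions about Entropy's internals, not something this diff states.

import numpy as np

diag = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 2.0]])
p = diag[:, 1] - diag[:, 0]
p = p / p.sum()

resolution, lo, hi = 4, 0.0, 2.0          # assumed sample range [0, 2]
step_x = (hi - lo) / (resolution - 1)     # assumed grid step
ent = np.zeros(resolution)
for (birth, death), pj in zip(diag, p):
    min_idx = int(np.clip(np.ceil((birth - lo) / step_x), 0, resolution))
    max_idx = int(np.clip(np.ceil((death - lo) / step_x), 0, resolution))
    ent[min_idx:max_idx] -= pj * np.log(pj)

print(ent)   # ~[1.0397, 1.0397, 0.3466, 0.0], matching pef before normalization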