From 06ff6fac211d2823c7d14a6d2f4a4db03f48d2e3 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 26 Jan 2018 14:01:39 +0000 Subject: Seperate installation and examples from main page Move cover complex utilities from examples GIC.cpp example was not compiled, nor tested. It is removed. Persistence representation : no need to link with Boost_SYSTEM git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3164 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: cf2bfa6c6de2ed359aaa165b9f80bca7e06defb1 --- src/Nerve_GIC/utilities/CMakeLists.txt | 22 ++ .../utilities/KeplerMapperVisuFromTxtFile.py | 72 ++++ src/Nerve_GIC/utilities/Nerve.cpp | 96 +++++ src/Nerve_GIC/utilities/Nerve.txt | 63 ++++ src/Nerve_GIC/utilities/VoronoiGIC.cpp | 90 +++++ src/Nerve_GIC/utilities/km.py | 390 +++++++++++++++++++++ src/Nerve_GIC/utilities/km.py.COPYRIGHT | 26 ++ 7 files changed, 759 insertions(+) create mode 100644 src/Nerve_GIC/utilities/CMakeLists.txt create mode 100755 src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py create mode 100644 src/Nerve_GIC/utilities/Nerve.cpp create mode 100644 src/Nerve_GIC/utilities/Nerve.txt create mode 100644 src/Nerve_GIC/utilities/VoronoiGIC.cpp create mode 100755 src/Nerve_GIC/utilities/km.py create mode 100644 src/Nerve_GIC/utilities/km.py.COPYRIGHT (limited to 'src/Nerve_GIC/utilities') diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt new file mode 100644 index 00000000..a0508dc2 --- /dev/null +++ b/src/Nerve_GIC/utilities/CMakeLists.txt @@ -0,0 +1,22 @@ +cmake_minimum_required(VERSION 2.6) +project(Nerve_GIC_examples) + +if (NOT CGAL_VERSION VERSION_LESS 4.8.1) + + add_executable ( Nerve Nerve.cpp ) + add_executable ( VoronoiGIC VoronoiGIC.cpp ) + + if (TBB_FOUND) + target_link_libraries(Nerve ${TBB_LIBRARIES}) + target_link_libraries(VoronoiGIC ${TBB_LIBRARIES}) + endif() + + file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + + add_test(NAME Nerve_GIC_utilities_nerve COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3") + + add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $ + "${CMAKE_SOURCE_DIR}/data/points/human.off" "100") + +endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) diff --git a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py new file mode 100755 index 00000000..d2897774 --- /dev/null +++ b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +import km +import numpy as np +from collections import defaultdict + +"""This file is part of the Gudhi Library. The Gudhi library + (Geometric Understanding in Higher Dimensions) is a generic C++ + library for computational topology. + + Author(s): Mathieu Carriere + + Copyright (C) 2017 INRIA + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+""" + +__author__ = "Mathieu Carriere" +__copyright__ = "Copyright (C) 2017 INRIA" +__license__ = "GPL v3" + +network = {} +mapper = km.KeplerMapper(verbose=0) +data = np.zeros((3,3)) +projected_data = mapper.fit_transform( data, projection="sum", scaler=None ) + +f = open('SC.txt','r') +nodes = defaultdict(list) +links = defaultdict(list) +custom = defaultdict(list) + +dat = f.readline() +lens = f.readline() +color = f.readline(); +param = [float(i) for i in f.readline().split(" ")] + +nums = [int(i) for i in f.readline().split(" ")] +num_nodes = nums[0] +num_edges = nums[1] + +for i in range(0,num_nodes): + point = [float(j) for j in f.readline().split(" ")] + nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ] + links[ str(int(point[0])) ] = [] + custom[ int(point[0]) ] = point[1] + +m = min([custom[i] for i in range(0,num_nodes)]) +M = max([custom[i] for i in range(0,num_nodes)]) + +for i in range(0,num_edges): + edge = [int(j) for j in f.readline().split(" ")] + links[ str(edge[0]) ].append( str(edge[1]) ) + links[ str(edge[1]) ].append( str(edge[0]) ) + +network["nodes"] = nodes +network["links"] = links +network["meta"] = lens + +mapper.visualize(network, color_function = color, path_html="SC.html", title=dat, +graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0, +height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M) diff --git a/src/Nerve_GIC/utilities/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp new file mode 100644 index 00000000..6abdedc7 --- /dev/null +++ b/src/Nerve_GIC/utilities/Nerve.cpp @@ -0,0 +1,96 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include + +#include +#include + +void usage(int nbArgs, char *const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n"; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if ((argc != 5) && (argc != 6)) usage(argc, argv[0]); + + using Point = std::vector; + + std::string off_file_name(argv[1]); + int coord = atoi(argv[2]); + int resolution = atoi(argv[3]); + double gain = atof(argv[4]); + bool verb = 0; + if (argc == 6) verb = 1; + + // -------------------------------- + // Init of a Nerve from an OFF file + // -------------------------------- + + Gudhi::cover_complex::Cover_complex SC; + SC.set_verbose(verb); + + bool check = SC.read_point_cloud(off_file_name); + + if (!check) { + std::cout << "Incorrect OFF file." 
<< std::endl; + } else { + SC.set_type("Nerve"); + + SC.set_color_from_coordinate(coord); + SC.set_function_from_coordinate(coord); + + SC.set_graph_from_OFF(); + SC.set_resolution_with_interval_number(resolution); + SC.set_gain(gain); + SC.set_cover_from_function(); + + SC.find_simplices(); + + SC.write_info(); + + Gudhi::Simplex_tree<> stree; + SC.create_complex(stree); + SC.compute_PD(); + + // ---------------------------------------------------------------------------- + // Display information about the graph induced complex + // ---------------------------------------------------------------------------- + + if (verb) { + std::cout << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - " + << stree.num_vertices() << " vertices." << std::endl; + + std::cout << "Iterator on Nerve simplices" << std::endl; + for (auto f_simplex : stree.filtration_simplex_range()) { + for (auto vertex : stree.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; + } + } + } + + return 0; +} diff --git a/src/Nerve_GIC/utilities/Nerve.txt b/src/Nerve_GIC/utilities/Nerve.txt new file mode 100644 index 00000000..839ff45e --- /dev/null +++ b/src/Nerve_GIC/utilities/Nerve.txt @@ -0,0 +1,63 @@ +Min function value = -0.979672 and Max function value = 0.816414 +Interval 0 = [-0.979672, -0.761576] +Interval 1 = [-0.838551, -0.581967] +Interval 2 = [-0.658942, -0.402359] +Interval 3 = [-0.479334, -0.22275] +Interval 4 = [-0.299725, -0.0431415] +Interval 5 = [-0.120117, 0.136467] +Interval 6 = [0.059492, 0.316076] +Interval 7 = [0.239101, 0.495684] +Interval 8 = [0.418709, 0.675293] +Interval 9 = [0.598318, 0.816414] +Computing preimages... +Computing connected components... +.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox. +5 interval(s) in dimension 0: + [-0.909111, 0.00817529] + [-0.171433, 0.367392] + [-0.171433, 0.367392] + [-0.909111, 0.745853] +0 interval(s) in dimension 1: +Nerve is of dimension 1 - 41 simplices - 21 vertices. +Iterator on Nerve simplices +1 +0 +4 +4 0 +2 +2 1 +8 +8 2 +5 +5 4 +9 +9 8 +13 +13 5 +14 +14 9 +19 +19 13 +25 +32 +20 +32 20 +33 +33 25 +26 +26 14 +26 19 +42 +42 26 +34 +34 33 +27 +27 20 +35 +35 27 +35 34 +42 35 +44 +44 35 +54 +54 44 \ No newline at end of file diff --git a/src/Nerve_GIC/utilities/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp new file mode 100644 index 00000000..32431cc2 --- /dev/null +++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp @@ -0,0 +1,90 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Mathieu Carrière + * + * Copyright (C) 2017 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include + +#include +#include + +void usage(int nbArgs, char *const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " filename.off N [--v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n"; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if ((argc != 3) && (argc != 4)) usage(argc, argv[0]); + + using Point = std::vector; + + std::string off_file_name(argv[1]); + int m = atoi(argv[2]); + bool verb = 0; + if (argc == 4) verb = 1; + + // ---------------------------------------------------------------------------- + // Init of a graph induced complex from an OFF file + // ---------------------------------------------------------------------------- + + Gudhi::cover_complex::Cover_complex GIC; + GIC.set_verbose(verb); + + bool check = GIC.read_point_cloud(off_file_name); + + if (!check) { + std::cout << "Incorrect OFF file." << std::endl; + } else { + GIC.set_type("GIC"); + + GIC.set_color_from_coordinate(); + + GIC.set_graph_from_OFF(); + GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m); + + GIC.find_simplices(); + + GIC.plot_OFF(); + + Gudhi::Simplex_tree<> stree; + GIC.create_complex(stree); + + // ---------------------------------------------------------------------------- + // Display information about the graph induced complex + // ---------------------------------------------------------------------------- + + if (verb) { + std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() + << " simplices - " << stree.num_vertices() << " vertices." << std::endl; + + std::cout << "Iterator on graph induced complex simplices" << std::endl; + for (auto f_simplex : stree.filtration_simplex_range()) { + for (auto vertex : stree.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; + } + } + } + + return 0; +} diff --git a/src/Nerve_GIC/utilities/km.py b/src/Nerve_GIC/utilities/km.py new file mode 100755 index 00000000..53024aab --- /dev/null +++ b/src/Nerve_GIC/utilities/km.py @@ -0,0 +1,390 @@ +from __future__ import division +import numpy as np +from collections import defaultdict +import json +import itertools +from sklearn import cluster, preprocessing, manifold +from datetime import datetime +import sys + +class KeplerMapper(object): + # With this class you can build topological networks from (high-dimensional) data. + # + # 1) Fit a projection/lens/function to a dataset and transform it. + # For instance "mean_of_row(x) for x in X" + # 2) Map this projection with overlapping intervals/hypercubes. + # Cluster the points inside the interval + # (Note: we cluster on the inverse image/original data to lessen projection loss). + # If two clusters/nodes have the same members (due to the overlap), then: + # connect these with an edge. + # 3) Visualize the network using HTML and D3.js. + # + # functions + # --------- + # fit_transform: Create a projection (lens) from a dataset + # map: Apply Mapper algorithm on this projection and build a simplicial complex + # visualize: Turns the complex dictionary into a HTML/D3.js visualization + + def __init__(self, verbose=2): + self.verbose = verbose + + self.chunk_dist = [] + self.overlap_dist = [] + self.d = [] + self.nr_cubes = 0 + self.overlap_perc = 0 + self.clusterer = False + + def fit_transform(self, X, projection="sum", scaler=preprocessing.MinMaxScaler()): + # Creates the projection/lens from X. 
+ # + # Input: X. Input features as a numpy array. + # Output: projected_X. original data transformed to a projection (lens). + # + # parameters + # ---------- + # projection: Projection parameter is either a string, + # a scikit class with fit_transform, like manifold.TSNE(), + # or a list of dimension indices. + # scaler: if None, do no scaling, else apply scaling to the projection + # Default: Min-Max scaling + + self.scaler = scaler + self.projection = str(projection) + + # Detect if projection is a class (for scikit-learn) + #if str(type(projection))[1:6] == "class": #TODO: de-ugly-fy + # reducer = projection + # if self.verbose > 0: + # try: + # projection.set_params(**{"verbose":self.verbose}) + # except: + # pass + # print("\n..Projecting data using: \n\t%s\n"%str(projection)) + # X = reducer.fit_transform(X) + + # Detect if projection is a string (for standard functions) + if isinstance(projection, str): + if self.verbose > 0: + print("\n..Projecting data using: %s"%(projection)) + # Stats lenses + if projection == "sum": # sum of row + X = np.sum(X, axis=1).reshape((X.shape[0],1)) + if projection == "mean": # mean of row + X = np.mean(X, axis=1).reshape((X.shape[0],1)) + if projection == "median": # mean of row + X = np.median(X, axis=1).reshape((X.shape[0],1)) + if projection == "max": # max of row + X = np.max(X, axis=1).reshape((X.shape[0],1)) + if projection == "min": # min of row + X = np.min(X, axis=1).reshape((X.shape[0],1)) + if projection == "std": # std of row + X = np.std(X, axis=1).reshape((X.shape[0],1)) + + if projection == "dist_mean": # Distance of x to mean of X + X_mean = np.mean(X, axis=0) + X = np.sum(np.sqrt((X - X_mean)**2), axis=1).reshape((X.shape[0],1)) + + # Detect if projection is a list (with dimension indices) + if isinstance(projection, list): + if self.verbose > 0: + print("\n..Projecting data using: %s"%(str(projection))) + X = X[:,np.array(projection)] + + # Scaling + if scaler is not None: + if self.verbose > 0: + print("\n..Scaling with: %s\n"%str(scaler)) + X = scaler.fit_transform(X) + + return X + + def map(self, projected_X, inverse_X=None, clusterer=cluster.DBSCAN(eps=0.5,min_samples=3), nr_cubes=10, overlap_perc=0.1): + # This maps the data to a simplicial complex. Returns a dictionary with nodes and links. + # + # Input: projected_X. A Numpy array with the projection/lens. + # Output: complex. A dictionary with "nodes", "links" and "meta information" + # + # parameters + # ---------- + # projected_X projected_X. A Numpy array with the projection/lens. Required. + # inverse_X Numpy array or None. If None then the projection itself is used for clustering. + # clusterer Scikit-learn API compatible clustering algorithm. Default: DBSCAN + # nr_cubes Int. The number of intervals/hypercubes to create. + # overlap_perc Float. The percentage of overlap "between" the intervals/hypercubes. + + start = datetime.now() + + # Helper function + def cube_coordinates_all(nr_cubes, nr_dimensions): + # Helper function to get origin coordinates for our intervals/hypercubes + # Useful for looping no matter the number of cubes or dimensions + # Example: if there are 4 cubes per dimension and 3 dimensions + # return the bottom left (origin) coordinates of 64 hypercubes, + # as a sorted list of Numpy arrays + # TODO: elegance-ify... 
+ l = [] + for x in range(nr_cubes): + l += [x] * nr_dimensions + return [np.array(list(f)) for f in sorted(set(itertools.permutations(l,nr_dimensions)))] + + nodes = defaultdict(list) + links = defaultdict(list) + complex = {} + self.nr_cubes = nr_cubes + self.clusterer = clusterer + self.overlap_perc = overlap_perc + + if self.verbose > 0: + print("Mapping on data shaped %s using dimensions\n"%(str(projected_X.shape))) + + # If inverse image is not provided, we use the projection as the inverse image (suffer projection loss) + if inverse_X is None: + inverse_X = projected_X + + # We chop up the min-max column ranges into 'nr_cubes' parts + self.chunk_dist = (np.max(projected_X, axis=0) - np.min(projected_X, axis=0))/nr_cubes + + # We calculate the overlapping windows distance + self.overlap_dist = self.overlap_perc * self.chunk_dist + + # We find our starting point + self.d = np.min(projected_X, axis=0) + + # Use a dimension index array on the projected X + # (For now this uses the entire dimensionality, but we keep for experimentation) + di = np.array([x for x in range(projected_X.shape[1])]) + + # Prefix'ing the data with ID's + ids = np.array([x for x in range(projected_X.shape[0])]) + projected_X = np.c_[ids,projected_X] + inverse_X = np.c_[ids,inverse_X] + + # Subdivide the projected data X in intervals/hypercubes with overlap + if self.verbose > 0: + total_cubes = len(cube_coordinates_all(nr_cubes,projected_X.shape[1])) + print("Creating %s hypercubes."%total_cubes) + + for i, coor in enumerate(cube_coordinates_all(nr_cubes,di.shape[0])): + # Slice the hypercube + hypercube = projected_X[ np.invert(np.any((projected_X[:,di+1] >= self.d[di] + (coor * self.chunk_dist[di])) & + (projected_X[:,di+1] < self.d[di] + (coor * self.chunk_dist[di]) + self.chunk_dist[di] + self.overlap_dist[di]) == False, axis=1 )) ] + + if self.verbose > 1: + print("There are %s points in cube_%s / %s with starting range %s"% + (hypercube.shape[0],i,total_cubes,self.d[di] + (coor * self.chunk_dist[di]))) + + # If at least one sample inside the hypercube + if hypercube.shape[0] > 0: + # Cluster the data point(s) in the cube, skipping the id-column + # Note that we apply clustering on the inverse image (original data samples) that fall inside the cube. + inverse_x = inverse_X[[int(nn) for nn in hypercube[:,0]]] + + clusterer.fit(inverse_x[:,1:]) + + if self.verbose > 1: + print("Found %s clusters in cube_%s\n"%(np.unique(clusterer.labels_[clusterer.labels_ > -1]).shape[0],i)) + + #Now for every (sample id in cube, predicted cluster label) + for a in np.c_[hypercube[:,0],clusterer.labels_]: + if a[1] != -1: #if not predicted as noise + cluster_id = str(coor[0])+"_"+str(i)+"_"+str(a[1])+"_"+str(coor)+"_"+str(self.d[di] + (coor * self.chunk_dist[di])) # TODO: de-rudimentary-ify + nodes[cluster_id].append( int(a[0]) ) # Append the member id's as integers + else: + if self.verbose > 1: + print("Cube_%s is empty.\n"%(i)) + + # Create links when clusters from different hypercubes have members with the same sample id. 
+ candidates = itertools.combinations(nodes.keys(),2) + for candidate in candidates: + # if there are non-unique members in the union + if len(nodes[candidate[0]]+nodes[candidate[1]]) != len(set(nodes[candidate[0]]+nodes[candidate[1]])): + links[candidate[0]].append( candidate[1] ) + + # Reporting + if self.verbose > 0: + nr_links = 0 + for k in links: + nr_links += len(links[k]) + print("\ncreated %s edges and %s nodes in %s."%(nr_links,len(nodes),str(datetime.now()-start))) + + complex["nodes"] = nodes + complex["links"] = links + complex["meta"] = self.projection + + return complex + + def visualize(self, complex, color_function="", path_html="mapper_visualization_output.html", title="My Data", + graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=None, width_html=0, + height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=0,gain=0,minimum=0,maximum=0): + # Turns the dictionary 'complex' in a html file with d3.js + # + # Input: complex. Dictionary (output from calling .map()) + # Output: a HTML page saved as a file in 'path_html'. + # + # parameters + # ---------- + # color_function string. Not fully implemented. Default: "" (distance to origin) + # path_html file path as string. Where to save the HTML page. + # title string. HTML page document title and first heading. + # graph_link_distance int. Edge length. + # graph_gravity float. "Gravity" to center of layout. + # graph_charge int. charge between nodes. + # custom_tooltips None or Numpy Array. You could use "y"-label array for this. + # width_html int. Width of canvas. Default: 0 (full width) + # height_html int. Height of canvas. Default: 0 (full height) + # show_tooltips bool. default:True + # show_title bool. default:True + # show_meta bool. default:True + + # Format JSON for D3 graph + json_s = {} + json_s["nodes"] = [] + json_s["links"] = [] + k2e = {} # a key to incremental int dict, used for id's when linking + + for e, k in enumerate(complex["nodes"]): + # Tooltip and node color formatting, TODO: de-mess-ify + if custom_tooltips is not None: + tooltip_s = "
<h2>Cluster %s</h2>
"%k + " ".join(str(custom_tooltips[complex["nodes"][k][0]]).split(" ")) + if maximum == minimum: + tooltip_i = 0 + else: + tooltip_i = int(30*(custom_tooltips[complex["nodes"][k][0]]-minimum)/(maximum-minimum)) + json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(complex["nodes"][k][2])), "color": tooltip_i}) + else: + tooltip_s = "
<h2>Cluster %s</h2>
Contains %s members."%(k,len(complex["nodes"][k])) + json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(len(complex["nodes"][k]))), "color": str(k.split("_")[0])}) + k2e[k] = e + for k in complex["links"]: + for link in complex["links"][k]: + json_s["links"].append({"source": k2e[k], "target":k2e[link],"value":1}) + + # Width and height of graph in HTML output + if width_html == 0: + width_css = "100%" + width_js = 'document.getElementById("holder").offsetWidth-20' + else: + width_css = "%spx" % width_html + width_js = "%s" % width_html + if height_html == 0: + height_css = "100%" + height_js = 'document.getElementById("holder").offsetHeight-20' + else: + height_css = "%spx" % height_html + height_js = "%s" % height_html + + # Whether to show certain UI elements or not + if show_tooltips == False: + tooltips_display = "display: none;" + else: + tooltips_display = "" + + if show_meta == False: + meta_display = "display: none;" + else: + meta_display = "" + + if show_title == False: + title_display = "display: none;" + else: + title_display = "" + + with open(path_html,"wb") as outfile: + html = """ + + + %s | KeplerMapper + + + +
+ [The HTML/D3.js template string was stripped by the page rendering. The original markup contains the page title "%s | KeplerMapper", the title heading (%s), a meta panel showing Lens (%s), Length of intervals (%s), Overlap percentage (%s%%) and Color Function (%s), a tooltip container, and the d3 force-layout script that consumes the remaining parameters listed below.]
+ + """%(title,width_css, height_css, title_display, meta_display, tooltips_display, title,complex["meta"],res,gain*100,color_function,width_js,height_js,graph_charge,graph_link_distance,graph_gravity,json.dumps(json_s)) + outfile.write(html.encode("utf-8")) + if self.verbose > 0: + print("\nWrote d3.js graph to '%s'"%path_html) diff --git a/src/Nerve_GIC/utilities/km.py.COPYRIGHT b/src/Nerve_GIC/utilities/km.py.COPYRIGHT new file mode 100644 index 00000000..bef7b121 --- /dev/null +++ b/src/Nerve_GIC/utilities/km.py.COPYRIGHT @@ -0,0 +1,26 @@ +km.py is a fork of https://github.com/MLWave/kepler-mapper. +Only the visualization part has been kept (Mapper part has been removed). + +This file has te following Copyright : + +The MIT License (MIT) + +Copyright (c) 2015 Triskelion - HJ van Veen - info@mlwave.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. -- cgit v1.2.3 From c2d46e0e6cfea1927875219bd5e03962cef7b010 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 29 Jan 2018 16:31:18 +0000 Subject: Fix doc issue for utilities part on the web site git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3176 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 3bafd9e13a5646ca93023144705d3ac7ed7c9b3a --- src/Alpha_complex/utilities/README | 148 ++++++++++----------- src/Bitmap_cubical_complex/utilities/README | 42 ++++-- src/Bottleneck_distance/utilities/README | 32 ++++- src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 6 +- src/Nerve_GIC/include/gudhi/GIC.h | 11 +- .../utilities/KeplerMapperVisuFromTxtFile.py | 97 ++++++++------ src/Nerve_GIC/utilities/covercomplex.md | 73 ++++++++++ src/Rips_complex/utilities/README | 67 ++++------ src/Witness_complex/utilities/README | 70 +++++----- src/common/utilities/README | 46 +++++-- 10 files changed, 365 insertions(+), 227 deletions(-) create mode 100644 src/Nerve_GIC/utilities/covercomplex.md (limited to 'src/Nerve_GIC/utilities') diff --git a/src/Alpha_complex/utilities/README b/src/Alpha_complex/utilities/README index 1cd2ca95..56bce602 100644 --- a/src/Alpha_complex/utilities/README +++ b/src/Alpha_complex/utilities/README @@ -1,16 +1,35 @@ -# Alpha_complex # - -## `alpha_complex_3d_persistence` ## +--- +layout: page +title: "Alpha complex" +meta_title: "alphacomplex" +subheadline: "" +teaser: "" +permalink: "/alphacomplex/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. 
https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} + + + +## alpha_complex_3d_persistence ## This program computes the persistent homology with coefficient field Z/pZ of the 3D alpha complex built from a 3D point cloud. The output diagram contains one bar per line, written with the convention: -`p dim birth death` +``` +p dim birth death +``` where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number). **Usage** -`alpha_complex_3d_persistence [options] ` -where -`` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). + +``` + alpha_complex_3d_persistence [options] +``` + +where `` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). **Allowed options** @@ -20,43 +39,38 @@ where * `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. **Example** -`alpha_complex_3d_persistence ../../data/points/tore3D_300.off -p 2 -m 0.45` -outputs: ``` -Simplex_tree dim: 3 -2 0 0 inf -2 1 0.0682162 1.0001 -2 1 0.0934117 1.00003 -2 2 0.56444 1.03938 +alpha_complex_3d_persistence ../../data/points/tore3D_300.off -p 2 -m 0.45 ``` -Here we retrieve expected Betti numbers on a tore 3D: -``` -Betti numbers[0] = 1 -Betti numbers[1] = 2 -Betti numbers[2] = 1 -``` +N.B.: -N.B.: * `alpha_complex_3d_persistence` only accepts OFF files in dimension 3. * Filtration values are alpha square values. +## exact_alpha_complex_3d_persistence ## + +Same as `alpha_complex_3d_persistence`, but using exact computation. +It is slower, but it is necessary when points are on a grid for instance. -## `exact_alpha_complex_3d_persistence` ## -Same as `alpha_complex_3d_persistence`, but using exact computation. It is slower, but it is necessary when points are on a grid for instance. +## weighted_alpha_complex_3d_persistence ## -## `weighted_alpha_complex_3d_persistence` ## Same as `alpha_complex_3d_persistence`, but using weighted points. **Usage** -`weighted_alpha_complex_3d_persistence [options] ` + +``` + weighted_alpha_complex_3d_persistence [options] +``` + where -`` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). -`` is the path to the file containing the weights of the points (one value per line). + +* `` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). +* `` is the path to the file containing the weights of the points (one value per line). **Allowed options** @@ -66,32 +80,33 @@ where * `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. 
**Example** -`weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off ../../data/points/tore3D_300.weights -p 2 -m 0.45` -outputs: ``` -Simplex_tree dim: 3 -2 0 -1 inf -2 1 -0.931784 0.000103311 -2 1 -0.906588 2.60165e-05 -2 2 -0.43556 0.0393798 + weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off ../../data/points/tore3D_300.weights -p 2 -m 0.45 ``` + N.B.: + * Weights values are explained on CGAL [Alpha shape](https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title0) and [Regular triangulation](https://doc.cgal.org/latest/Triangulation_3/index.html#Triangulation3secclassRegulartriangulation) documentation. * Filtration values are alpha square values. -## `periodic_alpha_complex_3d_persistence` ## +## periodic_alpha_complex_3d_persistence ## Same as `alpha_complex_3d_persistence`, but using periodic alpha shape 3d. Refer to the [CGAL's 3D Periodic Triangulations User Manual](https://doc.cgal.org/latest/Periodic_3_triangulation_3/index.html) for more details. **Usage** -`periodic_alpha_complex_3d_persistence [options] ` + +``` + periodic_alpha_complex_3d_persistence [options] +``` + where -`` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). -`` is the path to the file describing the periodic domain. It must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid). + +* `` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). +* `` is the path to the file describing the periodic domain. It must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid). **Allowed options** @@ -102,46 +117,36 @@ where **Example** -`periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0` -outputs: ``` -Periodic Delaunay computed. -Simplex_tree dim: 3 -3 0 0 inf -3 1 0.0025 inf -3 1 0.0025 inf -3 1 0.0025 inf -3 2 0.005 inf -3 2 0.005 inf -3 2 0.005 inf -3 3 0.0075 inf +periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0 ``` -Here we retrieve expected Betti numbers on an 3D iso-oriented cuboids: -``` -Betti numbers[0] = 1 -Betti numbers[1] = 3 -Betti numbers[2] = 3 -Betti numbers[3] = 1 -``` +N.B.: -N.B.: * Cuboid file must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid). * Filtration values are alpha square values. +## alpha_complex_persistence ## -## `alpha_complex_persistence` ## -This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. The output diagram contains one bar per line, written with the convention: +This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. +The output diagram contains one bar per line, written with the convention: -`p dim birth death` +``` + p dim birth death +``` -where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number). 
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, +and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number). **Usage** -`alpha_complex_persistence [options] ` + +``` + alpha_complex_persistence [options] +``` + where `` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). @@ -154,24 +159,11 @@ where * `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. **Example** -`alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off` -outputs: ``` -Alpha complex is of dimension 3 - 9273 simplices - 300 vertices. -Simplex_tree dim: 3 -2 0 0 inf -2 1 0.0682162 1.0001 -2 1 0.0934117 1.00003 -2 2 0.56444 1.03938 -``` - -Here we retrieve expected Betti numbers on a tore 3D: -``` -Betti numbers[0] = 1 -Betti numbers[1] = 2 -Betti numbers[2] = 1 + alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off ``` N.B.: + * Filtration values are alpha square values. diff --git a/src/Bitmap_cubical_complex/utilities/README b/src/Bitmap_cubical_complex/utilities/README index ddff7034..393b2654 100644 --- a/src/Bitmap_cubical_complex/utilities/README +++ b/src/Bitmap_cubical_complex/utilities/README @@ -1,18 +1,40 @@ -# Bitmap_cubical_complex # +--- +layout: page +title: "Bitmap cubical complex" +meta_title: "cubicalcomplex" +subheadline: "" +teaser: "" +permalink: "/cubicalcomplex/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} -## `cubical_complex_persistence` ## -This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical complexes provided in text files in Perseus style. See [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format. -Example: +## cubical_complex_persistence ## +This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical complexes provided in text files in Perseus style. +See [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format. -* Create a Cubical Complex from the Perseus style file `CubicalTwoSphere.txt`, computes Persistence cohomology from it and writes the results in a persistence file `CubicalTwoSphere.txt_persistence`: -`cubical_complex_persistence data/bitmap/CubicalTwoSphere.txt` +**Example** -## `periodic_cubical_complex_persistence` ## +``` + cubical_complex_persistence data/bitmap/CubicalTwoSphere.txt +``` + +* Creates a Cubical Complex from the Perseus style file `CubicalTwoSphere.txt`, +computes Persistence cohomology from it and writes the results in a persistence file `CubicalTwoSphere.txt_persistence`. + +## periodic_cubical_complex_persistence ## Same as above, but with periodic boundary conditions. 
-Example: +**Example** + +``` + periodic_cubical_complex_persistence data/bitmap/3d_torus.txt +``` -* Create a Periodical Cubical Complex from the Perseus style file `3d_torus.txt`, computes Persistence cohomology from it and writes the results in a persistence file `3d_torus.txt_persistence`: -`periodic_cubical_complex_persistence data/bitmap/3d_torus.txt` +* Creates a Periodical Cubical Complex from the Perseus style file `3d_torus.txt`, +computes Persistence cohomology from it and writes the results in a persistence file `3d_torus.txt_persistence`. diff --git a/src/Bottleneck_distance/utilities/README b/src/Bottleneck_distance/utilities/README index d9fdd252..04e1c4bd 100644 --- a/src/Bottleneck_distance/utilities/README +++ b/src/Bottleneck_distance/utilities/README @@ -1,10 +1,30 @@ -# Bottleneck_distance # +--- +layout: page +title: "Bottleneck distance" +meta_title: "bottleneckdistance" +subheadline: "" +teaser: "" +permalink: "/bottleneckdistance/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} + + + +## bottleneck_read_file_example ## -## `bottleneck_read_file_example` ## This program computes the Bottleneck distance between two persistence diagram files. -Usage: -`bottleneck_read_file_example []` +**Usage** + +``` + bottleneck_read_file_example [] +``` + where -`` and `` must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPers). -`` is an error bound on the bottleneck distance (set by default to the smallest positive double value). + +* `` and `` must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPers). +* `` is an error bound on the bottleneck distance (set by default to the smallest positive double value). diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h index f2409087..474f0f0e 100644 --- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h +++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h @@ -77,8 +77,8 @@ namespace cover_complex { * * \include Nerve_GIC/Nerve.txt * - * The program also writes a file SC.txt. The first three lines in this file are the location of the input point cloud - * and the function used to compute the cover. + * The program also writes a file ../../data/points/human_sc.txt. The first three lines in this file are the location + * of the input point cloud and the function used to compute the cover. * The fourth line contains the number of vertices nv and edges ne of the Nerve. * The next nv lines represent the vertices. Each line contains the vertex ID, * the number of data points it contains, and their average color function value. @@ -118,7 +118,7 @@ namespace cover_complex { * * the program outputs SC.off. Using e.g. * - * \code $> geomview SC.off + * \code $> geomview ../../data/points/human_sc.off * \endcode * * one can obtain the following visualization: diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h index 58831bbf..ff95b913 100644 --- a/src/Nerve_GIC/include/gudhi/GIC.h +++ b/src/Nerve_GIC/include/gudhi/GIC.h @@ -934,7 +934,7 @@ class Cover_complex { } graphic << "}"; graphic.close(); - std::cout << ".dot file generated. It can be visualized with e.g. neato." << std::endl; + std::cout << mapp << " file generated. It can be visualized with e.g. neato." 
<< std::endl; } public: // Create a .txt file that can be compiled with KeplerMapper. @@ -944,7 +944,7 @@ class Cover_complex { void write_info() { int num_simplices = simplices.size(); int num_edges = 0; - std::string mapp = point_cloud_name + "_sc.dot"; + std::string mapp = point_cloud_name + "_sc.txt"; std::ofstream graphic(mapp.c_str()); for (int i = 0; i < num_simplices; i++) @@ -970,7 +970,8 @@ class Cover_complex { if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) graphic << name2id[simplices[i][0]] << " " << name2id[simplices[i][1]] << std::endl; graphic.close(); - std::cout << ".txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox." + std::cout << mapp + << " generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox." << std::endl; } @@ -988,7 +989,7 @@ class Cover_complex { std::vector > edges, faces; int numsimplices = simplices.size(); - std::string mapp = point_cloud_name + "_sc.dot"; + std::string mapp = point_cloud_name + "_sc.off"; std::ofstream graphic(mapp.c_str()); graphic << "OFF" << std::endl; @@ -1016,7 +1017,7 @@ class Cover_complex { for (int i = 0; i < numfaces; i++) graphic << 3 << " " << faces[i][0] << " " << faces[i][1] << " " << faces[i][2] << std::endl; graphic.close(); - std::cout << ".off generated. It can be visualized with e.g. geomview." << std::endl; + std::cout << mapp << " generated. It can be visualized with e.g. geomview." << std::endl; } // ******************************************************************************************************************* diff --git a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py index d2897774..c811f610 100755 --- a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py +++ b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py @@ -3,6 +3,7 @@ import km import numpy as np from collections import defaultdict +import argparse """This file is part of the Gudhi Library. 
The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic C++ @@ -30,43 +31,59 @@ __author__ = "Mathieu Carriere" __copyright__ = "Copyright (C) 2017 INRIA" __license__ = "GPL v3" -network = {} -mapper = km.KeplerMapper(verbose=0) -data = np.zeros((3,3)) -projected_data = mapper.fit_transform( data, projection="sum", scaler=None ) - -f = open('SC.txt','r') -nodes = defaultdict(list) -links = defaultdict(list) -custom = defaultdict(list) - -dat = f.readline() -lens = f.readline() -color = f.readline(); -param = [float(i) for i in f.readline().split(" ")] - -nums = [int(i) for i in f.readline().split(" ")] -num_nodes = nums[0] -num_edges = nums[1] - -for i in range(0,num_nodes): - point = [float(j) for j in f.readline().split(" ")] - nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ] - links[ str(int(point[0])) ] = [] - custom[ int(point[0]) ] = point[1] - -m = min([custom[i] for i in range(0,num_nodes)]) -M = max([custom[i] for i in range(0,num_nodes)]) - -for i in range(0,num_edges): - edge = [int(j) for j in f.readline().split(" ")] - links[ str(edge[0]) ].append( str(edge[1]) ) - links[ str(edge[1]) ].append( str(edge[0]) ) - -network["nodes"] = nodes -network["links"] = links -network["meta"] = lens - -mapper.visualize(network, color_function = color, path_html="SC.html", title=dat, -graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0, -height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M) +parser = argparse.ArgumentParser(description='Creates an html Keppler Mapper ' + 'file to visualize a SC.txt file.', + epilog='Example: ' + './KeplerMapperVisuFromTxtFile.py ' + '-f ../../data/points/human.off_sc.txt' + '- Constructs an human.off_sc.html file.') +parser.add_argument("-f", "--file", type=str, required=True) + +args = parser.parse_args() + +with open(args.file, 'r') as f: + network = {} + mapper = km.KeplerMapper(verbose=0) + data = np.zeros((3,3)) + projected_data = mapper.fit_transform( data, projection="sum", scaler=None ) + + nodes = defaultdict(list) + links = defaultdict(list) + custom = defaultdict(list) + + dat = f.readline() + lens = f.readline() + color = f.readline(); + param = [float(i) for i in f.readline().split(" ")] + + nums = [int(i) for i in f.readline().split(" ")] + num_nodes = nums[0] + num_edges = nums[1] + + for i in range(0,num_nodes): + point = [float(j) for j in f.readline().split(" ")] + nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ] + links[ str(int(point[0])) ] = [] + custom[ int(point[0]) ] = point[1] + + m = min([custom[i] for i in range(0,num_nodes)]) + M = max([custom[i] for i in range(0,num_nodes)]) + + for i in range(0,num_edges): + edge = [int(j) for j in f.readline().split(" ")] + links[ str(edge[0]) ].append( str(edge[1]) ) + links[ str(edge[1]) ].append( str(edge[0]) ) + + network["nodes"] = nodes + network["links"] = links + network["meta"] = lens + + html_output_filename = args.file.rsplit('.', 1)[0] + '.html' + mapper.visualize(network, color_function = color, path_html=html_output_filename, title=dat, + graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0, + height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M) + message = repr(html_output_filename) + " is generated. You can now use your favorite web browser to visualize it." 
+ print(message) + + + f.close() diff --git a/src/Nerve_GIC/utilities/covercomplex.md b/src/Nerve_GIC/utilities/covercomplex.md new file mode 100644 index 00000000..692e44e7 --- /dev/null +++ b/src/Nerve_GIC/utilities/covercomplex.md @@ -0,0 +1,73 @@ +--- +layout: page +title: "Cover complex" +meta_title: "covercomplex" +subheadline: "" +teaser: "" +permalink: "/covercomplex/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} + + + +## Nerve ## +This program builds the Nerve of a point cloud sampled on an OFF file. +The cover C comes from the preimages of intervals covering a coordinate function, +which are then refined into their connected components using the triangulation of the .OFF file. + +The program also writes a file SC.txt. +The first three lines in this file are the location of the input point cloud and the function used to compute the cover. +The fourth line contains the number of vertices nv and edges ne of the Nerve. The next nv lines represent the vertices. +Each line contains the vertex ID, the number of data points it contains, and their average color function value. +Finally, the next ne lines represent the edges, characterized by the ID of their vertices. + +**Usage** + +`Nerve coordinate resolution gain [--v]` + +where + +* `coordinate` is the coordinate function to cover +* `resolution` is the number of the intervals +* `gain` is the gain for each interval +* `--v` is optional, it activates verbose mode. + +**Example** + +`Nerve ../../data/points/human.off 2 10 0.3` + +* Builds the Nerve of a point cloud sampled on a 3D human shape (human.off). +The cover C comes from the preimages of intervals (10 intervals with gain 0.3) covering the height function (coordinate 2). + +`python KeplerMapperVisuFromTxtFile.py -f ../../data/points/human.off_sc.txt` + +* Constructs `human.off_sc.html` file. You can now use your favorite web browser to visualize it. + +## VoronoiGIC ## + +This util builds the Graph Induced Complex (GIC) of a point cloud. +It subsamples *N* points in the point cloud, which act as seeds of a geodesic Voronoï diagram. +Each cell of the diagram is then an element of C. + +The program also writes a file `*_sc.off`, that is an OFF file that can be visualized with GeomView. + +**Usage** + +`VoroniGIC samples_number [--v]` + +where + +* `samples_number` is the number of samples to take from the point cloud +* `--v` is optional, it activates verbose mode. + +**Example** + +`VoroniGIC ../../data/points/human.off 700` + +* Builds the Voronoi Graph Induced Complex with 700 subsamples from `human.off` file. +`../../data/points/human_sc.off` can be visualized with GeomView. + diff --git a/src/Rips_complex/utilities/README b/src/Rips_complex/utilities/README index 4d20c806..44a37543 100644 --- a/src/Rips_complex/utilities/README +++ b/src/Rips_complex/utilities/README @@ -1,6 +1,20 @@ -# Rips_complex # - -## `rips_persistence` ## +--- +layout: page +title: "Rips complex" +meta_title: "ripscomplex" +subheadline: "" +teaser: "" +permalink: "/ripscomplex/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} + + + +## rips_persistence ## This program computes the persistent homology with coefficient field *Z/pZ* of a Rips complex defined on a set of input points. 
The output diagram contains one bar per line, written with the convention: `p dim birth death` @@ -8,6 +22,7 @@ This program computes the persistent homology with coefficient field *Z/pZ* of a where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number). **Usage** + `rips_persistence [options] ` **Allowed options** @@ -22,53 +37,25 @@ where `dim` is the dimension of the homological feature, `birth` and `death` are Beware: this program may use a lot of RAM and take a lot of time if `max-edge-length` is set to a large value. **Example 1 with Z/2Z coefficients** -`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2` -outputs: -``` -2 0 0 inf -2 1 0.0983494 inf -2 1 0.104347 inf -2 2 0.138335 inf -``` +`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2` **Example 2 with Z/3Z coefficients** -rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3 - -outputs: -``` -3 0 0 inf -3 1 0.0983494 inf -3 1 0.104347 inf -3 2 0.138335 inf -``` +`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3` +## rips_distance_matrix_persistence ## +Same as `rips_persistence` but taking a distance matrix as input. -## `rips_distance_matrix_persistence` ## -Same as `rips_persistence` but taking a distance matrix as input. - **Usage** -`rips_persistence [options] ` -where + +`rips_persistence [options] ` + +where `` is the path to the file containing a distance matrix. Can be square or lower triangular matrix. Separator is ';'. **Example** -`rips_distance_matrix_persistence data/distance_matrix/full_square_distance_matrix.csv -r 15 -d 3 -p 3 -m 0` -outputs: -``` -The complex contains 46 simplices - and has dimension 3 -3 0 0 inf -3 0 0 8.94427 -3 0 0 7.28011 -3 0 0 6.08276 -3 0 0 5.83095 -3 0 0 5.38516 -3 0 0 5 -3 1 11 12.0416 -3 1 6.32456 6.7082 -``` +`rips_distance_matrix_persistence data/distance_matrix/full_square_distance_matrix.csv -r 15 -d 3 -p 3 -m 0` diff --git a/src/Witness_complex/utilities/README b/src/Witness_complex/utilities/README index 1141033e..5cdb1f88 100644 --- a/src/Witness_complex/utilities/README +++ b/src/Witness_complex/utilities/README @@ -1,18 +1,34 @@ -# Witness_complex # +--- +layout: page +title: "Witness complex" +meta_title: "witnesscomplex" +subheadline: "" +teaser: "" +permalink: "/witnesscomplex/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} + For more details about the witness complex, please read the [user manual of the package](http://gudhi.gforge.inria.fr/doc/latest/group__witness__complex.html). -## `weak_witness_persistence` ## -This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention: +## weak_witness_persistence ## +This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points. 
+The output diagram contains one bar per line, written with the convention: `p dim birth death` -where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients. +where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, +and `p` is the characteristic of the field *Z/pZ* used for homology coefficients. + +**Usage** -*Usage* `weak_witness_persistence [options] ` -*Allowed options* +**Allowed options** * `-h [ --help ]` Produce help message * `-l [ --landmarks ]` Number of landmarks to choose from the point cloud. @@ -22,33 +38,28 @@ where `dim` is the dimension of the homological feature, `birth` and `death` are * `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. * `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the weak witness complex we want to compute. -*Example* -`weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.006` +**Example** -outputs: -``` -Successfully read 1307 points. -Ambient dimension is 3. -The complex contains 732 simplices and has dimension 8 -11 0 0 inf -11 1 0 inf -11 2 0.0275251 0.0534586 -11 1 0 0.0239952 -``` +`weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.006` N.B.: output is random as the 20 landmarks are chosen randomly. -## `strong_witness_persistence` ## -This program computes the persistent homology with coefficient field *Z/pZ* of a Strong witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention: + +## strong_witness_persistence ## + +This program computes the persistent homology with coefficient field *Z/pZ* of a Strong witness complex defined on a set of input points. +The output diagram contains one bar per line, written with the convention: `p dim birth death` -where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients. +where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, +and `p` is the characteristic of the field *Z/pZ* used for homology coefficients. + +**Usage** -*Usage* `strong_witness_persistence [options] ` -*Allowed options* +**Allowed options** * `-h [ --help ]` Produce help message * `-l [ --landmarks ]` Number of landmarks to choose from the point cloud. @@ -58,17 +69,8 @@ where `dim` is the dimension of the homological feature, `birth` and `death` are * `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. * `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the weak witness complex we want to compute. -*Example* -`strong_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.06` +**Example** -outputs: -``` -Successfully read 1307 points. -Ambient dimension is 3. -The complex contains 1836 simplices and has dimension 8 -11 0 0 inf -11 1 0.00674748 inf -11 2 0.0937751 0.235354 -``` +`strong_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.06` N.B.: output is random as the 20 landmarks are chosen randomly. 
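As an aside, the `p dim birth death` diagram files written by the persistence utilities above are plain text and easy to post-process. The following is a minimal Python sketch (not part of these patches; the input file name is hypothetical) that groups the intervals by dimension and handles infinite death times:

```python
from collections import defaultdict

def read_persistence_diagram(path):
    """Return {dimension: [(birth, death), ...]} from a 'p dim birth death' file."""
    diagram = defaultdict(list)
    with open(path) as f:
        for line in f:
            fields = line.split()
            if len(fields) != 4:
                continue  # skip blank or malformed lines
            # fields = [p, dim, birth, death]; the field characteristic p is ignored here
            dim = int(fields[1])
            birth, death = float(fields[2]), float(fields[3])  # float("inf") parses the "inf" token
            diagram[dim].append((birth, death))
    return dict(diagram)

diag = read_persistence_diagram("tore3D_1307_diagram.txt")  # hypothetical output file
for dim in sorted(diag):
    print("%d interval(s) in dimension %d" % (len(diag[dim]), dim))
```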
diff --git a/src/common/utilities/README b/src/common/utilities/README index 18fa8cc4..f39c63b8 100644 --- a/src/common/utilities/README +++ b/src/common/utilities/README @@ -1,19 +1,43 @@ -# Pointset generator # +--- +layout: page +title: "Pointset generator" +meta_title: "pointsetgenerator" +subheadline: "" +teaser: "" +permalink: "/pointsetgenerator/" +--- +{::comment} +These flags above are here for web site generation, please let them. +cf. https://gitlab.inria.fr/GUDHI/website +Must be in conformity with _data/navigation.yml +{:/comment} -## `off_file_from_shape_generator` ## +## off_file_from_shape_generator ## Generates a pointset and save it in an OFF file. Command-line is: -`off_file_from_shape_generator on|in sphere|cube|curve|torus|klein ...` + +``` +off_file_from_shape_generator on|in sphere|cube|curve|torus|klein ... +``` Warning: "on cube" generator is not available! -Examples: +**Examples** + +``` +off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2 +``` + +* Generates an onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2. + +``` +off_file_from_shape_generator in sphere inSphere.off 100 2 +``` + +* Generates an inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default). -* Generate an onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2: -`off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2` - -* Generate an inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default): -`off_file_from_shape_generator in sphere inSphere.off 100 2` +``` +off_file_from_shape_generator in cube inCube.off 10000 3 5.8 +``` -* Generates a inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8: -`off_file_from_shape_generator in cube inCube.off 10000 3 5.8` +* Generates a inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8. -- cgit v1.2.3 From b92ea601f15a239957ec6beba78ee2deee3d32d6 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 30 Jan 2018 08:54:00 +0000 Subject: Fix --v to -v for Cover complex git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3182 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 46e36af1a3e9aae8f1b12d2ee7fc749eb531f505 --- src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 8 ++++---- src/Nerve_GIC/example/CoordGIC.cpp | 4 ++-- src/Nerve_GIC/example/FuncGIC.cpp | 4 ++-- src/Nerve_GIC/utilities/Nerve.cpp | 4 ++-- src/Nerve_GIC/utilities/VoronoiGIC.cpp | 4 ++-- src/Nerve_GIC/utilities/covercomplex.md | 8 ++++---- .../doc/Persistence_representations_doc.h | 2 +- 7 files changed, 17 insertions(+), 17 deletions(-) (limited to 'src/Nerve_GIC/utilities') diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h index 474f0f0e..2b648425 100644 --- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h +++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h @@ -70,7 +70,7 @@ namespace cover_complex { * * When launching: * - * \code $> ./Nerve ../../data/points/human.off 2 10 0.3 --v + * \code $> ./Nerve ../../data/points/human.off 2 10 0.3 -v * \endcode * * the program output is: @@ -113,7 +113,7 @@ namespace cover_complex { * * When launching: * - * \code $> ./VoronoiGIC ../../data/points/human.off 700 --v + * \code $> ./VoronoiGIC ../../data/points/human.off 700 -v * \endcode * * the program outputs SC.off. Using e.g. 
@@ -146,7 +146,7 @@ namespace cover_complex { * * When launching: * - * \code $> ./CoordGIC ../../data/points/KleinBottle5D.off 0 --v + * \code $> ./CoordGIC ../../data/points/KleinBottle5D.off 0 -v * \endcode * * the program outputs SC.dot. Using e.g. @@ -169,7 +169,7 @@ namespace cover_complex { * * When launching: * - * \code $> ./FuncGIC ../../data/points/COIL_database/lucky_cat.off ../../data/points/COIL_database/lucky_cat_PCA1 --v + * \code $> ./FuncGIC ../../data/points/COIL_database/lucky_cat.off ../../data/points/COIL_database/lucky_cat_PCA1 -v * \endcode * * the program outputs again SC.dot which gives the following visualization after using neato: diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp index c03fcbb3..c92cf235 100644 --- a/src/Nerve_GIC/example/CoordGIC.cpp +++ b/src/Nerve_GIC/example/CoordGIC.cpp @@ -27,8 +27,8 @@ void usage(int nbArgs, char *const progName) { std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off coordinate [--v] \n"; - std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 --v \n"; + std::cerr << "Usage: " << progName << " filename.off coordinate [-v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 -v \n"; exit(-1); // ----- >> } diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp index 3762db4e..cb0f0d63 100644 --- a/src/Nerve_GIC/example/FuncGIC.cpp +++ b/src/Nerve_GIC/example/FuncGIC.cpp @@ -27,9 +27,9 @@ void usage(int nbArgs, char *const progName) { std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off function [--v] \n"; + std::cerr << "Usage: " << progName << " filename.off function [-v] \n"; std::cerr << " i.e.: " << progName << " ../../data/points/COIL_database/lucky_cat.off " - "../../data/points/COIL_database/lucky_cat_PCA1 --v \n"; + "../../data/points/COIL_database/lucky_cat_PCA1 -v \n"; exit(-1); // ----- >> } diff --git a/src/Nerve_GIC/utilities/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp index 6abdedc7..aefc3874 100644 --- a/src/Nerve_GIC/utilities/Nerve.cpp +++ b/src/Nerve_GIC/utilities/Nerve.cpp @@ -27,8 +27,8 @@ void usage(int nbArgs, char *const progName) { std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n"; - std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n"; + std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [-v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 -v \n"; exit(-1); // ----- >> } diff --git a/src/Nerve_GIC/utilities/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp index 32431cc2..54bb871e 100644 --- a/src/Nerve_GIC/utilities/VoronoiGIC.cpp +++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp @@ -27,8 +27,8 @@ void usage(int nbArgs, char *const progName) { std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; - std::cerr << "Usage: " << progName << " filename.off N [--v] \n"; - std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n"; + std::cerr << "Usage: " << progName << " filename.off N [-v] \n"; + std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 -v \n"; exit(-1); // ----- >> } diff --git a/src/Nerve_GIC/utilities/covercomplex.md 
b/src/Nerve_GIC/utilities/covercomplex.md index 692e44e7..41489b66 100644 --- a/src/Nerve_GIC/utilities/covercomplex.md +++ b/src/Nerve_GIC/utilities/covercomplex.md @@ -27,14 +27,14 @@ Finally, the next ne lines represent the edges, characterized by the ID of their **Usage** -`Nerve coordinate resolution gain [--v]` +`Nerve coordinate resolution gain [-v]` where * `coordinate` is the coordinate function to cover * `resolution` is the number of the intervals * `gain` is the gain for each interval -* `--v` is optional, it activates verbose mode. +* `-v` is optional, it activates verbose mode. **Example** @@ -57,12 +57,12 @@ The program also writes a file `*_sc.off`, that is an OFF file that can be visua **Usage** -`VoroniGIC samples_number [--v]` +`VoroniGIC samples_number [-v]` where * `samples_number` is the number of samples to take from the point cloud -* `--v` is optional, it activates verbose mode. +* `-v` is optional, it activates verbose mode. **Example** diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h index d781211a..38bd3a21 100644 --- a/src/Persistence_representations/doc/Persistence_representations_doc.h +++ b/src/Persistence_representations/doc/Persistence_representations_doc.h @@ -67,7 +67,7 @@ namespace Persistence_representations { \li Concept of representation of persistence that allows computations of distances. \li Concept of representation of persistence that allows computations of scalar products. \li Concept of representation of persistence that allows vectorization. - \li Concept of representation of persistence that allows computations of real--valued characteristics of objects. + \li Concept of representation of persistence that allows computations of real-valued characteristics of objects. 
At the moment an implementation of the following representations of persistence are available (further details of -- cgit v1.2.3 From 0fe356ae3a1071950382ab308b03fd4a647ae796 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 30 Jan 2018 15:39:37 +0000 Subject: Copy test files in build directory for not to pollute sources git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3185 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 4926f0a4e3c02ce77bdbf5847d421b6152a24a65 --- src/Nerve_GIC/example/CMakeLists.txt | 11 ++++++++--- src/Nerve_GIC/utilities/CMakeLists.txt | 6 ++++-- 2 files changed, 12 insertions(+), 5 deletions(-) (limited to 'src/Nerve_GIC/utilities') diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt index 434637fa..f2626927 100644 --- a/src/Nerve_GIC/example/CMakeLists.txt +++ b/src/Nerve_GIC/example/CMakeLists.txt @@ -11,11 +11,16 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) target_link_libraries(FuncGIC ${TBB_LIBRARIES}) endif() + # Copy files for not to pollute sources when testing + file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0") + "tore3D_1307.off" "0") add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off" - "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1") + "lucky_cat.off" + "lucky_cat_PCA1") endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt index a0508dc2..7762c8a0 100644 --- a/src/Nerve_GIC/utilities/CMakeLists.txt +++ b/src/Nerve_GIC/utilities/CMakeLists.txt @@ -12,11 +12,13 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) endif() file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + # Copy files for not to pollute sources when testing + file(COPY "${CMAKE_SOURCE_DIR}/data/points/human.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) add_test(NAME Nerve_GIC_utilities_nerve COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3") + "human.off" "2" "10" "0.3") add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $ - "${CMAKE_SOURCE_DIR}/data/points/human.off" "100") + "human.off" "100") endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) -- cgit v1.2.3 From 37ded65d83935d417fc3bc484594a83b36f63514 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 30 Jan 2018 17:05:03 +0000 Subject: Fix after web review git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3189 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: fd42bab9b27a3c086adeaf75068c83a911ede000 --- src/Alpha_complex/utilities/alphacomplex.md | 100 +++++++++------------ .../utilities/cubicalcomplex.md | 17 +--- .../utilities/bottleneckdistance.md | 16 +--- src/Nerve_GIC/utilities/covercomplex.md | 15 +--- src/Rips_complex/utilities/ripscomplex.md | 16 +--- src/Witness_complex/utilities/witnesscomplex.md | 22 ++--- src/common/utilities/pointsetgenerator.md | 16 +--- 7 files changed, 62 insertions(+), 140 deletions(-) (limited to 'src/Nerve_GIC/utilities') diff --git a/src/Alpha_complex/utilities/alphacomplex.md b/src/Alpha_complex/utilities/alphacomplex.md index 
0e500e53..aace85d3 100644 --- a/src/Alpha_complex/utilities/alphacomplex.md +++ b/src/Alpha_complex/utilities/alphacomplex.md @@ -1,18 +1,47 @@ ---- -layout: page -title: "Alpha complex" -meta_title: "alphacomplex" -subheadline: "" -teaser: "" -permalink: "/alphacomplex/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} +# Alpha complex # + + +## alpha_complex_persistence ## + +This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. +The output diagram contains one bar per line, written with the convention: + +``` + p dim birth death +``` + +where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, +and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number). + +**Usage** + +``` + alpha_complex_persistence [options] +``` + +where +`` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). + +**Allowed options** + +* `-h [ --help ]` Produce help message +* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output. +* `-r [ --max-alpha-square-value ]` (default = inf) Maximal alpha square value for the Alpha complex construction. +* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology. +* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. + +**Example** + +``` + alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off +``` + +N.B.: + +* Filtration values are alpha square values. + ## alpha_complex_3d_persistence ## This program computes the persistent homology with coefficient field Z/pZ of the 3D alpha complex built from a 3D point cloud. The output diagram contains one bar per line, written with the convention: @@ -107,7 +136,7 @@ where * `` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). * `` is the path to the file describing the periodic domain. It must be in the format described -[here](../doc/latest/fileformats.html#FileFormatsIsoCuboid). +[here](/doc/latest/fileformats.html#FileFormatsIsoCuboid). **Allowed options** @@ -125,46 +154,5 @@ periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off N.B.: -* Cuboid file must be in the format described [here](../doc/latest/fileformats.html#FileFormatsIsoCuboid). -* Filtration values are alpha square values. - - - -## alpha_complex_persistence ## - -This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. -The output diagram contains one bar per line, written with the convention: - -``` - p dim birth death -``` - -where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, -and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number). - -**Usage** - -``` - alpha_complex_persistence [options] -``` - -where -`` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html). 
- -**Allowed options** - -* `-h [ --help ]` Produce help message -* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output. -* `-r [ --max-alpha-square-value ]` (default = inf) Maximal alpha square value for the Alpha complex construction. -* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology. -* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals. - -**Example** - -``` - alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off -``` - -N.B.: - +* Cuboid file must be in the format described [here](/doc/latest/fileformats.html#FileFormatsIsoCuboid). * Filtration values are alpha square values. diff --git a/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md b/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md index 3a9e6aac..6e1b2578 100644 --- a/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md +++ b/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md @@ -1,21 +1,10 @@ ---- -layout: page -title: "Bitmap cubical complex" -meta_title: "cubicalcomplex" -subheadline: "" -teaser: "" -permalink: "/cubicalcomplex/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} +# Cubical complex# + ## cubical_complex_persistence ## This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical complexes provided in text files in Perseus style. -See [here](../doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format. +See [here](/doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format. **Example** diff --git a/src/Bottleneck_distance/utilities/bottleneckdistance.md b/src/Bottleneck_distance/utilities/bottleneckdistance.md index e9079058..526f5822 100644 --- a/src/Bottleneck_distance/utilities/bottleneckdistance.md +++ b/src/Bottleneck_distance/utilities/bottleneckdistance.md @@ -1,18 +1,6 @@ ---- -layout: page -title: "Bottleneck distance" -meta_title: "bottleneckdistance" -subheadline: "" -teaser: "" -permalink: "/bottleneckdistance/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} +# Bottleneck distance # ## bottleneck_read_file_example ## @@ -26,5 +14,5 @@ This program computes the Bottleneck distance between two persistence diagram fi where -* `` and `` must be in the format described [here](../doc/latest/fileformats.html#FileFormatsPers). +* `` and `` must be in the format described [here](/doc/latest/fileformats.html#FileFormatsPers). * `` is an error bound on the bottleneck distance (set by default to the smallest positive double value). diff --git a/src/Nerve_GIC/utilities/covercomplex.md b/src/Nerve_GIC/utilities/covercomplex.md index 41489b66..f33cb2e0 100644 --- a/src/Nerve_GIC/utilities/covercomplex.md +++ b/src/Nerve_GIC/utilities/covercomplex.md @@ -1,18 +1,7 @@ ---- -layout: page -title: "Cover complex" -meta_title: "covercomplex" -subheadline: "" -teaser: "" -permalink: "/covercomplex/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. 
https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} +# Cover complex # + ## Nerve ## This program builds the Nerve of a point cloud sampled on an OFF file. diff --git a/src/Rips_complex/utilities/ripscomplex.md b/src/Rips_complex/utilities/ripscomplex.md index 44a37543..4291fae7 100644 --- a/src/Rips_complex/utilities/ripscomplex.md +++ b/src/Rips_complex/utilities/ripscomplex.md @@ -1,21 +1,9 @@ ---- -layout: page -title: "Rips complex" -meta_title: "ripscomplex" -subheadline: "" -teaser: "" -permalink: "/ripscomplex/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} +# Rips complex # ## rips_persistence ## -This program computes the persistent homology with coefficient field *Z/pZ* of a Rips complex defined on a set of input points. The output diagram contains one bar per line, written with the convention: +This program computes the persistent homology with coefficient field *Z/pZ* of a Rips complex defined on a set of input points, using Euclidean distance. The output diagram contains one bar per line, written with the convention: `p dim birth death` diff --git a/src/Witness_complex/utilities/witnesscomplex.md b/src/Witness_complex/utilities/witnesscomplex.md index 40864871..2341759b 100644 --- a/src/Witness_complex/utilities/witnesscomplex.md +++ b/src/Witness_complex/utilities/witnesscomplex.md @@ -1,19 +1,9 @@ ---- -layout: page -title: "Witness complex" -meta_title: "witnesscomplex" -subheadline: "" -teaser: "" -permalink: "/witnesscomplex/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} - - -For more details about the witness complex, please read the [user manual of the package](../doc/latest/group__witness__complex.html). + + +# Witness complex # + + +For more details about the witness complex, please read the [user manual of the package](/doc/latest/group__witness__complex.html). ## weak_witness_persistence ## This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points. diff --git a/src/common/utilities/pointsetgenerator.md b/src/common/utilities/pointsetgenerator.md index f39c63b8..284715d4 100644 --- a/src/common/utilities/pointsetgenerator.md +++ b/src/common/utilities/pointsetgenerator.md @@ -1,16 +1,6 @@ ---- -layout: page -title: "Pointset generator" -meta_title: "pointsetgenerator" -subheadline: "" -teaser: "" -permalink: "/pointsetgenerator/" ---- -{::comment} -These flags above are here for web site generation, please let them. -cf. 
https://gitlab.inria.fr/GUDHI/website -Must be in conformity with _data/navigation.yml -{:/comment} + + +# common # ## off_file_from_shape_generator ## -- cgit v1.2.3 From 265484997185f3bf900744406206a2d64ca0a20d Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 2 Feb 2018 14:15:49 +0000 Subject: Fix after version release of Cover complex and Persistence representation installations Fix utils download link git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@3211 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 9596df8b0d0cdc3d8df79aa367a26b34a96d7f5e --- src/Nerve_GIC/example/CMakeLists.txt | 3 +++ src/Nerve_GIC/utilities/CMakeLists.txt | 4 ++++ src/Persistence_representations/example/CMakeLists.txt | 8 +++++--- src/Persistence_representations/utilities/CMakeLists.txt | 6 ++++++ .../utilities/persistence_intervals/CMakeLists.txt | 6 ++++++ src/common/doc/header.html | 4 ++-- src/cython/doc/_templates/layout.html | 4 ++-- 7 files changed, 28 insertions(+), 7 deletions(-) (limited to 'src/Nerve_GIC/utilities') diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt index f2626927..542c6af4 100644 --- a/src/Nerve_GIC/example/CMakeLists.txt +++ b/src/Nerve_GIC/example/CMakeLists.txt @@ -23,4 +23,7 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) "lucky_cat.off" "lucky_cat_PCA1") + install(TARGETS CoordGIC DESTINATION bin) + install(TARGETS FuncGIC DESTINATION bin) + endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt index 7762c8a0..7a838a8c 100644 --- a/src/Nerve_GIC/utilities/CMakeLists.txt +++ b/src/Nerve_GIC/utilities/CMakeLists.txt @@ -21,4 +21,8 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1) add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $ "human.off" "100") + install(TARGETS Nerve DESTINATION bin) + install(TARGETS VoronoiGIC DESTINATION bin) + install(FILES KeplerMapperVisuFromTxtFile.py km.py km.py.COPYRIGHT DESTINATION bin) + endif (NOT CGAL_VERSION VERSION_LESS 4.8.1) diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt index eb3258f8..54d719ac 100644 --- a/src/Persistence_representations/example/CMakeLists.txt +++ b/src/Persistence_representations/example/CMakeLists.txt @@ -4,24 +4,26 @@ project(Persistence_representations_example) add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp ) add_test(NAME Persistence_representations_example_landscape_on_grid COMMAND $) +install(TARGETS Persistence_representations_example_landscape_on_grid DESTINATION bin) add_executable ( Persistence_representations_example_landscape persistence_landscape.cpp ) add_test(NAME Persistence_representations_example_landscape COMMAND $) +install(TARGETS Persistence_representations_example_landscape DESTINATION bin) add_executable ( Persistence_representations_example_intervals persistence_intervals.cpp ) add_test(NAME Persistence_representations_example_intervals COMMAND $ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers") +install(TARGETS Persistence_representations_example_intervals DESTINATION bin) add_executable ( Persistence_representations_example_vectors persistence_vectors.cpp ) add_test(NAME Persistence_representations_example_vectors COMMAND $) +install(TARGETS Persistence_representations_example_vectors DESTINATION bin) add_executable ( Persistence_representations_example_heat_maps persistence_heat_maps.cpp ) add_test(NAME 
Persistence_representations_example_heat_maps COMMAND $) - - - +install(TARGETS Persistence_representations_example_heat_maps DESTINATION bin) diff --git a/src/Persistence_representations/utilities/CMakeLists.txt b/src/Persistence_representations/utilities/CMakeLists.txt index 137eb0c1..fc51b1d6 100644 --- a/src/Persistence_representations/utilities/CMakeLists.txt +++ b/src/Persistence_representations/utilities/CMakeLists.txt @@ -10,6 +10,8 @@ function(add_persistence_representation_creation_utility creation_utility) add_test(NAME Persistence_representation_utilities_${creation_utility} COMMAND $ ${ARGN} "${CMAKE_CURRENT_BINARY_DIR}/../first.pers" "${CMAKE_CURRENT_BINARY_DIR}/../second.pers") + + install(TARGETS ${creation_utility} DESTINATION bin) endfunction(add_persistence_representation_creation_utility) function(add_persistence_representation_plot_utility plot_utility tool_extension) @@ -26,6 +28,8 @@ function(add_persistence_representation_plot_utility plot_utility tool_extension #add_test(NAME Persistence_representation_utilities_${plot_utility}_second_gnuplot COMMAND ${GNUPLOT_PATH} # "-e" "load '${CMAKE_CURRENT_BINARY_DIR}/../second.pers${tool_extension}_GnuplotScript'") endif() + + install(TARGETS ${plot_utility} DESTINATION bin) endfunction(add_persistence_representation_plot_utility) function(add_persistence_representation_function_utility function_utility tool_extension) @@ -44,6 +48,8 @@ function(add_persistence_representation_function_utility function_utility tool_e "${CMAKE_CURRENT_BINARY_DIR}/../first.pers${tool_extension}" "${CMAKE_CURRENT_BINARY_DIR}/../second.pers${tool_extension}") endif() + + install(TARGETS ${function_utility} DESTINATION bin) endfunction(add_persistence_representation_function_utility) add_subdirectory(persistence_heat_maps) diff --git a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt index 897e12a3..875ff45e 100644 --- a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt +++ b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt @@ -7,6 +7,8 @@ add_executable ( plot_histogram_of_intervals_lengths plot_histogram_of_intervals add_test(NAME plot_histogram_of_intervals_lengths COMMAND $ "${CMAKE_CURRENT_BINARY_DIR}/../first.pers" "-1") +install(TARGETS plot_histogram_of_intervals_lengths DESTINATION bin) + add_persistence_representation_plot_utility(plot_persistence_intervals "") add_persistence_representation_plot_utility(plot_persistence_Betti_numbers "") @@ -18,6 +20,8 @@ add_test(NAME Persistence_representation_utilities_compute_number_of_dominant_in COMMAND $ "${CMAKE_CURRENT_BINARY_DIR}/../first.pers" "-1" "2") +install(TARGETS compute_number_of_dominant_intervals DESTINATION bin) + if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) add_executable ( compute_bottleneck_distance compute_bottleneck_distance.cpp ) @@ -29,4 +33,6 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) "-1" "${CMAKE_CURRENT_BINARY_DIR}/../first.pers" "${CMAKE_CURRENT_BINARY_DIR}/../second.pers") + + install(TARGETS compute_bottleneck_distance DESTINATION bin) endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1) diff --git a/src/common/doc/header.html b/src/common/doc/header.html index 9c514381..d69b28fa 100644 --- a/src/common/doc/header.html +++ b/src/common/doc/header.html @@ -56,8 +56,8 @@ $extrastylesheet
[header.html hunk body lost to HTML extraction; per the commit message and diffstat it rewrites the two utilities download-link lines]
diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html
index 8e4eba40..c9356116 100644
--- a/src/cython/doc/_templates/layout.html
+++ b/src/cython/doc/_templates/layout.html
@@ -198,8 +198,8 @@
[layout.html hunk body lost to HTML extraction; it applies the same download-link fix]
-- cgit v1.2.3
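The utilities and tests touched in these patches exchange point clouds as OFF / nOFF ASCII files: the CMake changes above copy inputs such as `human.off`, `tore3D_1307.off` or `lucky_cat.off` next to the test binaries, and `off_file_from_shape_generator` writes files in the same format. As a rough illustration, here is a sketch that is not part of GUDHI, covers only plain `OFF` and `nOFF` headers without comment lines, and uses `human.off` purely as a placeholder path:

```python
#!/usr/bin/env python
# Minimal sketch (not part of GUDHI): load the vertex coordinates of an
# ASCII OFF / nOFF point cloud such as the data files used by the tests
# in these patches. Assumptions: no comment lines, and for "nOFF" the
# ambient dimension immediately follows the header keyword.

def read_off_points(path):
    with open(path) as f:
        tokens = f.read().split()
    if not tokens or not tokens[0].upper().endswith("OFF"):
        raise ValueError("%s does not look like an OFF file" % path)
    pos = 1
    if tokens[0].upper().startswith("N"):     # "nOFF": explicit dimension
        dim = int(tokens[pos]); pos += 1
    else:                                     # plain "OFF": 3D points
        dim = 3
    n_vertices = int(tokens[pos])             # face and edge counts are skipped
    pos += 3
    points = []
    for i in range(n_vertices):
        coords = tokens[pos + i * dim: pos + (i + 1) * dim]
        points.append([float(c) for c in coords])
    return points

if __name__ == "__main__":
    pts = read_off_points("human.off")        # placeholder path
    print("read %d points in dimension %d" % (len(pts), len(pts[0]) if pts else 0))
```

Since vertex coordinates come before any face records in an OFF file, reading only the vertex block is enough for point-cloud inputs, where the face and edge counts are typically zero anyway.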