Diffstat (limited to 'pyspike/distances.py')
-rw-r--r--  pyspike/distances.py  77
1 file changed, 39 insertions(+), 38 deletions(-)
diff --git a/pyspike/distances.py b/pyspike/distances.py
index db04c4e..b2eec92 100644
--- a/pyspike/distances.py
+++ b/pyspike/distances.py
@@ -17,7 +17,7 @@ from pyspike import PieceWiseConstFunc, PieceWiseLinFunc
# isi_distance
############################################################
def isi_distance(spikes1, spikes2):
- """ Computes the instantaneous isi-distance S_isi (t) of the two given
+ """ Computes the instantaneous isi-distance S_isi (t) of the two given
spike trains. The spike trains are expected to have auxiliary spikes at the
beginning and end of the interval. Use the function add_auxiliary_spikes to
add those spikes to the spike train.
@@ -27,9 +27,9 @@ def isi_distance(spikes1, spikes2):
- PieceWiseConstFunc describing the isi-distance.
"""
# check for auxiliary spikes - first and last spikes should be identical
- assert spikes1[0]==spikes2[0], \
+ assert spikes1[0] == spikes2[0], \
"Given spike trains seems not to have auxiliary spikes!"
- assert spikes1[-1]==spikes2[-1], \
+ assert spikes1[-1] == spikes2[-1], \
"Given spike trains seems not to have auxiliary spikes!"
# cython implementation
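The auxiliary-spike convention these assertions enforce is easy to satisfy by hand. A minimal sketch (the spike times and the interval [0, 1] are made up for illustration; spike_distance below expects the same convention):

    import numpy as np
    from pyspike.distances import isi_distance

    # Auxiliary spikes at the interval edges 0.0 and 1.0 make the first and
    # last entries of both trains identical, so the assertions above pass.
    spikes1 = np.array([0.0, 0.1, 0.3, 0.7, 1.0])
    spikes2 = np.array([0.0, 0.2, 0.5, 0.9, 1.0])
    f = isi_distance(spikes1, spikes2)  # PieceWiseConstFunc, per the docstring

In practice the docstring recommends add_auxiliary_spikes to do this padding.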
@@ -53,9 +53,9 @@ def spike_distance(spikes1, spikes2):
- PieceWiseLinFunc describing the spike-distance.
"""
# check for auxiliary spikes - first and last spikes should be identical
- assert spikes1[0]==spikes2[0], \
+ assert spikes1[0] == spikes2[0], \
"Given spike trains seems not to have auxiliary spikes!"
- assert spikes1[-1]==spikes2[-1], \
+ assert spikes1[-1] == spikes2[-1], \
"Given spike trains seems not to have auxiliary spikes!"
# cython implementation
@@ -74,33 +74,33 @@ def multi_distance(spike_trains, pair_distance_func, indices=None):
use isi_distance_multi or spike_distance_multi instead.
Computes the multi-variate distance for a set of spike-trains using the
- pair_dist_func to compute pair-wise distances. That is, it computes the
+ pair_dist_func to compute pair-wise distances. That is, it computes the
average distance of all pairs of spike-trains:
- S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j},
+ S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j},
where the sum goes over all pairs <i,j>.
Args:
- spike_trains: list of spike trains
- pair_distance_func: function computing the distance of two spike trains
- - indices: list of indices defining which spike trains to use,
+ - indices: list of indices defining which spike trains to use,
if None all given spike trains are used (default=None)
Returns:
- The averaged multi-variate distance of all pairs
"""
- if indices==None:
+ if indices is None:
indices = np.arange(len(spike_trains))
indices = np.array(indices)
# check validity of indices
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
+ "Invalid index list."
# generate a list of possible index pairs
- pairs = [(i,j) for i in indices for j in indices[i+1:]]
+ pairs = [(i, j) for i in indices for j in indices[i+1:]]
# start with first pair
- (i,j) = pairs[0]
+ (i, j) = pairs[0]
average_dist = pair_distance_func(spike_trains[i], spike_trains[j])
- for (i,j) in pairs[1:]:
+ for (i, j) in pairs[1:]:
current_dist = pair_distance_func(spike_trains[i], spike_trains[j])
- average_dist.add(current_dist) # add to the average
- average_dist.mul_scalar(1.0/len(pairs)) # normalize
+ average_dist.add(current_dist) # add to the average
+ average_dist.mul_scalar(1.0/len(pairs)) # normalize
return average_dist
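Since the comprehension pairs each index only with later ones, len(pairs) is N(N-1)/2, so mul_scalar(1.0/len(pairs)) realizes exactly the 2/(N(N-1)) prefactor from the docstring. A small sketch of the pair bookkeeping (N = 4 chosen arbitrarily; note that the slice indices[i+1:] uses the index value i as a position, so the enumeration is only exhaustive when indices is 0..N-1):

    indices = list(range(4))                                  # N = 4
    pairs = [(i, j) for i in indices for j in indices[i+1:]]
    # [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
    assert len(pairs) == 4 * 3 // 2     # N(N-1)/2 = 6 pairs
    # 1.0/len(pairs) == 2.0/(4*3): the docstring's 2/(N(N-1)) normalization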
@@ -113,45 +113,46 @@ def multi_distance_par(spike_trains, pair_distance_func, indices=None):
"""
num_threads = 2
-
lock = threading.Lock()
+
def run(spike_trains, index_pairs, average_dist):
- (i,j) = index_pairs[0]
+ (i, j) = index_pairs[0]
# print(i,j)
this_avrg = pair_distance_func(spike_trains[i], spike_trains[j])
- for (i,j) in index_pairs[1:]:
+ for (i, j) in index_pairs[1:]:
# print(i,j)
current_dist = pair_distance_func(spike_trains[i], spike_trains[j])
this_avrg.add(current_dist)
with lock:
- average_dist.add(this_avrg)
+ average_dist.add(this_avrg)
- if indices==None:
+ if indices is None:
indices = np.arange(len(spike_trains))
indices = np.array(indices)
# check validity of indices
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
+ "Invalid index list."
# generate a list of possible index pairs
- pairs = [(i,j) for i in indices for j in indices[i+1:]]
+ pairs = [(i, j) for i in indices for j in indices[i+1:]]
num_pairs = len(pairs)
# start with first pair
- (i,j) = pairs[0]
+ (i, j) = pairs[0]
average_dist = pair_distance_func(spike_trains[i], spike_trains[j])
# remove the one we already computed
pairs = pairs[1:]
# distribute the rest into num_threads pieces
- clustered_pairs = [ pairs[i::num_threads] for i in xrange(num_threads) ]
+ clustered_pairs = [pairs[n::num_threads] for n in xrange(num_threads)]
threads = []
for pairs in clustered_pairs:
- t = threading.Thread(target=run, args=(spike_trains, pairs, average_dist))
+ t = threading.Thread(target=run, args=(spike_trains, pairs,
+ average_dist))
threads.append(t)
t.start()
for t in threads:
t.join()
- average_dist.mul_scalar(1.0/num_pairs) # normalize
+ average_dist.mul_scalar(1.0/num_pairs) # normalize
return average_dist
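The slice pairs[n::num_threads] deals the remaining pairs out round-robin, one strided slice per thread. A sketch of the split for the six pairs of the N = 4 example above (the first pair is consumed before the split, as in the code):

    pairs = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)][1:]
    clustered_pairs = [pairs[n::2] for n in range(2)]  # num_threads = 2
    # thread 0 works on [(0, 2), (1, 2), (2, 3)]
    # thread 1 works on [(0, 3), (1, 3)]

Note that with CPython's global interpreter lock the threads only run concurrently if pair_distance_func releases the GIL, e.g. in a cython backend; with a pure-Python distance function this buys no speedup.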
@@ -161,11 +162,11 @@ def multi_distance_par(spike_trains, pair_distance_func, indices=None):
def isi_distance_multi(spike_trains, indices=None):
""" computes the multi-variate isi-distance for a set of spike-trains. That
is the average isi-distance of all pairs of spike-trains:
- S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j},
+ S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j},
where the sum goes over all pairs <i,j>
Args:
- spike_trains: list of spike trains
- - indices: list of indices defining which spike trains to use,
+ - indices: list of indices defining which spike trains to use,
if None all given spike trains are used (default=None)
Returns:
- A PieceWiseConstFunc representing the averaged isi distance S
@@ -177,13 +178,13 @@ def isi_distance_multi(spike_trains, indices=None):
# spike_distance_multi
############################################################
def spike_distance_multi(spike_trains, indices=None):
- """ computes the multi-variate spike-distance for a set of spike-trains.
+ """ computes the multi-variate spike-distance for a set of spike-trains.
That is the average spike-distance of all pairs of spike-trains:
- S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j},
+ S(t) = 2/(N(N-1)) sum_{<i,j>} S_{i,j},
where the sum goes over all pairs <i,j>
Args:
- spike_trains: list of spike trains
- - indices: list of indices defining which spike-trains to use,
+ - indices: list of indices defining which spike-trains to use,
if None all given spike trains are used (default=None)
Returns:
- A PieceWiseLinFunc representing the averaged spike distance S
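Both wrappers share the calling convention of multi_distance. A minimal usage sketch (spike_trains stands for a hypothetical list of trains that already carry auxiliary spikes):

    from pyspike.distances import isi_distance_multi, spike_distance_multi

    avrg_isi = isi_distance_multi(spike_trains)            # PieceWiseConstFunc
    avrg_spike = spike_distance_multi(spike_trains,
                                      indices=[0, 1, 3])   # subset of trains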
@@ -198,21 +199,21 @@ def isi_distance_matrix(spike_trains, indices=None):
- indices: list of indices defining which spike-trains to use
if None all given spike-trains are used (default=None)
Return:
- - a 2D array of size len(indices)*len(indices) containing the average
+ - a 2D array of size len(indices)*len(indices) containing the average
pair-wise isi-distance
"""
- if indices==None:
+ if indices is None:
indices = np.arange(len(spike_trains))
indices = np.array(indices)
# check validity of indices
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
+ "Invalid index list."
# generate a list of possible index pairs
- pairs = [(i,j) for i in indices for j in indices[i+1:]]
+ pairs = [(i, j) for i in indices for j in indices[i+1:]]
distance_matrix = np.zeros((len(indices), len(indices)))
- for i,j in pairs:
+ for i, j in pairs:
d = isi_distance(spike_trains[i], spike_trains[j]).abs_avrg()
- distance_matrix[i,j] = d
- distance_matrix[j,i] = d
+ distance_matrix[i, j] = d
+ distance_matrix[j, i] = d
return distance_matrix
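Because the loop writes distance_matrix[i, j] and distance_matrix[j, i] for every pair with i < j and never touches the diagonal, the result is symmetric with zeros on the diagonal. A quick check (spike_trains again hypothetical):

    import numpy as np
    from pyspike.distances import isi_distance_matrix

    m = isi_distance_matrix(spike_trains)
    assert np.allclose(m, m.T)        # symmetric by construction
    assert np.all(np.diag(m) == 0.0)  # self-distances are never filled in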