author    Mario Mulansky <mario.mulansky@gmx.net>  2014-10-13 10:47:18 +0200
committer Mario Mulansky <mario.mulansky@gmx.net>  2014-10-13 10:47:18 +0200
commit    4274c328a4927b392036d1c3b759b0787b05f300 (patch)
tree      37a4f331006c63e7155bfb4c083c7e149f567eb8
parent    ef15a482604d8ce9bef094d470d8a905c6da49a0 (diff)
code formatting following PEP8
-rw-r--r--  examples/isi_matrix.py     |  2
-rw-r--r--  examples/plot.py           |  6
-rw-r--r--  pyspike/distances.py       | 77
-rw-r--r--  pyspike/function.py        | 35
-rw-r--r--  pyspike/python_backend.py  | 72
-rw-r--r--  pyspike/spikes.py          | 48
-rw-r--r--  test/test_distance.py      | 37
-rw-r--r--  test/test_function.py      | 28
-rw-r--r--  test/test_spikes.py        | 27
9 files changed, 168 insertions(+), 164 deletions(-)
diff --git a/examples/isi_matrix.py b/examples/isi_matrix.py
index 2a4d075..db740dd 100644
--- a/examples/isi_matrix.py
+++ b/examples/isi_matrix.py
@@ -11,7 +11,6 @@ Distributed under the MIT License (MIT)
from __future__ import print_function
-import numpy as np
import matplotlib.pyplot as plt
import pyspike as spk
@@ -25,4 +24,3 @@ m = spk.isi_distance_matrix(spike_trains)
plt.imshow(m, interpolation='none')
plt.show()
-
diff --git a/examples/plot.py b/examples/plot.py
index 4ff75c4..5c3ad4a 100644
--- a/examples/plot.py
+++ b/examples/plot.py
@@ -15,11 +15,11 @@ import matplotlib.pyplot as plt
import pyspike as spk
-spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
# plot the spike time
-for (i,spikes) in enumerate(spike_trains):
+for (i, spikes) in enumerate(spike_trains):
plt.plot(spikes, i*np.ones_like(spikes), 'o')
f = spk.isi_distance(spike_trains[0], spike_trains[1])
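
For orientation, a minimal sketch of the workflow this example file demonstrates (assuming the PySpike_testdata.txt file shipped with the examples is present):

    import matplotlib.pyplot as plt
    import pyspike as spk

    # load the trains and add auxiliary spikes at the interval edges 0 and 4000
    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
                                                  time_interval=(0, 4000))

    # the isi distance comes back as a piece-wise constant profile
    f = spk.isi_distance(spike_trains[0], spike_trains[1])
    x, y = f.get_plottable_data()
    plt.plot(x, y, '--k')
    plt.show()
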
diff --git a/pyspike/distances.py b/pyspike/distances.py
index db04c4e..b2eec92 100644
--- a/pyspike/distances.py
+++ b/pyspike/distances.py
@@ -17,7 +17,7 @@ from pyspike import PieceWiseConstFunc, PieceWiseLinFunc
# isi_distance
############################################################
def isi_distance(spikes1, spikes2):
- """ Computes the instantaneous isi-distance S_isi (t) of the two given
+ """ Computes the instantaneous isi-distance S_isi (t) of the two given
spike trains. The spike trains are expected to have auxiliary spikes at the
beginning and end of the interval. Use the function add_auxiliary_spikes to
add those spikes to the spike train.
@@ -27,9 +27,9 @@ def isi_distance(spikes1, spikes2):
- PieceWiseConstFunc describing the isi-distance.
"""
# check for auxiliary spikes - first and last spikes should be identical
- assert spikes1[0]==spikes2[0], \
+ assert spikes1[0] == spikes2[0], \
"Given spike trains seems not to have auxiliary spikes!"
- assert spikes1[-1]==spikes2[-1], \
+ assert spikes1[-1] == spikes2[-1], \
"Given spike trains seems not to have auxiliary spikes!"
# cython implementation
@@ -53,9 +53,9 @@ def spike_distance(spikes1, spikes2):
- PieceWiseLinFunc describing the spike-distance.
"""
# check for auxiliary spikes - first and last spikes should be identical
- assert spikes1[0]==spikes2[0], \
+ assert spikes1[0] == spikes2[0], \
"Given spike trains seems not to have auxiliary spikes!"
- assert spikes1[-1]==spikes2[-1], \
+ assert spikes1[-1] == spikes2[-1], \
"Given spike trains seems not to have auxiliary spikes!"
# cython implementation
@@ -74,33 +74,33 @@ def multi_distance(spike_trains, pair_distance_func, indices=None):
use isi_distance_multi or spike_distance_multi instead.
Computes the multi-variate distance for a set of spike-trains using the
- pair_dist_func to compute pair-wise distances. That is it computes the
+ pair_dist_func to compute pair-wise distances. That is it computes the
average distance of all pairs of spike-trains:
- S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i,j},
+ S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i,j},
where the sum goes over all pairs <i,j>.
Args:
- spike_trains: list of spike trains
- pair_distance_func: function computing the distance of two spike trains
- - indices: list of indices defining which spike trains to use,
+ - indices: list of indices defining which spike trains to use,
if None all given spike trains are used (default=None)
Returns:
- The averaged multi-variate distance of all pairs
"""
- if indices==None:
+ if indices is None:
indices = np.arange(len(spike_trains))
indices = np.array(indices)
# check validity of indices
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
+ "Invalid index list."
# generate a list of possible index pairs
- pairs = [(i,j) for i in indices for j in indices[i+1:]]
+ pairs = [(i, j) for i in indices for j in indices[i+1:]]
# start with first pair
- (i,j) = pairs[0]
+ (i, j) = pairs[0]
average_dist = pair_distance_func(spike_trains[i], spike_trains[j])
- for (i,j) in pairs[1:]:
+ for (i, j) in pairs[1:]:
current_dist = pair_distance_func(spike_trains[i], spike_trains[j])
- average_dist.add(current_dist) # add to the average
- average_dist.mul_scalar(1.0/len(pairs)) # normalize
+ average_dist.add(current_dist) # add to the average
+ average_dist.mul_scalar(1.0/len(pairs)) # normalize
return average_dist
@@ -113,45 +113,46 @@ def multi_distance_par(spike_trains, pair_distance_func, indices=None):
"""
num_threads = 2
-
lock = threading.Lock()
+
def run(spike_trains, index_pairs, average_dist):
- (i,j) = index_pairs[0]
+ (i, j) = index_pairs[0]
# print(i,j)
this_avrg = pair_distance_func(spike_trains[i], spike_trains[j])
- for (i,j) in index_pairs[1:]:
+ for (i, j) in index_pairs[1:]:
# print(i,j)
current_dist = pair_distance_func(spike_trains[i], spike_trains[j])
this_avrg.add(current_dist)
with lock:
- average_dist.add(this_avrg)
+ average_dist.add(this_avrg)
- if indices==None:
+ if indices is None:
indices = np.arange(len(spike_trains))
indices = np.array(indices)
# check validity of indices
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
+ "Invalid index list."
# generate a list of possible index pairs
- pairs = [(i,j) for i in indices for j in indices[i+1:]]
+ pairs = [(i, j) for i in indices for j in indices[i+1:]]
num_pairs = len(pairs)
# start with first pair
- (i,j) = pairs[0]
+ (i, j) = pairs[0]
average_dist = pair_distance_func(spike_trains[i], spike_trains[j])
# remove the one we already computed
pairs = pairs[1:]
# distribute the rest into num_threads pieces
- clustered_pairs = [ pairs[i::num_threads] for i in xrange(num_threads) ]
+ clustered_pairs = [pairs[n::num_threads] for n in xrange(num_threads)]
threads = []
for pairs in clustered_pairs:
- t = threading.Thread(target=run, args=(spike_trains, pairs, average_dist))
+ t = threading.Thread(target=run, args=(spike_trains, pairs,
+ average_dist))
threads.append(t)
t.start()
for t in threads:
t.join()
- average_dist.mul_scalar(1.0/num_pairs) # normalize
+ average_dist.mul_scalar(1.0/num_pairs) # normalize
return average_dist
@@ -161,11 +162,11 @@ def multi_distance_par(spike_trains, pair_distance_func, indices=None):
def isi_distance_multi(spike_trains, indices=None):
""" computes the multi-variate isi-distance for a set of spike-trains. That
is the average isi-distance of all pairs of spike-trains:
- S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i,j},
+ S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i,j},
where the sum goes over all pairs <i,j>
Args:
- spike_trains: list of spike trains
- - indices: list of indices defining which spike trains to use,
+ - indices: list of indices defining which spike trains to use,
if None all given spike trains are used (default=None)
Returns:
- A PieceWiseConstFunc representing the averaged isi distance S
@@ -177,13 +178,13 @@ def isi_distance_multi(spike_trains, indices=None):
# spike_distance_multi
############################################################
def spike_distance_multi(spike_trains, indices=None):
- """ computes the multi-variate spike-distance for a set of spike-trains.
+ """ computes the multi-variate spike-distance for a set of spike-trains.
That is the average spike-distance of all pairs of spike-trains:
- S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i,j},
+ S(t) = 2/((N(N-1)) sum_{<i,j>} S_{i, j},
where the sum goes over all pairs <i,j>
Args:
- spike_trains: list of spike trains
- - indices: list of indices defining which spike-trains to use,
+ - indices: list of indices defining which spike-trains to use,
if None all given spike trains are used (default=None)
Returns:
- A PieceWiseLinFunc representing the averaged spike distance S
@@ -198,21 +199,21 @@ def isi_distance_matrix(spike_trains, indices=None):
- indices: list of indices defining which spike-trains to use
if None all given spike-trains are used (default=None)
Return:
- - a 2D array of size len(indices)*len(indices) containing the average
+ - a 2D array of size len(indices)*len(indices) containing the average
pair-wise isi-distance
"""
- if indices==None:
+ if indices is None:
indices = np.arange(len(spike_trains))
indices = np.array(indices)
# check validity of indices
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
- "Invalid index list."
+ "Invalid index list."
# generate a list of possible index pairs
- pairs = [(i,j) for i in indices for j in indices[i+1:]]
+ pairs = [(i, j) for i in indices for j in indices[i+1:]]
distance_matrix = np.zeros((len(indices), len(indices)))
- for i,j in pairs:
+ for i, j in pairs:
d = isi_distance(spike_trains[i], spike_trains[j]).abs_avrg()
- distance_matrix[i,j] = d
- distance_matrix[j,i] = d
+ distance_matrix[i, j] = d
+ distance_matrix[j, i] = d
return distance_matrix
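
A quick sketch (with made-up spike times) of how the pair-averaging above is reached from the public API:

    import numpy as np
    import pyspike as spk

    # three toy spike trains on [0, 1], auxiliary spikes added at the edges
    t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0)
    t2 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5]), 1.0)
    t3 = spk.add_auxiliary_spikes(np.array([0.3, 0.7]), 1.0)

    # multi-variate profile: the average over all N(N-1)/2 pairs
    f = spk.isi_distance_multi([t1, t2, t3])

    # symmetric matrix of time-averaged pairwise distances
    m = spk.isi_distance_matrix([t1, t2, t3])
    print(f.abs_avrg(), m[0, 1])
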
diff --git a/pyspike/function.py b/pyspike/function.py
index 243ef67..8107538 100644
--- a/pyspike/function.py
+++ b/pyspike/function.py
@@ -1,7 +1,7 @@
""" function.py
-Module containing classes representing piece-wise constant and piece-wise linear
-functions.
+Module containing classes representing piece-wise constant and piece-wise
+linear functions.
Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
@@ -35,7 +35,7 @@ class PieceWiseConstFunc:
Args:
- other: another PieceWiseConstFunc object
Returns:
- True if the two functions are equal up to `decimal` decimals,
+ True if the two functions are equal up to `decimal` decimals,
False otherwise
"""
eps = 10.0**(-decimal)
@@ -61,23 +61,23 @@ class PieceWiseConstFunc:
""" Computes the average of the piece-wise const function:
a = 1/T int f(x) dx where T is the length of the interval.
Returns:
- - the average a.
+ - the average a.
"""
return np.sum((self.x[1:]-self.x[:-1]) * self.y) / \
(self.x[-1]-self.x[0])
def abs_avrg(self):
- """ Computes the average of the abs value of the piece-wise const
+ """ Computes the average of the abs value of the piece-wise const
function:
a = 1/T int |f(x)| dx where T is the length of the interval.
Returns:
- - the average a.
+ - the average a.
"""
return np.sum((self.x[1:]-self.x[:-1]) * np.abs(self.y)) / \
(self.x[-1]-self.x[0])
def add(self, f):
- """ Adds another PieceWiseConst function to this function.
+ """ Adds another PieceWiseConst function to this function.
Note: only functions defined on the same interval can be summed.
Args:
- f: PieceWiseConst function to be added.
@@ -87,13 +87,13 @@ class PieceWiseConstFunc:
# python implementation
# from python_backend import add_piece_wise_const_python
- # self.x, self.y = add_piece_wise_const_python(self.x, self.y, f.x, f.y)
+ # self.x, self.y = add_piece_wise_const_python(self.x, self.y,
+ # f.x, f.y)
# cython version
from cython_add import add_piece_wise_const_cython
self.x, self.y = add_piece_wise_const_cython(self.x, self.y, f.x, f.y)
-
def mul_scalar(self, fac):
""" Multiplies the function with a scalar value
Args:
@@ -113,10 +113,10 @@ class PieceWiseLinFunc:
Args:
- x: array of length N+1 defining the edges of the intervals of the pwc
function.
- - y1: array of length N defining the function values at the left of the
- intervals.
- - y2: array of length N defining the function values at the right of the
+ - y1: array of length N defining the function values at the left of the
intervals.
+ - y2: array of length N defining the function values at the right of
+ the intervals.
"""
self.x = np.array(x)
self.y1 = np.array(y1)
@@ -128,7 +128,7 @@ class PieceWiseLinFunc:
Args:
- other: another PieceWiseLinFunc object
Returns:
- True if the two functions are equal up to `decimal` decimals,
+ True if the two functions are equal up to `decimal` decimals,
False otherwise
"""
eps = 10.0**(-decimal)
@@ -153,7 +153,7 @@ class PieceWiseLinFunc:
""" Computes the average of the piece-wise linear function:
a = 1/T int f(x) dx where T is the length of the interval.
Returns:
- - the average a.
+ - the average a.
"""
return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) / \
(self.x[-1]-self.x[0])
@@ -162,13 +162,13 @@ class PieceWiseLinFunc:
""" Computes the absolute average of the piece-wise linear function:
a = 1/T int |f(x)| dx where T is the length of the interval.
Returns:
- - the average a.
+ - the average a.
"""
return np.sum((self.x[1:]-self.x[:-1]) * 0.5 *
(np.abs(self.y1)+np.abs(self.y2)))/(self.x[-1]-self.x[0])
def add(self, f):
- """ Adds another PieceWiseLin function to this function.
+ """ Adds another PieceWiseLin function to this function.
Note: only functions defined on the same interval can be summed.
Args:
- f: PieceWiseLin function to be added.
@@ -178,7 +178,7 @@ class PieceWiseLinFunc:
# python implementation
# from python_backend import add_piece_wise_lin_python
- # self.x, self.y1, self.y2 = add_piece_wise_lin_python(
+ # self.x, self.y1, self.y2 = add_piece_wise_lin_python(
# self.x, self.y1, self.y2, f.x, f.y1, f.y2)
# cython version
@@ -186,7 +186,6 @@ class PieceWiseLinFunc:
self.x, self.y1, self.y2 = add_piece_wise_lin_cython(
self.x, self.y1, self.y2, f.x, f.y1, f.y2)
-
def mul_scalar(self, fac):
""" Multiplies the function with a scalar value
Args:
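
To illustrate the averaging implemented above, a sketch with the same numbers used in test_function.py:

    import pyspike as spk

    # piece-wise constant function on [0, 4]
    f = spk.PieceWiseConstFunc([0.0, 1.0, 2.0, 2.5, 4.0], [1.0, -0.5, 1.5, 0.75])

    # avrg() computes a = 1/T int f(x) dx = sum_i (x_{i+1} - x_i) * y_i / T
    print(f.avrg())     # (1.0 - 0.5 + 0.75 + 1.125) / 4.0 = 0.59375

    f.mul_scalar(1.5)   # scales every function value in place
    print(f.avrg())     # 0.890625
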
diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py
index e5b74e9..cf1a92f 100644
--- a/pyspike/python_backend.py
+++ b/pyspike/python_backend.py
@@ -1,6 +1,6 @@
""" python_backend.py
-Collection of python functions that can be used instead of the cython
+Collection of python functions that can be used instead of the cython
implementation.
Copyright 2014, Mario Mulansky <mario.mulansky@gmx.net>
@@ -21,18 +21,18 @@ def isi_distance_python(s1, s2):
""" Plain Python implementation of the isi distance.
"""
# compute the interspike interval
- nu1 = s1[1:]-s1[:-1]
- nu2 = s2[1:]-s2[:-1]
-
+ nu1 = s1[1:] - s1[:-1]
+ nu2 = s2[1:] - s2[:-1]
+
# compute the isi-distance
- spike_events = np.empty(len(nu1)+len(nu2))
+ spike_events = np.empty(len(nu1) + len(nu2))
spike_events[0] = s1[0]
# the values have one entry less - the number of intervals between events
- isi_values = np.empty(len(spike_events)-1)
+ isi_values = np.empty(len(spike_events) - 1)
# add the distance of the first events
# isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \
# else 1.0 - nu2[0]/nu1[0]
- isi_values[0] = (nu1[0]-nu2[0])/max(nu1[0],nu2[0])
+ isi_values[0] = (nu1[0] - nu2[0]) / max(nu1[0], nu2[0])
index1 = 0
index2 = 0
index = 1
@@ -49,28 +49,28 @@ def isi_distance_python(s1, s2):
if index2 >= len(nu2):
break
spike_events[index] = s2[index2]
- else: # s1[index1+1] == s2[index2+1]
+ else: # s1[index1 + 1] == s2[index2 + 1]
index1 += 1
index2 += 1
if (index1 >= len(nu1)) or (index2 >= len(nu2)):
break
spike_events[index] = s1[index1]
# compute the corresponding isi-distance
- isi_values[index] = (nu1[index1]-nu2[index2]) / \
- max(nu1[index1], nu2[index2])
+ isi_values[index] = (nu1[index1] - nu2[index2]) / \
+ max(nu1[index1], nu2[index2])
index += 1
# the last event is the interval end
spike_events[index] = s1[-1]
- # use only the data added above
+ # use only the data added above
# could be less than original length due to equal spike times
- return PieceWiseConstFunc(spike_events[:index+1], isi_values[:index])
+ return PieceWiseConstFunc(spike_events[:index + 1], isi_values[:index])
############################################################
# get_min_dist
############################################################
def get_min_dist(spike_time, spike_train, start_index=0):
- """ Returns the minimal distance |spike_time - spike_train[i]|
+ """ Returns the minimal distance |spike_time - spike_train[i]|
with i>=start_index.
"""
d = abs(spike_time - spike_train[start_index])
@@ -99,18 +99,18 @@ def spike_distance_python(spikes1, spikes2):
- PieceWiseLinFunc describing the spike-distance.
"""
# check for auxiliary spikes - first and last spikes should be identical
- assert spikes1[0]==spikes2[0], \
+ assert spikes1[0] == spikes2[0], \
"Given spike trains seems not to have auxiliary spikes!"
- assert spikes1[-1]==spikes2[-1], \
+ assert spikes1[-1] == spikes2[-1], \
"Given spike trains seems not to have auxiliary spikes!"
# shorter variables
t1 = spikes1
t2 = spikes2
- spike_events = np.empty(len(t1)+len(t2)-2)
+ spike_events = np.empty(len(t1) + len(t2) - 2)
spike_events[0] = t1[0]
- y_starts = np.empty(len(spike_events)-1)
- y_ends = np.empty(len(spike_events)-1)
+ y_starts = np.empty(len(spike_events) - 1)
+ y_ends = np.empty(len(spike_events) - 1)
index1 = 0
index2 = 0
@@ -133,9 +133,10 @@ def spike_distance_python(spikes1, spikes2):
break
spike_events[index] = t1[index1]
# first calculate the previous interval end value
- dt_p1 = dt_f1 # the previous time now was the following time before
+ dt_p1 = dt_f1 # the previous time was the following time before
s1 = dt_p1
- s2 = (dt_p2*(t2[index2+1]-t1[index1]) + dt_f2*(t1[index1]-t2[index2])) / isi2
+ s2 = (dt_p2*(t2[index2+1]-t1[index1]) +
+ dt_f2*(t1[index1]-t2[index2])) / isi2
y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2)
# now the next interval start value
dt_f1 = get_min_dist(t1[index1+1], t2, index2)
@@ -148,8 +149,9 @@ def spike_distance_python(spikes1, spikes2):
break
spike_events[index] = t2[index2]
# first calculate the previous interval end value
- dt_p2 = dt_f2 # the previous time now was the following time before
- s1 = (dt_p1*(t1[index1+1]-t2[index2]) + dt_f1*(t2[index2]-t1[index1])) / isi1
+ dt_p2 = dt_f2 # the previous time was the following time before
+ s1 = (dt_p1*(t1[index1+1]-t2[index2]) +
+ dt_f1*(t2[index2]-t1[index1])) / isi1
s2 = dt_p2
y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2)
# now the next interval start value
@@ -158,7 +160,7 @@ def spike_distance_python(spikes1, spikes2):
isi2 = t2[index2+1]-t2[index2]
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2)
- else: # t1[index1+1] == t2[index2+1] - generate only one event
+ else: # t1[index1+1] == t2[index2+1] - generate only one event
index1 += 1
index2 += 1
if (index1+1 >= len(t1)) or (index2+1 >= len(t2)):
@@ -183,9 +185,9 @@ def spike_distance_python(spikes1, spikes2):
s1 = dt_p1*(t1[-1]-t1[-2])/isi1
s2 = dt_p2*(t2[-1]-t2[-2])/isi2
y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2)
- # use only the data added above
+ # use only the data added above
# could be less than original length due to equal spike times
- return PieceWiseLinFunc(spike_events[:index+1],
+ return PieceWiseLinFunc(spike_events[:index+1],
y_starts[:index], y_ends[:index])
@@ -209,7 +211,7 @@ def add_piece_wise_const_python(x1, y1, x2, y2):
elif x1[index1+1] > x2[index2+1]:
index2 += 1
x_new[index] = x2[index2]
- else: # x1[index1+1] == x2[index2+1]:
+ else: # x1[index1+1] == x2[index2+1]:
index1 += 1
index2 += 1
x_new[index] = x1[index1]
@@ -217,15 +219,13 @@ def add_piece_wise_const_python(x1, y1, x2, y2):
# one array reached the end -> copy the contents of the other to the end
if index1+1 < len(y1):
x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:]
- y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + \
- y2[-1]
+ y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1]
index += len(x1)-index1-2
elif index2+1 < len(y2):
x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:]
- y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + \
- y1[-1]
+ y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1]
index += len(x2)-index2-2
- else: # both arrays reached the end simultaneously
+ else: # both arrays reached the end simultaneously
# only the last x-value missing
x_new[index+1] = x1[-1]
# the last value is again the end of the interval
@@ -244,9 +244,9 @@ def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22):
y2_new = np.empty_like(y1_new)
x_new[0] = x1[0]
y1_new[0] = y11[0] + y21[0]
- index1 = 0 # index for self
- index2 = 0 # index for f
- index = 0 # index for new
+ index1 = 0 # index for self
+ index2 = 0 # index for f
+ index = 0 # index for new
while (index1+1 < len(y11)) and (index2+1 < len(y21)):
# print(index1+1, x1[index1+1], self.y[index1+1], x_new[index])
if x1[index1+1] < x2[index2+1]:
@@ -272,7 +272,7 @@ def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22):
x_new[index] = x2[index2]
# and the starting value for the next interval
y1_new[index] = y21[index2] + y
- else: # x1[index1+1] == x2[index2+1]:
+ else: # x1[index1+1] == x2[index2+1]:
y2_new[index] = y12[index1] + y22[index2]
index1 += 1
index2 += 1
@@ -297,7 +297,7 @@ def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22):
y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y
y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y
index += len(x2)-index2-2
- else: # both arrays reached the end simultaneously
+ else: # both arrays reached the end simultaneously
# only the last x-value missing
x_new[index+1] = x1[-1]
# finally, the end value for the last interval
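
The normalized isi ratio that isi_distance_python computes interval by interval, as a self-contained sketch with toy data:

    import numpy as np

    s1 = np.array([0.0, 0.2, 0.4, 0.6, 1.0])   # trains incl. auxiliary spikes
    s2 = np.array([0.0, 0.1, 0.4, 0.5, 1.0])

    # interspike intervals, exactly as at the top of isi_distance_python
    nu1 = s1[1:] - s1[:-1]
    nu2 = s2[1:] - s2[:-1]

    # ratio for the first interval pair: negative when train 1 fires
    # faster than train 2, positive otherwise
    print((nu1[0] - nu2[0]) / max(nu1[0], nu2[0]))   # (0.2 - 0.1) / 0.2 = 0.5
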
diff --git a/pyspike/spikes.py b/pyspike/spikes.py
index 6ea94de..c496ab8 100644
--- a/pyspike/spikes.py
+++ b/pyspike/spikes.py
@@ -31,11 +31,11 @@ def add_auxiliary_spikes(spike_train, time_interval):
except:
T_start = 0
T_end = time_interval
-
+
assert spike_train[0] >= T_start, \
- "Spike train has events before the given start time"
+ "Spike train has events before the given start time"
assert spike_train[-1] <= T_end, \
- "Spike train has events after the given end time"
+ "Spike train has events after the given end time"
if spike_train[0] != T_start:
spike_train = np.insert(spike_train, 0, T_start)
if spike_train[-1] != T_end:
@@ -64,16 +64,16 @@ def spike_train_from_string(s, sep=' ', sort=True):
############################################################
# load_spike_trains_txt
############################################################
-def load_spike_trains_from_txt(file_name, time_interval=None,
+def load_spike_trains_from_txt(file_name, time_interval=None,
separator=' ', comment='#', sort=True):
- """ Loads a number of spike trains from a text file. Each line of the text
- file should contain one spike train as a sequence of spike times separated
- by `separator`. Empty lines as well as lines starting with `comment` are
- neglected. The `time_interval` represents the start and the end of the spike
- trains and it is used to add auxiliary spikes at the beginning and end of
- each spike train. However, if `time_interval == None`, no auxiliary spikes
- are added, but note that the Spike and ISI distance both require auxiliary
- spikes.
+ """ Loads a number of spike trains from a text file. Each line of the text
+ file should contain one spike train as a sequence of spike times separated
+ by `separator`. Empty lines as well as lines starting with `comment` are
+ neglected. The `time_interval` represents the start and the end of the
+ spike trains and it is used to add auxiliary spikes at the beginning and
+ end of each spike train. However, if `time_interval == None`, no auxiliary
+ spikes are added, but note that the Spike and ISI distance both require
+ auxiliary spikes.
Args:
- file_name: The name of the text file.
- time_interval: A pair (T_start, T_end) of values representing the start
@@ -87,10 +87,10 @@ def load_spike_trains_from_txt(file_name, time_interval=None,
spike_trains = []
spike_file = open(file_name, 'r')
for line in spike_file:
- if len(line) > 1 and not line.startswith(comment):
+ if len(line) > 1 and not line.startswith(comment):
# use only the lines with actual data and not commented
spike_train = spike_train_from_string(line, separator, sort)
- if not time_interval == None: # add auxiliary spikes if times given
+ if time_interval is not None: # add auxil. spikes if times given
spike_train = add_auxiliary_spikes(spike_train, time_interval)
spike_trains.append(spike_train)
return spike_trains
@@ -109,19 +109,19 @@ def merge_spike_trains(spike_trains):
# get the lengths of the spike trains
lens = np.array([len(st) for st in spike_trains])
merged_spikes = np.empty(np.sum(lens))
- index = 0 # the index for merged_spikes
- indices = np.zeros_like(lens) # indices of the spike trains
- index_list = np.arange(len(indices)) # indices of indices of spike trains
- # that have not yet reached the end
+ index = 0 # the index for merged_spikes
+ indices = np.zeros_like(lens) # indices of the spike trains
+ index_list = np.arange(len(indices)) # indices of indices of spike trains
+ # that have not yet reached the end
# list of the possible events in the spike trains
vals = [spike_trains[i][indices[i]] for i in index_list]
while len(index_list) > 0:
- i = np.argmin(vals) # the next spike is the minimum
- merged_spikes[index] = vals[i] # put it to the merged spike train
+ i = np.argmin(vals) # the next spike is the minimum
+ merged_spikes[index] = vals[i] # put it to the merged spike train
i = index_list[i]
- index += 1 # next index of merged spike train
- indices[i] += 1 # next index for the chosen spike train
- if indices[i] >= lens[i]: # remove spike train index if ended
+ index += 1 # next index of merged spike train
+ indices[i] += 1 # next index for the chosen spike train
+ if indices[i] >= lens[i]: # remove spike train index if ended
index_list = index_list[index_list != i]
- vals = [spike_trains[i][indices[i]] for i in index_list]
+ vals = [spike_trains[n][indices[n]] for n in index_list]
return merged_spikes
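
merge_spike_trains keeps the result sorted while consuming the input trains event by event; a toy example:

    import numpy as np
    import pyspike as spk

    t1 = np.array([0.1, 0.4, 0.8])
    t2 = np.array([0.2, 0.5])

    # one sorted train containing every event of both inputs
    merged = spk.merge_spike_trains([t1, t2])
    print(merged)                                  # [0.1 0.2 0.4 0.5 0.8]

    # a scalar time_interval is interpreted as (0, T_end), per the
    # try/except at the top of add_auxiliary_spikes
    print(spk.add_auxiliary_spikes(merged, 1.0))   # [0.  0.1 ... 0.8 1. ]
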
diff --git a/test/test_distance.py b/test/test_distance.py
index dafe693..3371cbd 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -22,8 +22,8 @@ def test_isi():
t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95])
# pen&paper calculation of the isi distance
- expected_times = [0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0]
- expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35,
+ expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0]
+ expected_isi = [-0.1/0.3, -0.1/0.3, 0.05/0.2, 0.05/0.2, -0.15/0.35,
-0.25/0.35, -0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3]
t1 = spk.add_auxiliary_spikes(t1, 1.0)
@@ -36,10 +36,10 @@ def test_isi():
assert_array_almost_equal(f.y, expected_isi, decimal=14)
# check with some equal spike times
- t1 = np.array([0.2,0.4,0.6])
- t2 = np.array([0.1,0.4,0.5,0.6])
+ t1 = np.array([0.2, 0.4, 0.6])
+ t2 = np.array([0.1, 0.4, 0.5, 0.6])
- expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0]
+ expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0]
expected_isi = [0.1/0.2, -0.1/0.3, -0.1/0.3, 0.1/0.2, 0.1/0.2, -0.0/0.5]
t1 = spk.add_auxiliary_spikes(t1, 1.0)
@@ -56,11 +56,11 @@ def test_spike():
t2 = np.array([0.3, 0.45, 0.8, 0.9, 0.95])
# pen&paper calculation of the spike distance
- expected_times = [0.0,0.2,0.3,0.4,0.45,0.6,0.7,0.8,0.9,0.95,1.0]
+ expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0]
s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2,
0.15, 0.1, 0.1*0.2/0.3, 0.1**2/0.3, 0.1*0.05/0.3, 0.1])
- s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05,
- (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35,
+ s2 = np.array([0.1, 0.1*0.2/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05,
+ (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35,
0.1, 0.1, 0.05, 0.05])
isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3])
isi2 = np.array([0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05])
@@ -76,17 +76,17 @@ def test_spike():
assert_array_almost_equal(f.y2, expected_y2, decimal=14)
# check with some equal spike times
- t1 = np.array([0.2,0.4,0.6])
- t2 = np.array([0.1,0.4,0.5,0.6])
+ t1 = np.array([0.2, 0.4, 0.6])
+ t2 = np.array([0.1, 0.4, 0.5, 0.6])
- expected_times = [0.0,0.1,0.2,0.4,0.5,0.6,1.0]
+ expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0]
s1 = np.array([0.1, 0.1*0.1/0.2, 0.1, 0.0, 0.0, 0.0, 0.0])
s2 = np.array([0.1*0.1/0.3, 0.1, 0.1*0.2/0.3, 0.0, 0.1, 0.0, 0.0])
isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4])
isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4])
expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2)
expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2)
-
+
t1 = spk.add_auxiliary_spikes(t1, 1.0)
t2 = spk.add_auxiliary_spikes(t2, 1.0)
f = spk.spike_distance(t1, t2)
@@ -100,8 +100,8 @@ def check_multi_distance(dist_func, dist_func_multi):
# generate spike trains:
t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0)
t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0)
- t3 = spk.add_auxiliary_spikes(np.array([0.2,0.4,0.6]), 1.0)
- t4 = spk.add_auxiliary_spikes(np.array([0.1,0.4,0.5,0.6]), 1.0)
+ t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0)
+ t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0)
spike_trains = [t1, t2, t3, t4]
f12 = dist_func(t1, t2)
@@ -111,17 +111,17 @@ def check_multi_distance(dist_func, dist_func_multi):
f24 = dist_func(t2, t4)
f34 = dist_func(t3, t4)
- f_multi = dist_func_multi(spike_trains, [0,1])
+ f_multi = dist_func_multi(spike_trains, [0, 1])
assert f_multi.almost_equal(f12, decimal=14)
f = copy(f12)
f.add(f13)
f.add(f23)
f.mul_scalar(1.0/3)
- f_multi = dist_func_multi(spike_trains, [0,1,2])
+ f_multi = dist_func_multi(spike_trains, [0, 1, 2])
assert f_multi.almost_equal(f, decimal=14)
- f.mul_scalar(3) # revert above normalization
+ f.mul_scalar(3) # revert above normalization
f.add(f14)
f.add(f24)
f.add(f34)
@@ -139,6 +139,7 @@ def test_multi_spike():
if __name__ == "__main__":
- test_auxiliary_spikes()
test_isi()
test_spike()
+ test_multi_isi()
+ test_multi_spike()
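
The identity these multi-distance tests verify, written out as a standalone check with the same spike times:

    import numpy as np
    import pyspike as spk

    t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0)
    t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0)
    t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0)

    # average of the three pairwise profiles ...
    f = spk.isi_distance(t1, t2)
    f.add(spk.isi_distance(t1, t3))
    f.add(spk.isi_distance(t2, t3))
    f.mul_scalar(1.0/3)

    # ... equals the multi-variate profile
    assert spk.isi_distance_multi([t1, t2, t3]).almost_equal(f, decimal=14)
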
diff --git a/test/test_function.py b/test/test_function.py
index c0fb3fd..ed7d6bc 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,18 +10,18 @@ Distributed under the MIT License (MIT)
from __future__ import print_function
import numpy as np
from copy import copy
-from numpy.testing import assert_equal, assert_almost_equal, \
- assert_array_almost_equal
+from numpy.testing import assert_almost_equal, assert_array_almost_equal
import pyspike as spk
+
def test_pwc():
# some random data
x = [0.0, 1.0, 2.0, 2.5, 4.0]
y = [1.0, -0.5, 1.5, 0.75]
f = spk.PieceWiseConstFunc(x, y)
xp, yp = f.get_plottable_data()
-
+
xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0]
yp_expected = [1.0, 1.0, -0.5, -0.5, 1.5, 1.5, 0.75, 0.75]
assert_array_almost_equal(xp, xp_expected, decimal=16)
@@ -51,17 +51,18 @@ def test_pwc_add():
f2.add(f)
assert_array_almost_equal(f2.x, x_expected, decimal=16)
assert_array_almost_equal(f2.y, y_expected, decimal=16)
-
+
f1.add(f2)
# same x, but y doubled
assert_array_almost_equal(f1.x, f2.x, decimal=16)
assert_array_almost_equal(f1.y, 2*f2.y, decimal=16)
+
def test_pwc_mul():
x = [0.0, 1.0, 2.0, 2.5, 4.0]
y = [1.0, -0.5, 1.5, 0.75]
f = spk.PieceWiseConstFunc(x, y)
-
+
f.mul_scalar(1.5)
assert_array_almost_equal(f.x, x, decimal=16)
assert_array_almost_equal(f.y, 1.5*np.array(y), decimal=16)
@@ -75,15 +76,15 @@ def test_pwl():
y2 = [1.5, -0.4, 1.5, 0.25]
f = spk.PieceWiseLinFunc(x, y1, y2)
xp, yp = f.get_plottable_data()
-
+
xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0]
yp_expected = [1.0, 1.5, -0.5, -0.4, 1.5, 1.5, 0.75, 0.25]
assert_array_almost_equal(xp, xp_expected, decimal=16)
assert_array_almost_equal(yp, yp_expected, decimal=16)
-
+
avrg_expected = (1.25 - 0.45 + 0.75 + 1.5*0.5) / 4.0
assert_almost_equal(f.avrg(), avrg_expected, decimal=16)
-
+
abs_avrg_expected = (1.25 + 0.45 + 0.75 + 1.5*0.5) / 4.0
assert_almost_equal(f.abs_avrg(), abs_avrg_expected, decimal=16)
@@ -113,7 +114,7 @@ def test_pwl_add():
assert_array_almost_equal(f2.x, x_expected, decimal=16)
assert_array_almost_equal(f2.y1, y1_expected, decimal=16)
assert_array_almost_equal(f2.y2, y2_expected, decimal=16)
-
+
f1.add(f2)
# same x, but y doubled
assert_array_almost_equal(f1.x, f2.x, decimal=16)
@@ -121,12 +122,12 @@ def test_pwl_add():
assert_array_almost_equal(f1.y2, 2*f2.y2, decimal=16)
-def test_pwc_mul():
+def test_pwl_mul():
x = [0.0, 1.0, 2.0, 2.5, 4.0]
y1 = [1.0, -0.5, 1.5, 0.75]
y2 = [1.5, -0.4, 1.5, 0.25]
f = spk.PieceWiseLinFunc(x, y1, y2)
-
+
f.mul_scalar(1.5)
assert_array_almost_equal(f.x, x, decimal=16)
assert_array_almost_equal(f.y1, 1.5*np.array(y1), decimal=16)
@@ -137,3 +138,8 @@ def test_pwc_mul():
if __name__ == "__main__":
test_pwc()
+ test_pwc_add()
+ test_pwc_mul()
+ test_pwl()
+ test_pwl_add()
+ test_pwl_mul()
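
add() merges the x-grids of two functions defined on the same interval and sums the values on every sub-interval; a small sketch:

    import pyspike as spk

    # two piece-wise constant functions on the same interval [0, 2]
    f1 = spk.PieceWiseConstFunc([0.0, 1.0, 2.0], [1.0, -0.5])
    f2 = spk.PieceWiseConstFunc([0.0, 0.5, 2.0], [0.5, 1.0])

    # f1 becomes x = [0, 0.5, 1, 2], y = [1.5, 2.0, 0.5]
    f1.add(f2)
    print(f1.x, f1.y)
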
diff --git a/test/test_spikes.py b/test/test_spikes.py
index e008207..349e0bf 100644
--- a/test/test_spikes.py
+++ b/test/test_spikes.py
@@ -23,13 +23,13 @@ def test_auxiliary_spikes():
def test_load_from_txt():
- spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
assert len(spike_trains) == 40
# check the first spike train
- spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
- 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
+ spike_times = [0, 64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1,
+ 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7,
3644.3, 3936.3, 4000]
assert_equal(spike_times, spike_trains[0])
@@ -39,15 +39,15 @@ def test_load_from_txt():
assert spike_train[-1] == 4000
# load without adding auxiliary spikes
- spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=None)
+ spike_trains2 = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=None)
assert len(spike_trains2) == 40
# check auxiliary spikes
for i in xrange(len(spike_trains)):
- assert len(spike_trains[i]) == len(spike_trains2[i])+2 # two spikes less
+ assert len(spike_trains[i]) == len(spike_trains2[i])+2 # 2 spikes less
-def check_merged_spikes( merged_spikes, spike_trains ):
+def check_merged_spikes(merged_spikes, spike_trains):
# create a flat array with all spike events
all_spikes = np.array([])
for spike_train in spike_trains:
@@ -55,7 +55,7 @@ def check_merged_spikes( merged_spikes, spike_trains ):
indices = np.zeros_like(all_spikes, dtype='bool')
# check if we find all the spike events in the original spike trains
for x in merged_spikes:
- i = np.where(all_spikes == x)[0][0] # the first axis and the first entry
+ i = np.where(all_spikes == x)[0][0] # first axis and first entry
# change to something impossible so we dont find this event again
all_spikes[i] = -1.0
indices[i] = True
@@ -64,23 +64,22 @@ def check_merged_spikes( merged_spikes, spike_trains ):
def test_merge_spike_trains():
# first load the data
- spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
- time_interval=(0,4000))
+ spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
+ time_interval=(0, 4000))
spikes = spk.merge_spike_trains([spike_trains[0], spike_trains[1]])
# test if result is sorted
assert((spikes == np.sort(spikes)).all())
# check merging
- check_merged_spikes( spikes, [spike_trains[0], spike_trains[1]] )
+ check_merged_spikes(spikes, [spike_trains[0], spike_trains[1]])
spikes = spk.merge_spike_trains(spike_trains)
# test if result is sorted
assert((spikes == np.sort(spikes)).all())
# check merging
- check_merged_spikes( spikes, spike_trains )
+ check_merged_spikes(spikes, spike_trains)
if __name__ == "main":
test_auxiliary_spikes()
test_load_from_txt()
test_merge_spike_trains()
-