author    Mario Mulansky <mario.mulansky@gmx.net>  2015-04-24 12:08:05 +0200
committer Mario Mulansky <mario.mulansky@gmx.net>  2015-04-24 12:08:05 +0200
commit    7da6da8533f9f76a99b959c9de37138377119ffc (patch)
tree      cb62f32240db22be53b6faea844f5039bf28a161
parent    ed85a9b72edcb7bba6ae1105e213b3b0a2f78d3a (diff)
changed spike sync implementation to use SpikeTrain
-rw-r--r--  pyspike/SpikeTrain.py              |  8
-rw-r--r--  pyspike/cython/cython_distance.pyx | 37
-rw-r--r--  pyspike/spike_sync.py              | 37
-rw-r--r--  test/test_distance.py              | 62
4 files changed, 83 insertions(+), 61 deletions(-)
diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py
index 4760014..041f897 100644
--- a/pyspike/SpikeTrain.py
+++ b/pyspike/SpikeTrain.py
@@ -21,14 +21,14 @@ class SpikeTrain:
"""
# TODO: sanity checks
- self.spikes = np.array(spike_times)
+ self.spikes = np.array(spike_times, dtype=float)
# check if interval is as sequence
if not isinstance(interval, collections.Sequence):
# treat value as end time and assume t_start = 0
self.t_start = 0.0
- self.t_end = interval
+ self.t_end = float(interval)
else:
# extract times from sequence
- self.t_start = interval[0]
- self.t_end = interval[1]
+ self.t_start = float(interval[0])
+ self.t_end = float(interval[1])
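For reference, a minimal usage sketch of the constructor behaviour after the change above (this assumes SpikeTrain is importable from the pyspike package, as in the tests further down; it is an illustration, not part of the patch):

    from pyspike import SpikeTrain

    # interval given as a single value: treated as t_end, with t_start = 0.0
    st_a = SpikeTrain([1, 2, 3], 4)
    # interval given as a sequence: (t_start, t_end), both cast to float
    st_b = SpikeTrain([1.0, 2.0, 3.0], (0.0, 4.0))

    print(st_a.spikes.dtype)         # float64 -- spike times are now coerced to float
    print(st_a.t_start, st_a.t_end)  # 0.0 4.0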
diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx
index 7999e0a..6d998b9 100644
--- a/pyspike/cython/cython_distance.pyx
+++ b/pyspike/cython/cython_distance.pyx
@@ -339,11 +339,11 @@ def spike_distance_cython(double[:] t1, double[:] t2,
cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
int i, int j, max_tau):
cdef double m = 1E100 # some huge number
- cdef int N1 = len(spikes1)-2
- cdef int N2 = len(spikes2)-2
- if i < N1:
+ cdef int N1 = len(spikes1)-1
+ cdef int N2 = len(spikes2)-1
+ if i < N1 and i > -1:
m = fmin(m, spikes1[i+1]-spikes1[i])
- if j < N2:
+ if j < N2 and j > -1:
m = fmin(m, spikes2[j+1]-spikes2[j])
if i > 1:
m = fmin(m, spikes1[i]-spikes1[i-1])
@@ -358,34 +358,35 @@ cdef inline double get_tau(double[:] spikes1, double[:] spikes2,
############################################################
# coincidence_cython
############################################################
-def coincidence_cython(double[:] spikes1, double[:] spikes2, double max_tau):
+def coincidence_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
cdef int N1 = len(spikes1)
cdef int N2 = len(spikes2)
- cdef int i = 0
- cdef int j = 0
+ cdef int i = -1
+ cdef int j = -1
cdef int n = 0
- cdef double[:] st = np.zeros(N1 + N2 - 2) # spike times
- cdef double[:] c = np.zeros(N1 + N2 - 2) # coincidences
- cdef double[:] mp = np.ones(N1 + N2 - 2) # multiplicity
+ cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times
+ cdef double[:] c = np.zeros(N1 + N2 + 2) # coincidences
+ cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity
cdef double tau
- while n < N1 + N2 - 2:
- if spikes1[i+1] < spikes2[j+1]:
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (spikes1[i+1] < spikes2[j+1] or j == N2-1):
i += 1
n += 1
tau = get_tau(spikes1, spikes2, i, j, max_tau)
st[n] = spikes1[i]
- if j > 0 and spikes1[i]-spikes2[j] < tau:
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
# coincidence between the current spike and the previous spike
# both get marked with 1
c[n] = 1
c[n-1] = 1
- elif spikes1[i+1] > spikes2[j+1]:
+ elif (j < N2-1) and (spikes1[i+1] > spikes2[j+1] or i == N1-1):
j += 1
n += 1
tau = get_tau(spikes1, spikes2, i, j, max_tau)
st[n] = spikes2[j]
- if i > 0 and spikes2[j]-spikes1[i] < tau:
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
# coincidence between the current spike and the previous spike
# both get marked with 1
c[n] = 1
@@ -394,8 +395,6 @@ def coincidence_cython(double[:] spikes1, double[:] spikes2, double max_tau):
# advance in both spike trains
j += 1
i += 1
- if i == N1-1 or j == N2-1:
- break
n += 1
# add only one event, but with coincidence 2 and multiplicity 2
st[n] = spikes1[i]
@@ -406,8 +405,8 @@ def coincidence_cython(double[:] spikes1, double[:] spikes2, double max_tau):
c = c[:n+2]
mp = mp[:n+2]
- st[0] = spikes1[0]
- st[len(st)-1] = spikes1[len(spikes1)-1]
+ st[0] = t_start
+ st[len(st)-1] = t_end
c[0] = c[1]
c[len(c)-1] = c[len(c)-2]
mp[0] = mp[1]
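The loop above merges the two spike trains in temporal order and marks a pair of spikes as coincident when their time difference stays below an adaptive window tau derived from the surrounding inter-spike intervals (get_tau). A rough pure-Python sketch of that window follows; the part of get_tau not shown in this hunk is assumed to halve the smallest neighbouring interval and clamp it with max_tau, so the 0.5 factor below is an assumption for illustration only:

    def coincidence_window(spikes1, spikes2, i, j, max_tau=0.0):
        # candidate widths: inter-spike intervals adjacent to spikes1[i] and spikes2[j]
        candidates = []
        if 0 <= i < len(spikes1) - 1:
            candidates.append(spikes1[i + 1] - spikes1[i])
        if 0 <= j < len(spikes2) - 1:
            candidates.append(spikes2[j + 1] - spikes2[j])
        if i > 0:
            candidates.append(spikes1[i] - spikes1[i - 1])
        if j > 0:
            candidates.append(spikes2[j] - spikes2[j - 1])
        # assumed scaling: half the smallest neighbouring interval, optionally clamped
        tau = 0.5 * min(candidates) if candidates else float("inf")
        if max_tau > 0.0:
            tau = min(tau, max_tau)
        return tau

    # spikes1[i] and spikes2[j] then count as coincident when their distance is below
    # tau, which is what the c[n] = 1 / c[n-1] = 1 bookkeeping above records.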
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index e12ebb8..bca6f73 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -16,22 +16,27 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
############################################################
# spike_sync_profile
############################################################
-def spike_sync_profile(spikes1, spikes2, max_tau=None):
+def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
""" Computes the spike-synchronization profile S_sync(t) of the two given
spike trains. Returns the profile as a DiscreteFunction object. The S_sync
values are either 1 or 0, indicating the presence or absence of a
- coincidence. The spike trains are expected to have auxiliary spikes at the
- beginning and end of the interval. Use the function add_auxiliary_spikes to
- add those spikes to the spike train.
+ coincidence.
- :param spikes1: ordered array of spike times with auxiliary spikes.
- :param spikes2: ordered array of spike times with auxiliary spikes.
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
:param max_tau: Maximum coincidence window size. If 0 or `None`, the
coincidence window has no upper bound.
:returns: The spike-distance profile :math:`S_{sync}(t)`.
:rtype: :class:`pyspike.function.DiscreteFunction`
"""
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
# cython implementation
try:
@@ -48,7 +53,10 @@ Falling back to slow python backend.")
if max_tau is None:
max_tau = 0.0
- times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2,
+ times, coincidences, multiplicity = coincidence_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
max_tau)
return DiscreteFunc(times, coincidences, multiplicity)
@@ -57,15 +65,17 @@ Falling back to slow python backend.")
############################################################
# spike_sync
############################################################
-def spike_sync(spikes1, spikes2, interval=None, max_tau=None):
+def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None):
""" Computes the spike synchronization value SYNC of the given spike
trains. The spike synchronization value is the computed as the total number
of coincidences divided by the total number of spikes:
.. math:: SYNC = \sum_n C_n / N.
- :param spikes1: ordered array of spike times with auxiliary spikes.
- :param spikes2: ordered array of spike times with auxiliary spikes.
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
:param interval: averaging interval given as a pair of floats (T0, T1),
if None the average over the whole function is computed.
:type interval: Pair of floats or None.
@@ -74,7 +84,8 @@ def spike_sync(spikes1, spikes2, interval=None, max_tau=None):
:returns: The spike synchronization value.
:rtype: double
"""
- return spike_sync_profile(spikes1, spikes2, max_tau).avrg(interval)
+ return spike_sync_profile(spike_train1, spike_train2,
+ max_tau).avrg(interval)
############################################################
@@ -87,7 +98,7 @@ def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None):
spike trains pairs involving the spike train of containing this spike,
which is the number of spike trains minus one (N-1).
- :param spike_trains: list of spike trains
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
if None all given spike trains are used (default=None)
:type indices: list or None
@@ -134,7 +145,7 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
""" Computes the overall spike-synchronization value of all pairs of
spike-trains.
- :param spike_trains: list of spike trains
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
if None all given spike trains are used (default=None)
:type indices: list or None
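Taken together, the interface now operates on SpikeTrain objects instead of plain arrays with auxiliary spikes. A short usage sketch (the spike times and the 0.5 result follow the test case below; spk is assumed to be the pyspike package):

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([1.0, 2.0, 3.0], 4.0)   # spikes on the interval [0, 4]
    st2 = SpikeTrain([2.1], 4.0)

    f = spk.spike_sync_profile(st1, st2)     # DiscreteFunc profile of coincidences
    print(spk.spike_sync(st1, st2))          # 0.5 for this pair, cf. test_spike_sync below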
diff --git a/test/test_distance.py b/test/test_distance.py
index 4af0e63..dbb72f1 100644
--- a/test/test_distance.py
+++ b/test/test_distance.py
@@ -138,10 +138,17 @@ def test_spike():
def test_spike_sync():
- spikes1 = np.array([1.0, 2.0, 3.0])
- spikes2 = np.array([2.1])
- spikes1 = spk.add_auxiliary_spikes(spikes1, 4.0)
- spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0)
+ spikes1 = SpikeTrain([1.0, 2.0, 3.0], 4.0)
+ spikes2 = SpikeTrain([2.1], 4.0)
+
+ expected_x = np.array([0.0, 1.0, 2.0, 2.1, 3.0, 4.0])
+ expected_y = np.array([0.0, 0.0, 1.0, 1.0, 0.0, 0.0])
+
+ f = spk.spike_sync_profile(spikes1, spikes2)
+
+ assert_array_almost_equal(f.x, expected_x, decimal=16)
+ assert_array_almost_equal(f.y, expected_y, decimal=16)
+
assert_almost_equal(spk.spike_sync(spikes1, spikes2),
0.5, decimal=16)
@@ -149,28 +156,34 @@ def test_spike_sync():
assert_almost_equal(spk.spike_sync(spikes1, spikes2, max_tau=0.05),
0.0, decimal=16)
- spikes2 = np.array([3.1])
- spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0)
+ spikes2 = SpikeTrain([3.1], 4.0)
assert_almost_equal(spk.spike_sync(spikes1, spikes2),
0.5, decimal=16)
- spikes2 = np.array([1.1])
- spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0)
+ spikes2 = SpikeTrain([1.1], 4.0)
+
+ expected_x = np.array([0.0, 1.0, 1.1, 2.0, 3.0, 4.0])
+ expected_y = np.array([1.0, 1.0, 1.0, 0.0, 0.0, 0.0])
+
+ f = spk.spike_sync_profile(spikes1, spikes2)
+
+ assert_array_almost_equal(f.x, expected_x, decimal=16)
+ assert_array_almost_equal(f.y, expected_y, decimal=16)
+
assert_almost_equal(spk.spike_sync(spikes1, spikes2),
0.5, decimal=16)
- spikes2 = np.array([0.9])
- spikes2 = spk.add_auxiliary_spikes(spikes2, 4.0)
+ spikes2 = SpikeTrain([0.9], 4.0)
assert_almost_equal(spk.spike_sync(spikes1, spikes2),
0.5, decimal=16)
def check_multi_profile(profile_func, profile_func_multi):
# generate spike trains:
- t1 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6, 0.7]), 1.0)
- t2 = spk.add_auxiliary_spikes(np.array([0.3, 0.45, 0.8, 0.9, 0.95]), 1.0)
- t3 = spk.add_auxiliary_spikes(np.array([0.2, 0.4, 0.6]), 1.0)
- t4 = spk.add_auxiliary_spikes(np.array([0.1, 0.4, 0.5, 0.6]), 1.0)
+ t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0)
+ t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)
+ t3 = SpikeTrain([0.2, 0.4, 0.6], 1.0)
+ t4 = SpikeTrain([0.1, 0.4, 0.5, 0.6], 1.0)
spike_trains = [t1, t2, t3, t4]
f12 = profile_func(t1, t2)
@@ -213,15 +226,12 @@ def test_multi_spike():
def test_multi_spike_sync():
# some basic multivariate check
- spikes1 = np.array([100, 300, 400, 405, 410, 500, 700, 800,
- 805, 810, 815, 900], dtype=float)
- spikes2 = np.array([100, 200, 205, 210, 295, 350, 400, 510,
- 600, 605, 700, 910], dtype=float)
- spikes3 = np.array([100, 180, 198, 295, 412, 420, 510, 640,
- 695, 795, 820, 920], dtype=float)
- spikes1 = spk.add_auxiliary_spikes(spikes1, 1000)
- spikes2 = spk.add_auxiliary_spikes(spikes2, 1000)
- spikes3 = spk.add_auxiliary_spikes(spikes3, 1000)
+ spikes1 = SpikeTrain([100, 300, 400, 405, 410, 500, 700, 800,
+ 805, 810, 815, 900], 1000)
+ spikes2 = SpikeTrain([100, 200, 205, 210, 295, 350, 400, 510,
+ 600, 605, 700, 910], 1000)
+ spikes3 = SpikeTrain([100, 180, 198, 295, 412, 420, 510, 640,
+ 695, 795, 820, 920], 1000)
assert_almost_equal(spk.spike_sync(spikes1, spikes2),
0.5, decimal=15)
assert_almost_equal(spk.spike_sync(spikes1, spikes3),
@@ -326,5 +336,7 @@ def test_multi_variate_subsets():
if __name__ == "__main__":
test_isi()
test_spike()
- # test_multi_isi()
- # test_multi_spike()
+ test_spike_sync()
+ test_multi_isi()
+ test_multi_spike()
+ test_multi_spike_sync()
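As a hand check of the first assertion in test_spike_sync, using the formula SYNC = sum_n C_n / N from the docstring above: with spikes1 = [1, 2, 3] and spikes2 = [2.1], only the pair (2.0, 2.1) is coincident, so two of the four spikes carry C_n = 1:

    coincidences = [0, 1, 0, 1]   # one C_n per spike, over both trains
    print(sum(coincidences) / float(len(coincidences)))   # 0.5, matching spike_sync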