author     Mario Mulansky <mario.mulansky@gmx.net>    2016-03-29 12:35:51 +0200
committer  Mario Mulansky <mario.mulansky@gmx.net>    2016-03-29 12:35:51 +0200
commit     ad29154d8c152996d16c012dcc5798c5f1957aed (patch)
tree       f01575082a649061362178d4a9870cb5fe2c33f6 /pyspike
parent     1fa8493ce15af8cd4c057eece155f1557fc241ea (diff)
parent     c17cc8602414cec883c412008a4300b2c7ac7f80 (diff)
Merge branch 'master' into new_directionality
Conflicts:
    pyspike/__init__.py
    pyspike/cython/cython_directionality.pyx
    pyspike/cython/directionality_python_backend.py
    pyspike/spike_directionality.py
    setup.py
Diffstat (limited to 'pyspike')
-rw-r--r--  pyspike/DiscreteFunc.py                10
-rw-r--r--  pyspike/PieceWiseConstFunc.py           6
-rw-r--r--  pyspike/PieceWiseLinFunc.py             8
-rw-r--r--  pyspike/SpikeTrain.py                   2
-rw-r--r--  pyspike/__init__.py                    22
-rw-r--r--  pyspike/cython/cython_distances.pyx   134
-rw-r--r--  pyspike/cython/cython_profiles.pyx    110
-rw-r--r--  pyspike/cython/python_backend.py      108
-rw-r--r--  pyspike/generic.py                      9
-rw-r--r--  pyspike/isi_distance.py               157
-rw-r--r--  pyspike/psth.py                         2
-rw-r--r--  pyspike/spike_distance.py             163
-rw-r--r--  pyspike/spike_sync.py                 164
13 files changed, 598 insertions, 297 deletions
diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py
index 9cc7bd5..fe97bc2 100644
--- a/pyspike/DiscreteFunc.py
+++ b/pyspike/DiscreteFunc.py
@@ -2,7 +2,7 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import numpy as np
import collections
@@ -80,7 +80,7 @@ class DiscreteFunc(object):
expected_mp = (averaging_window_size+1) * int(self.mp[0])
y_plot = np.zeros_like(self.y)
# compute the values in a loop, could be done in cython if required
- for i in xrange(len(y_plot)):
+ for i in range(len(y_plot)):
if self.mp[i] >= expected_mp:
# the current value contains already all the wanted
@@ -206,7 +206,7 @@ expected."
# cython version
try:
- from cython.cython_add import add_discrete_function_cython as \
+ from .cython.cython_add import add_discrete_function_cython as \
add_discrete_function_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -215,7 +215,7 @@ sure that PySpike is installed by running\n\
'python setup.py build_ext --inplace'! \
\n Falling back to slow python backend.")
# use python backend
- from cython.python_backend import add_discrete_function_python as \
+ from .cython.python_backend import add_discrete_function_python as \
add_discrete_function_impl
self.x, self.y, self.mp = \
@@ -244,7 +244,7 @@ def average_profile(profiles):
assert len(profiles) > 1
avrg_profile = profiles[0].copy()
- for i in xrange(1, len(profiles)):
+ for i in range(1, len(profiles)):
avrg_profile.add(profiles[i])
avrg_profile.mul_scalar(1.0/len(profiles)) # normalize
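The changes above convert the implicit relative imports to explicit ones, which Python 3 requires and which `from __future__ import absolute_import` enforces on Python 2 as well, and replace `xrange` with `range`. A minimal sketch of the backend-selection pattern this patch applies throughout, assuming the pyspike package layout and abbreviating the warning text::

    # sketch of the Cython/Python backend selection used in this patch
    from __future__ import absolute_import, print_function

    try:
        # fast Cython backend, available after 'python setup.py build_ext --inplace'
        from .cython.cython_add import add_discrete_function_cython \
            as add_discrete_function_impl
    except ImportError:
        print("Backend warning: falling back to the slow python backend.")
        # pure Python fallback, always importable
        from .cython.python_backend import add_discrete_function_python \
            as add_discrete_function_impl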
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index 23ff536..5ce5f27 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -2,7 +2,7 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import numpy as np
import collections
@@ -189,7 +189,7 @@ class PieceWiseConstFunc(object):
# cython version
try:
- from cython.cython_add import add_piece_wise_const_cython as \
+ from .cython.cython_add import add_piece_wise_const_cython as \
add_piece_wise_const_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -198,7 +198,7 @@ sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'! \
\n Falling back to slow python backend.")
# use python backend
- from cython.python_backend import add_piece_wise_const_python as \
+ from .cython.python_backend import add_piece_wise_const_python as \
add_piece_wise_const_impl
self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y)
diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
index 0d51c76..8145e63 100644
--- a/pyspike/PieceWiseLinFunc.py
+++ b/pyspike/PieceWiseLinFunc.py
@@ -2,7 +2,7 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import numpy as np
import collections
@@ -222,13 +222,13 @@ class PieceWiseLinFunc:
assert self.x[-1] == f.x[-1], "The functions have different intervals"
# python implementation
- # from python_backend import add_piece_wise_lin_python
+ # from .python_backend import add_piece_wise_lin_python
# self.x, self.y1, self.y2 = add_piece_wise_lin_python(
# self.x, self.y1, self.y2, f.x, f.y1, f.y2)
# cython version
try:
- from cython.cython_add import add_piece_wise_lin_cython as \
+ from .cython.cython_add import add_piece_wise_lin_cython as \
add_piece_wise_lin_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -237,7 +237,7 @@ sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'! \n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import add_piece_wise_lin_python as \
+ from .cython.python_backend import add_piece_wise_lin_python as \
add_piece_wise_lin_impl
self.x, self.y1, self.y2 = add_piece_wise_lin_impl(
diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py
index 4b59a5d..19f2419 100644
--- a/pyspike/SpikeTrain.py
+++ b/pyspike/SpikeTrain.py
@@ -68,7 +68,7 @@ class SpikeTrain(object):
"""Returns the spikes of this spike train with auxiliary spikes in case
of empty spike trains.
"""
- if len(self.spikes) < 2:
+ if len(self.spikes) < 1:
return np.unique(np.insert([self.t_start, self.t_end], 1,
self.spikes))
else:
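With this change, auxiliary spikes at the interval edges are only inserted for completely empty spike trains; a train with a single spike is now returned unchanged and handled by the new single-spike edge corrections in the kernels below. A hedged illustration, assuming the usual SpikeTrain(spike_times, edges) constructor::

    from pyspike import SpikeTrain

    empty = SpikeTrain([], (0.0, 4.0))
    single = SpikeTrain([1.5], (0.0, 4.0))

    print(empty.get_spikes_non_empty())   # auxiliary spikes at t_start and t_end
    print(single.get_spikes_non_empty())  # [1.5] - the single spike is kept as is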
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 4c1e47e..7f578b0 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -4,28 +4,30 @@ Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
"""
+from __future__ import absolute_import
+
__all__ = ["isi_distance", "spike_distance", "spike_sync", "psth",
"spikes", "spike_directionality", "SpikeTrain",
"PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"]
-from PieceWiseConstFunc import PieceWiseConstFunc
-from PieceWiseLinFunc import PieceWiseLinFunc
-from DiscreteFunc import DiscreteFunc
-from SpikeTrain import SpikeTrain
+from .PieceWiseConstFunc import PieceWiseConstFunc
+from .PieceWiseLinFunc import PieceWiseLinFunc
+from .DiscreteFunc import DiscreteFunc
+from .SpikeTrain import SpikeTrain
-from isi_distance import isi_profile, isi_distance, isi_profile_multi,\
+from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\
isi_distance_multi, isi_distance_matrix
-from spike_distance import spike_profile, spike_distance, spike_profile_multi,\
+from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\
spike_distance_multi, spike_distance_matrix
-from spike_sync import spike_sync_profile, spike_sync,\
+from .spike_sync import spike_sync_profile, spike_sync,\
spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\
filter_by_spike_sync
-from psth import psth
+from .psth import psth
-from spikes import load_spike_trains_from_txt, spike_train_from_string, \
+from .spikes import load_spike_trains_from_txt, spike_train_from_string, \
merge_spike_trains, generate_poisson_spikes
-from spike_directionality import spike_directionality, \
+from .spike_directionality import spike_directionality, \
spike_directionality_profiles, spike_directionality_matrix, \
spike_train_order_profile, spike_train_order, \
spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \
diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx
index c017bf9..f50700f 100644
--- a/pyspike/cython/cython_distances.pyx
+++ b/pyspike/cython/cython_distances.pyx
@@ -55,20 +55,27 @@ def isi_distance_cython(double[:] s1, double[:] s2,
N2 = len(s2)
# first interspike interval - check if a spike exists at the start time
+ # and also account for spike trains with single spikes
if s1[0] > t_start:
- # edge correction
- nu1 = fmax(s1[0]-t_start, s1[1]-s1[0])
+ # edge correction for the first interspike interval:
+ # take the maximum of the distance from the beginning to the first
+ # spike and the interval between the first two spikes.
+ # if there is only one spike, take its distance to the beginning
+ nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start
index1 = -1
else:
- nu1 = s1[1]-s1[0]
+ # if the first spike is exactly at the start, take the distance
+ # to the next spike. If this is the only spike, take the distance to
+ # the end.
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0]
index1 = 0
if s2[0] > t_start:
- # edge correction
- nu2 = fmax(s2[0]-t_start, s2[1]-s2[0])
+ # edge correction as above
+ nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start
index2 = -1
else:
- nu2 = s2[1]-s2[0]
+ nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0]
index2 = 0
last_t = t_start
@@ -86,8 +93,12 @@ def isi_distance_cython(double[:] s1, double[:] s2,
if index1 < N1-1:
nu1 = s1[index1+1]-s1[index1]
else:
- # edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ # edge correction for the last ISI:
+ # take the max of the distance of the last
+ # spike to the end and the previous ISI. If there was only
+ # one spike, always take the distance to the end.
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
elif (index2 < N2-1) and ((index1 == N1-1) or
(s1[index1+1] > s2[index2+1])):
index2 += 1
@@ -95,8 +106,9 @@ def isi_distance_cython(double[:] s1, double[:] s2,
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
- # edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ # edge correction for the end as above
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
else: # s1[index1+1] == s2[index2+1]
index1 += 1
index2 += 1
@@ -104,13 +116,15 @@ def isi_distance_cython(double[:] s1, double[:] s2,
if index1 < N1-1:
nu1 = s1[index1+1]-s1[index1]
else:
- # edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ # edge correction for the end as above
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
- # edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ # edge correction for the end as above
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
# compute the corresponding isi-distance
isi_value += curr_isi * (curr_t - last_t)
curr_isi = fabs(nu1 - nu2) / fmax(nu1, nu2)
@@ -178,44 +192,60 @@ def spike_distance_cython(double[:] t1, double[:] t2,
cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
cdef double isi1, isi2, s1, s2
cdef double y_start, y_end, t_last, t_current, spike_value
+ cdef double[:] t_aux1 = np.empty(2)
+ cdef double[:] t_aux2 = np.empty(2)
spike_value = 0.0
N1 = len(t1)
N2 = len(t2)
+ # we can assume at least one spike per spike train
+ assert N1 > 0
+ assert N2 > 0
+
+
with nogil: # release the interpreter to allow multithreading
t_last = t_start
- t_p1 = t_start
- t_p2 = t_start
+ # auxiliary spikes for edge correction - consistent with first/last ISI
+ t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start
+ t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end
+ t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start
+ t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end
+ # print "aux spikes %.15f, %.15f ; %.15f, %.15f" % (t_aux1[0], t_aux1[1], t_aux2[0], t_aux2[1])
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
if t1[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f1 = t1[0]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
dt_p1 = dt_f1
- s1 = dt_p1*(t_f1-t_start)/isi1
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
index1 = -1
- else:
- t_f1 = t1[1]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- dt_p1 = 0.0
- isi1 = t1[1]-t1[0]
+ else: # t1[0] == t_start
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
s1 = dt_p1
index1 = 0
if t2[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f2 = t2[0]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
dt_p2 = dt_f2
- isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
- s2 = dt_p2*(t_f2-t_start)/isi2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
index2 = -1
- else:
- t_f2 = t2[1]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
- dt_p2 = 0.0
- isi2 = t2[1]-t2[0]
+ else: # t2[0] == t_start
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ # dt_p2 = t_start-t_p1 # 0.0
+ dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
s2 = dt_p2
index2 = 0
@@ -237,7 +267,7 @@ def spike_distance_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
t_curr = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
@@ -249,14 +279,17 @@ def spike_distance_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index1 < N1-1:
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1-t_p1
s1 = dt_p1
else:
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
# s1 needs adjustment due to change of isi1
- s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
# alternative definition without second normalization
@@ -272,7 +305,7 @@ def spike_distance_cython(double[:] t1, double[:] t2,
if index2 < N2-1:
t_f2 = t2[index2+1]
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
t_curr = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
@@ -284,14 +317,17 @@ def spike_distance_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index2 < N2-1:
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2-t_p2
s2 = dt_p2
else:
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
# s2 needs adjustment due to change of isi2
- s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
# s1 is the same as above, thus we can compute y2 immediately
y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
# alternative definition without second normalization
@@ -311,27 +347,29 @@ def spike_distance_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1 - t_p1
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
if index2 < N2-1:
t_f2 = t2[index2+1]
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2 - t_p2
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
index += 1
t_last = t_curr
# isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
# isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
- s1 = dt_f1*(t_end-t1[N1-1])/isi1
- s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
+ s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
# alternative definition without second normalization
# y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
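The key change in this kernel is the edge correction: instead of treating t_start and t_end directly as previous/next spikes, auxiliary spikes are placed by mirroring the first and last inter-spike interval beyond the edges (falling back to the edges themselves for single-spike trains), and the corner terms s1/s2 lose their extra ISI scaling ("Eero's correction"). A small Python sketch of the auxiliary-spike construction, following the conventions of the Cython code above::

    import numpy as np

    def aux_spikes(t, t_start, t_end):
        """Auxiliary spike times used for the edge correction (sketch)."""
        n = len(t)
        if n > 1:
            # mirror the first/last ISI beyond the edges, but never move the
            # auxiliary spike inside the interval
            left = min(t_start, 2*t[0] - t[1])
            right = max(t_end, 2*t[n-1] - t[n-2])
        else:
            # a single spike: the interval edges serve as auxiliary spikes
            left, right = t_start, t_end
        return left, right

    lo, hi = aux_spikes(np.array([1.0, 2.5, 3.0]), 0.0, 4.0)
    print(lo, hi)  # -0.5 4.0 - the left edge is mirrored, the right one is not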
diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 4663f2e..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -63,18 +63,18 @@ def isi_profile_cython(double[:] s1, double[:] s2,
# first interspike interval - check if a spike exists at the start time
if s1[0] > t_start:
# edge correction
- nu1 = fmax(s1[0]-t_start, s1[1]-s1[0])
+ nu1 = fmax(s1[0]-t_start, s1[1]-s1[0]) if N1 > 1 else s1[0]-t_start
index1 = -1
else:
- nu1 = s1[1]-s1[0]
+ nu1 = s1[1]-s1[0] if N1 > 1 else t_end-s1[0]
index1 = 0
if s2[0] > t_start:
# edge correction
- nu2 = fmax(s2[0]-t_start, s2[1]-s2[0])
+ nu2 = fmax(s2[0]-t_start, s2[1]-s2[0]) if N2 > 1 else s2[0]-t_start
index2 = -1
else:
- nu2 = s2[1]-s2[0]
+ nu2 = s2[1]-s2[0] if N2 > 1 else t_end-s2[0]
index2 = 0
isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2)
@@ -92,7 +92,8 @@ def isi_profile_cython(double[:] s1, double[:] s2,
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
elif (index2 < N2-1) and ((index1 == N1-1) or
(s1[index1+1] > s2[index2+1])):
index2 += 1
@@ -101,7 +102,8 @@ def isi_profile_cython(double[:] s1, double[:] s2,
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
else: # s1[index1+1] == s2[index2+1]
index1 += 1
index2 += 1
@@ -110,12 +112,14 @@ def isi_profile_cython(double[:] s1, double[:] s2,
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = fmax(t_end-s1[index1], nu1)
+ nu1 = fmax(t_end-s1[index1], nu1) if N1 > 1 \
+ else t_end-s1[index1]
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = fmax(t_end-s2[index2], nu2)
+ nu2 = fmax(t_end-s2[index2], nu2) if N2 > 1 \
+ else t_end-s2[index2]
# compute the corresponding isi-distance
isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2)
index += 1
@@ -181,6 +185,8 @@ def spike_profile_cython(double[:] t1, double[:] t2,
cdef double[:] spike_events
cdef double[:] y_starts
cdef double[:] y_ends
+ cdef double[:] t_aux1 = np.empty(2)
+ cdef double[:] t_aux2 = np.empty(2)
cdef int N1, N2, index1, index2, index
cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
@@ -189,6 +195,10 @@ def spike_profile_cython(double[:] t1, double[:] t2,
N1 = len(t1)
N2 = len(t2)
+ # we can assume at least one spike per spike train
+ assert N1 > 0
+ assert N2 > 0
+
spike_events = np.empty(N1+N2+2)
y_starts = np.empty(len(spike_events)-1)
@@ -196,36 +206,45 @@ def spike_profile_cython(double[:] t1, double[:] t2,
with nogil: # release the interpreter to allow multithreading
spike_events[0] = t_start
- t_p1 = t_start
- t_p2 = t_start
+ # t_p1 = t_start
+ # t_p2 = t_start
+ # auxiliary spikes for edge correction - consistent with first/last ISI
+ t_aux1[0] = fmin(t_start, 2*t1[0]-t1[1]) if N1 > 1 else t_start
+ t_aux1[1] = fmax(t_end, 2*t1[N1-1]-t1[N1-2]) if N1 > 1 else t_end
+ t_aux2[0] = fmin(t_start, 2*t2[0]-t2[1]) if N2 > 1 else t_start
+ t_aux2[1] = fmax(t_end, 2*t2[N2-1]-t2[N2-2]) if N2 > 1 else t_end
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
if t1[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f1 = t1[0]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
dt_p1 = dt_f1
- s1 = dt_p1*(t_f1-t_start)/isi1
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
index1 = -1
else:
- t_f1 = t1[1]
- dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
- dt_p1 = 0.0
- isi1 = t1[1]-t1[0]
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ dt_p1 = get_min_dist_cython(t_p1, t2, N2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
s1 = dt_p1
index1 = 0
if t2[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f2 = t2[0]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
dt_p2 = dt_f2
- isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
- s2 = dt_p2*(t_f2-t_start)/isi2
+ isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
index2 = -1
else:
- t_f2 = t2[1]
- dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
- dt_p2 = 0.0
- isi2 = t2[1]-t2[0]
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ dt_p2 = get_min_dist_cython(t_p2, t1, N1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
s2 = dt_p2
index2 = 0
@@ -245,7 +264,7 @@ def spike_profile_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
spike_events[index] = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1,
@@ -253,14 +272,17 @@ def spike_profile_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index1 < N1-1:
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1-t_p1
s1 = dt_p1
else:
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
# s1 needs adjustment due to change of isi1
- s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1,
isi2)
@@ -275,7 +297,7 @@ def spike_profile_cython(double[:] t1, double[:] t2,
if index2 < N2-1:
t_f2 = t2[index2+1]
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
spike_events[index] = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1,
@@ -283,14 +305,17 @@ def spike_profile_cython(double[:] t1, double[:] t2,
# now the next interval start value
if index2 < N2-1:
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2-t_p2
s2 = dt_p2
else:
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
# s2 needs adjustment due to change of isi2
- s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's correction: no adjustment
+ s2 = dt_p2
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
else: # t_f1 == t_f2 - generate only one event
@@ -306,32 +331,31 @@ def spike_profile_cython(double[:] t1, double[:] t2,
if index1 < N1-1:
t_f1 = t1[index1+1]
dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2,
- t_start, t_end)
+ t_aux2[0], t_aux2[1])
isi1 = t_f1 - t_p1
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
dt_f1 = dt_p1
- isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
if index2 < N2-1:
t_f2 = t2[index2+1]
dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1,
- t_start, t_end)
+ t_aux1[0], t_aux1[1])
isi2 = t_f2 - t_p2
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
dt_f2 = dt_p2
- isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
index += 1
# the last event is the interval end
if spike_events[index-1] == t_end:
index -= 1
else:
spike_events[index] = t_end
- # the ending value of the last interval
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
- s1 = dt_f1*(t_end-t1[N1-1])/isi1
- s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ s1 = dt_f1
+ s2 = dt_f2
y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
# end nogil
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 5c4c75d..11fbe62 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -28,17 +28,17 @@ def isi_distance_python(s1, s2, t_start, t_end):
isi_values = np.empty(len(spike_events) - 1)
if s1[0] > t_start:
# edge correction
- nu1 = max(s1[0] - t_start, s1[1] - s1[0])
+ nu1 = max(s1[0] - t_start, s1[1] - s1[0]) if N1 > 1 else s1[0]-t_start
index1 = -1
else:
- nu1 = s1[1] - s1[0]
+ nu1 = s1[1] - s1[0] if N1 > 1 else t_end-s1[0]
index1 = 0
if s2[0] > t_start:
# edge correction
- nu2 = max(s2[0] - t_start, s2[1] - s2[0])
+ nu2 = max(s2[0] - t_start, s2[1] - s2[0]) if N2 > 1 else s2[0]-t_start
index2 = -1
else:
- nu2 = s2[1] - s2[0]
+ nu2 = s2[1] - s2[0] if N2 > 1 else t_end-s2[0]
index2 = 0
isi_values[0] = abs(nu1 - nu2) / max(nu1, nu2)
@@ -52,7 +52,8 @@ def isi_distance_python(s1, s2, t_start, t_end):
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2])
+ nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \
+ else t_end-s1[N1-1]
elif (index2 < N2-1) and (index1 == N1-1 or
s1[index1+1] > s2[index2+1]):
@@ -62,7 +63,8 @@ def isi_distance_python(s1, s2, t_start, t_end):
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2])
+ nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \
+ else t_end-s2[N2-1]
else: # s1[index1 + 1] == s2[index2 + 1]
index1 += 1
@@ -72,12 +74,14 @@ def isi_distance_python(s1, s2, t_start, t_end):
nu1 = s1[index1+1]-s1[index1]
else:
# edge correction
- nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2])
+ nu1 = max(t_end-s1[N1-1], s1[N1-1]-s1[N1-2]) if N1 > 1 \
+ else t_end-s1[N1-1]
if index2 < N2-1:
nu2 = s2[index2+1]-s2[index2]
else:
# edge correction
- nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2])
+ nu2 = max(t_end-s2[N2-1], s2[N2-1]-s2[N2-2]) if N2 > 1 \
+ else t_end-s2[N2-1]
# compute the corresponding isi-distance
isi_values[index] = abs(nu1 - nu2) / \
max(nu1, nu2)
@@ -144,36 +148,48 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
y_starts = np.empty(len(spike_events)-1)
y_ends = np.empty(len(spike_events)-1)
+ t_aux1 = np.zeros(2)
+ t_aux2 = np.zeros(2)
+ t_aux1[0] = min(t_start, t1[0]-(t1[1]-t1[0])) if N1 > 1 else t_start
+ t_aux1[1] = max(t_end, t1[N1-1]+(t1[N1-1]-t1[N1-2])) if N1 > 1 else t_end
+ t_aux2[0] = min(t_start, t2[0]-(t2[1]-t2[0])) if N2 > 1 else t_start
+ t_aux2[1] = max(t_end, t2[N2-1]+(t2[N2-1]-t2[N2-2])) if N2 > 1 else t_end
+ t_p1 = t_start if (t1[0] == t_start) else t_aux1[0]
+ t_p2 = t_start if (t2[0] == t_start) else t_aux2[0]
+
+ # print "t_aux1", t_aux1, ", t_aux2:", t_aux2
+
spike_events[0] = t_start
- t_p1 = t_start
- t_p2 = t_start
if t1[0] > t_start:
t_f1 = t1[0]
- dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end)
+ dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1])
dt_p1 = dt_f1
- isi1 = max(t_f1-t_start, t1[1]-t1[0])
- s1 = dt_p1*(t_f1-t_start)/isi1
+ isi1 = max(t_f1-t_start, t1[1]-t1[0]) if N1 > 1 else t_f1-t_start
+ # s1 = dt_p1*(t_f1-t_start)/isi1
+ s1 = dt_p1
index1 = -1
else:
- dt_p1 = 0.0
- t_f1 = t1[1]
- dt_f1 = get_min_dist(t_f1, t2, 0, t_start, t_end)
- isi1 = t1[1]-t1[0]
+ # dt_p1 = t_start-t_p2
+ t_f1 = t1[1] if N1 > 1 else t_end
+ dt_p1 = get_min_dist(t_p1, t2, 0, t_aux2[0], t_aux2[1])
+ dt_f1 = get_min_dist(t_f1, t2, 0, t_aux2[0], t_aux2[1])
+ isi1 = t_f1-t1[0]
s1 = dt_p1
index1 = 0
if t2[0] > t_start:
# dt_p1 = t2[0]-t_start
t_f2 = t2[0]
- dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end)
+ dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1])
dt_p2 = dt_f2
- isi2 = max(t_f2-t_start, t2[1]-t2[0])
- s2 = dt_p2*(t_f2-t_start)/isi2
+ isi2 = max(t_f2-t_start, t2[1]-t2[0]) if N2 > 1 else t_f2-t_start
+ # s2 = dt_p2*(t_f2-t_start)/isi2
+ s2 = dt_p2
index2 = -1
else:
- dt_p2 = 0.0
- t_f2 = t2[1]
- dt_f2 = get_min_dist(t_f2, t1, 0, t_start, t_end)
- isi2 = t2[1]-t2[0]
+ t_f2 = t2[1] if N2 > 1 else t_end
+ dt_p2 = get_min_dist(t_p2, t1, 0, t_aux1[0], t_aux1[1])
+ dt_f2 = get_min_dist(t_f2, t1, 0, t_aux1[0], t_aux1[1])
+ isi2 = t_f2-t2[0]
s2 = dt_p2
index2 = 0
@@ -193,20 +209,23 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
if index1 < N1-1:
t_f1 = t1[index1+1]
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
spike_events[index] = t_p1
s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
# now the next interval start value
if index1 < N1-1:
- dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end)
+ dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1])
isi1 = t_f1-t_p1
s1 = dt_p1
else:
dt_f1 = dt_p1
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
# s1 needs adjustment due to change of isi1
- s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # s1 = dt_p1*(t_end-t1[N1-1])/isi1
+ # Eero's correction: no adjustment
+ s1 = dt_p1
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
@@ -220,20 +239,23 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
if index2 < N2-1:
t_f2 = t2[index2+1]
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
spike_events[index] = t_p2
s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
# now the next interval start value
if index2 < N2-1:
- dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end)
+ dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1])
isi2 = t_f2-t_p2
s2 = dt_p2
else:
dt_f2 = dt_p2
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
# s2 needs adjustment due to change of isi2
- s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # s2 = dt_p2*(t_end-t2[N2-1])/isi2
+ # Eero's adjustment: no correction
+ s2 = dt_p2
# s2 is the same as above, thus we can compute y2 immediately
y_starts[index] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
else: # t_f1 == t_f2 - generate only one event
@@ -248,31 +270,31 @@ def spike_distance_python(spikes1, spikes2, t_start, t_end):
y_starts[index] = 0.0
if index1 < N1-1:
t_f1 = t1[index1+1]
- dt_f1 = get_min_dist(t_f1, t2, index2, t_start, t_end)
+ dt_f1 = get_min_dist(t_f1, t2, index2, t_aux2[0], t_aux2[1])
isi1 = t_f1 - t_p1
else:
- t_f1 = t_end
+ t_f1 = t_aux1[1]
dt_f1 = dt_p1
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+ isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) if N1 > 1 \
+ else t_end-t1[N1-1]
if index2 < N2-1:
t_f2 = t2[index2+1]
- dt_f2 = get_min_dist(t_f2, t1, index1, t_start, t_end)
+ dt_f2 = get_min_dist(t_f2, t1, index1, t_aux1[0], t_aux1[1])
isi2 = t_f2 - t_p2
else:
- t_f2 = t_end
+ t_f2 = t_aux2[1]
dt_f2 = dt_p2
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+ isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) if N2 > 1 \
+ else t_end-t2[N2-1]
index += 1
+
# the last event is the interval end
if spike_events[index-1] == t_end:
index -= 1
else:
spike_events[index] = t_end
- # the ending value of the last interval
- isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
- isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
- s1 = dt_f1*(t_end-t1[N1-1])/isi1
- s2 = dt_f2*(t_end-t2[N2-1])/isi2
+ s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
+ s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
y_ends[index-1] = (s1*isi2 + s2*isi1) / (0.5*(isi1+isi2)**2)
# use only the data added above
diff --git a/pyspike/generic.py b/pyspike/generic.py
index 904c3c2..5ad06f1 100644
--- a/pyspike/generic.py
+++ b/pyspike/generic.py
@@ -7,6 +7,7 @@ Copyright 2015, Mario Mulansky <mario.mulansky@gmx.net>
Distributed under the BSD License
"""
+from __future__ import division
import numpy as np
@@ -38,14 +39,14 @@ def _generic_profile_multi(spike_trains, pair_distance_func, indices=None):
L1 = len(pairs1)
if L1 > 1:
dist_prof1 = divide_and_conquer(pairs1[:L1//2],
- pairs1[int(L1//2):])
+ pairs1[L1//2:])
else:
dist_prof1 = pair_distance_func(spike_trains[pairs1[0][0]],
spike_trains[pairs1[0][1]])
L2 = len(pairs2)
if L2 > 1:
dist_prof2 = divide_and_conquer(pairs2[:L2//2],
- pairs2[int(L2//2):])
+ pairs2[L2//2:])
else:
dist_prof2 = pair_distance_func(spike_trains[pairs2[0][0]],
spike_trains[pairs2[0][1]])
@@ -137,8 +138,8 @@ def _generic_distance_matrix(spike_trains, dist_function,
assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
"Invalid index list."
# generate a list of possible index pairs
- pairs = [(i, j) for i in xrange(len(indices))
- for j in xrange(i+1, len(indices))]
+ pairs = [(i, j) for i in range(len(indices))
+ for j in range(i+1, len(indices))]
distance_matrix = np.zeros((len(indices), len(indices)))
for i, j in pairs:
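These are plain Python 3 compatibility fixes: with true division enabled, `/` no longer floors, so the index arithmetic relies on `//` (the `int(...)` wrapper becomes redundant), and `xrange` is replaced by `range`. A quick check of the slicing behaviour the divide-and-conquer averaging in _generic_profile_multi relies on::

    from __future__ import division

    pairs = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
    L = len(pairs)
    print(L / 2, L // 2)               # 3.0 3  -> only '//' yields a valid index
    print(pairs[:L//2], pairs[L//2:])  # the two halves used by the recursion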
diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py
index e50f203..e91dce2 100644
--- a/pyspike/isi_distance.py
+++ b/pyspike/isi_distance.py
@@ -2,6 +2,8 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
+from __future__ import absolute_import
+
import pyspike
from pyspike import PieceWiseConstFunc
from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
@@ -11,11 +13,48 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
############################################################
# isi_profile
############################################################
-def isi_profile(spike_train1, spike_train2):
- """ Computes the isi-distance profile :math:`I(t)` of the two given
- spike trains. Retruns the profile as a PieceWiseConstFunc object. The
+def isi_profile(*args, **kwargs):
+ """ Computes the isi-distance profile :math:`I(t)` of the given
+ spike trains. Returns the profile as a PieceWiseConstFunc object. The
ISI-values are defined positive :math:`I(t)>=0`.
+ Valid call structures::
+
+ isi_profile(st1, st2) # returns the bi-variate profile
+ isi_profile(st1, st2, st3) # multi-variate profile of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ isi_profile(spike_trains) # profile of the list of spike trains
+ isi_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate ISI distance profile for a set of spike trains is defined
+ as the average ISI-profile of all pairs of spike-trains:
+
+ .. math:: <I(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
+
+ where the sum goes over all pairs <i,j>
+
+
+ :returns: The isi-distance profile :math:`I(t)`
+ :rtype: :class:`.PieceWiseConstFunc`
+ """
+ if len(args) == 1:
+ return isi_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return isi_profile_bi(args[0], args[1])
+ else:
+ return isi_profile_multi(args)
+
+
+############################################################
+# isi_profile_bi
+############################################################
+def isi_profile_bi(spike_train1, spike_train2):
+ """ Specific function to compute a bivariate ISI-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.isi_profile` to compute ISI-profiles.
+
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
:param spike_train2: Second spike train.
@@ -32,7 +71,7 @@ def isi_profile(spike_train1, spike_train2):
# load cython implementation
try:
- from cython.cython_profiles import isi_profile_cython \
+ from .cython.cython_profiles import isi_profile_cython \
as isi_profile_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -40,7 +79,7 @@ def isi_profile(spike_train1, spike_train2):
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import isi_distance_python \
+ from .cython.python_backend import isi_distance_python \
as isi_profile_impl
times, values = isi_profile_impl(spike_train1.get_spikes_non_empty(),
@@ -50,15 +89,76 @@ Falling back to slow python backend.")
############################################################
+# isi_profile_multi
+############################################################
+def isi_profile_multi(spike_trains, indices=None):
+ """ Specific function to compute the multivariate ISI-profile for a set of
+ spike trains. This is a deprecated function and should not be called
+ directly. Use :func:`.isi_profile` to compute ISI-profiles.
+
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type state: list or None
+ :returns: The averaged isi profile :math:`<I(t)>`
+ :rtype: :class:`.PieceWiseConstFunc`
+ """
+ average_dist, M = _generic_profile_multi(spike_trains, isi_profile_bi,
+ indices)
+ average_dist.mul_scalar(1.0/M) # normalize
+ return average_dist
+
+
+############################################################
# isi_distance
############################################################
-def isi_distance(spike_train1, spike_train2, interval=None):
+def isi_distance(*args, **kwargs):
""" Computes the ISI-distance :math:`D_I` of the given spike trains. The
isi-distance is the integral over the isi distance profile
:math:`I(t)`:
.. math:: D_I = \\int_{T_0}^{T_1} I(t) dt.
+ In the multivariate case it is the integral over the multivariate
+ ISI-profile, i.e. the average profile over all spike train pairs:
+
+ .. math:: D_I = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
+
+ where the sum goes over all pairs <i,j>
+
+
+
+ Valid call structures::
+
+ isi_distance(st1, st2) # returns the bi-variate distance
+ isi_distance(st1, st2, st3) # multi-variate distance of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ isi_distance(spike_trains) # distance of the list of spike trains
+ isi_distance(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ :returns: The isi-distance :math:`D_I`.
+ :rtype: double
+ """
+
+ if len(args) == 1:
+ return isi_distance_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return isi_distance_bi(args[0], args[1], **kwargs)
+ else:
+ return isi_distance_multi(args, **kwargs)
+
+
+############################################################
+# _isi_distance_bi
+############################################################
+def isi_distance_bi(spike_train1, spike_train2, interval=None):
+ """ Specific function to compute the bivariate ISI-distance.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.isi_distance` to compute ISI-distances.
+
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
:param spike_train2: Second spike train.
@@ -74,7 +174,7 @@ def isi_distance(spike_train1, spike_train2, interval=None):
# distance over the whole interval is requested: use specific function
# for optimal performance
try:
- from cython.cython_distances import isi_distance_cython \
+ from .cython.cython_distances import isi_distance_cython \
as isi_distance_impl
return isi_distance_impl(spike_train1.get_spikes_non_empty(),
@@ -82,46 +182,19 @@ def isi_distance(spike_train1, spike_train2, interval=None):
spike_train1.t_start, spike_train1.t_end)
except ImportError:
# Cython backend not available: fall back to profile averaging
- return isi_profile(spike_train1, spike_train2).avrg(interval)
+ return isi_profile_bi(spike_train1, spike_train2).avrg(interval)
else:
# some specific interval is provided: use profile
- return isi_profile(spike_train1, spike_train2).avrg(interval)
-
-
-############################################################
-# isi_profile_multi
-############################################################
-def isi_profile_multi(spike_trains, indices=None):
- """ computes the multi-variate isi distance profile for a set of spike
- trains. That is the average isi-distance of all pairs of spike-trains:
-
- .. math:: <I(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
-
- where the sum goes over all pairs <i,j>
-
- :param spike_trains: list of :class:`.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type state: list or None
- :returns: The averaged isi profile :math:`<I(t)>`
- :rtype: :class:`.PieceWiseConstFunc`
- """
- average_dist, M = _generic_profile_multi(spike_trains, isi_profile,
- indices)
- average_dist.mul_scalar(1.0/M) # normalize
- return average_dist
+ return isi_profile_bi(spike_train1, spike_train2).avrg(interval)
############################################################
# isi_distance_multi
############################################################
def isi_distance_multi(spike_trains, indices=None, interval=None):
- """ computes the multi-variate isi-distance for a set of spike-trains.
- That is the time average of the multi-variate spike profile:
-
- .. math:: D_I = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>} I^{i,j},
-
- where the sum goes over all pairs <i,j>
+ """ Specific function to compute the multivariate ISI-distance.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.isi_distance` to compute ISI-distances.
:param spike_trains: list of :class:`.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
@@ -132,7 +205,7 @@ def isi_distance_multi(spike_trains, indices=None, interval=None):
:returns: The time-averaged multivariate ISI distance :math:`D_I`
:rtype: double
"""
- return _generic_distance_multi(spike_trains, isi_distance, indices,
+ return _generic_distance_multi(spike_trains, isi_distance_bi, indices,
interval)
@@ -153,5 +226,5 @@ def isi_distance_matrix(spike_trains, indices=None, interval=None):
:math:`D_{I}^{ij}`
:rtype: np.array
"""
- return _generic_distance_matrix(spike_trains, isi_distance,
- indices, interval)
+ return _generic_distance_matrix(spike_trains, isi_distance_bi,
+ indices=indices, interval=interval)
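isi_profile and isi_distance now dispatch on their arguments: a single argument is taken as a list of spike trains (multivariate), two arguments give the classic bivariate result, and more than two are treated as a multivariate call on the given trains; the *_bi and *_multi variants remain as thin, deprecated wrappers. A hedged usage sketch with arbitrary spike times::

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([0.5, 1.5, 2.5], (0.0, 3.0))
    st2 = SpikeTrain([0.6, 1.4, 2.6], (0.0, 3.0))
    st3 = SpikeTrain([0.3, 1.9], (0.0, 3.0))

    d_bi = spk.isi_distance(st1, st2)            # bivariate distance
    d_multi = spk.isi_distance([st1, st2, st3])  # multivariate, list form
    profile = spk.isi_profile(st1, st2, st3)     # multivariate profile
    print(d_bi, d_multi, profile.avrg())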
diff --git a/pyspike/psth.py b/pyspike/psth.py
index 4027215..7cf1140 100644
--- a/pyspike/psth.py
+++ b/pyspike/psth.py
@@ -24,7 +24,7 @@ def psth(spike_trains, bin_size):
# N = len(spike_trains)
combined_spike_train = spike_trains[0].spikes
- for i in xrange(1, len(spike_trains)):
+ for i in range(1, len(spike_trains)):
combined_spike_train = np.append(combined_spike_train,
spike_trains[i].spikes)
diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py
index feea0c1..0fd86c1 100644
--- a/pyspike/spike_distance.py
+++ b/pyspike/spike_distance.py
@@ -2,6 +2,8 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
+from __future__ import absolute_import
+
import pyspike
from pyspike import PieceWiseLinFunc
from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
@@ -11,10 +13,46 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \
############################################################
# spike_profile
############################################################
-def spike_profile(spike_train1, spike_train2):
- """ Computes the spike-distance profile :math:`S(t)` of the two given spike
- trains. Returns the profile as a PieceWiseLinFunc object. The SPIKE-values
- are defined positive :math:`S(t)>=0`.
+def spike_profile(*args, **kwargs):
+ """ Computes the spike-distance profile :math:`S(t)` of the given
+ spike trains. Returns the profile as a PieceWiseLinFunc object. The
+ SPIKE-values are defined positive :math:`S(t)>=0`.
+
+ Valid call structures::
+
+ spike_profile(st1, st2) # returns the bi-variate profile
+ spike_profile(st1, st2, st3) # multi-variate profile of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_profile(spike_trains) # profile of the list of spike trains
+ spike_profile(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate spike-distance profile is defined as the average of all
+ pairs of spike-trains:
+
+ .. math:: <S(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} S^{i, j},
+
+ where the sum goes over all pairs <i,j>
+
+ :returns: The spike-distance profile :math:`S(t)`
+ :rtype: :class:`.PieceWiseLinFunc`
+ """
+ if len(args) == 1:
+ return spike_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_profile_bi(args[0], args[1])
+ else:
+ return spike_profile_multi(args)
+
+
+############################################################
+# spike_profile_bi
+############################################################
+def spike_profile_bi(spike_train1, spike_train2):
+ """ Specific function to compute a bivariate SPIKE-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_profile` to compute SPIKE-profiles.
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
@@ -32,7 +70,7 @@ def spike_profile(spike_train1, spike_train2):
# cython implementation
try:
- from cython.cython_profiles import spike_profile_cython \
+ from .cython.cython_profiles import spike_profile_cython \
as spike_profile_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -40,7 +78,7 @@ def spike_profile(spike_train1, spike_train2):
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import spike_distance_python \
+ from .cython.python_backend import spike_distance_python \
as spike_profile_impl
times, y_starts, y_ends = spike_profile_impl(
@@ -52,14 +90,74 @@ Falling back to slow python backend.")
############################################################
+# spike_profile_multi
+############################################################
+def spike_profile_multi(spike_trains, indices=None):
+ """ Specific function to compute a multivariate SPIKE-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_profile` to compute SPIKE-profiles.
+
+ :param spike_trains: list of :class:`.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :returns: The averaged spike profile :math:`<S>(t)`
+ :rtype: :class:`.PieceWiseLinFunc`
+
+ """
+ average_dist, M = _generic_profile_multi(spike_trains, spike_profile_bi,
+ indices)
+ average_dist.mul_scalar(1.0/M) # normalize
+ return average_dist
+
+
+############################################################
# spike_distance
############################################################
-def spike_distance(spike_train1, spike_train2, interval=None):
- """ Computes the spike-distance :math:`D_S` of the given spike trains. The
+def spike_distance(*args, **kwargs):
+ """ Computes the SPIKE-distance :math:`D_S` of the given spike trains. The
spike-distance is the integral over the spike distance profile
- :math:`S(t)`:
+ :math:`D(t)`:
+
+ .. math:: D_S = \\int_{T_0}^{T_1} S(t) dt.
+
+
+ Valid call structures::
+
+ spike_distance(st1, st2) # returns the bi-variate distance
+ spike_distance(st1, st2, st3) # multi-variate distance of 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_distance(spike_trains) # distance of the list of spike trains
+ spike_distance(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
- .. math:: D_S = \int_{T_0}^{T_1} S(t) dt.
+ In the multivariate case, the spike distance is given as the integral over
+ the multivariate profile, that is the average profile of all spike train
+ pairs:
+
+ .. math:: D_S = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>}
+ S^{i, j} dt
+
+ :returns: The spike-distance :math:`D_S`.
+ :rtype: double
+ """
+
+ if len(args) == 1:
+ return spike_distance_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_distance_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_distance_multi(args, **kwargs)
+
+
+############################################################
+# spike_distance_bi
+############################################################
+def spike_distance_bi(spike_train1, spike_train2, interval=None):
+ """ Specific function to compute a bivariate SPIKE-distance. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_distance` to compute SPIKE-distances.
:param spike_train1: First spike train.
:type spike_train1: :class:`.SpikeTrain`
@@ -76,7 +174,7 @@ def spike_distance(spike_train1, spike_train2, interval=None):
# distance over the whole interval is requested: use specific function
# for optimal performance
try:
- from cython.cython_distances import spike_distance_cython \
+ from .cython.cython_distances import spike_distance_cython \
as spike_distance_impl
return spike_distance_impl(spike_train1.get_spikes_non_empty(),
spike_train2.get_spikes_non_empty(),
@@ -84,48 +182,19 @@ def spike_distance(spike_train1, spike_train2, interval=None):
spike_train1.t_end)
except ImportError:
# Cython backend not available: fall back to average profile
- return spike_profile(spike_train1, spike_train2).avrg(interval)
+ return spike_profile_bi(spike_train1, spike_train2).avrg(interval)
else:
# some specific interval is provided: compute the whole profile
- return spike_profile(spike_train1, spike_train2).avrg(interval)
-
-
-############################################################
-# spike_profile_multi
-############################################################
-def spike_profile_multi(spike_trains, indices=None):
- """ Computes the multi-variate spike distance profile for a set of spike
- trains. That is the average spike-distance of all pairs of spike-trains:
-
- .. math:: <S(t)> = \\frac{2}{N(N-1)} \\sum_{<i,j>} S^{i, j}`,
-
- where the sum goes over all pairs <i,j>
-
- :param spike_trains: list of :class:`.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type indices: list or None
- :returns: The averaged spike profile :math:`<S>(t)`
- :rtype: :class:`.PieceWiseLinFunc`
-
- """
- average_dist, M = _generic_profile_multi(spike_trains, spike_profile,
- indices)
- average_dist.mul_scalar(1.0/M) # normalize
- return average_dist
+ return spike_profile_bi(spike_train1, spike_train2).avrg(interval)
############################################################
# spike_distance_multi
############################################################
def spike_distance_multi(spike_trains, indices=None, interval=None):
- """ Computes the multi-variate spike distance for a set of spike trains.
- That is the time average of the multi-variate spike profile:
-
- .. math:: D_S = \\int_0^T \\frac{2}{N(N-1)} \\sum_{<i,j>}
- S^{i, j} dt
-
- where the sum goes over all pairs <i,j>
+ """ Specific function to compute a multivariate SPIKE-distance. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_distance` to compute SPIKE-distances.
:param spike_trains: list of :class:`.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
@@ -137,7 +206,7 @@ def spike_distance_multi(spike_trains, indices=None, interval=None):
:returns: The averaged multi-variate spike distance :math:`D_S`.
:rtype: double
"""
- return _generic_distance_multi(spike_trains, spike_distance, indices,
+ return _generic_distance_multi(spike_trains, spike_distance_bi, indices,
interval)
@@ -158,5 +227,5 @@ def spike_distance_matrix(spike_trains, indices=None, interval=None):
:math:`D_S^{ij}`
:rtype: np.array
"""
- return _generic_distance_matrix(spike_trains, spike_distance,
+ return _generic_distance_matrix(spike_trains, spike_distance_bi,
indices, interval)
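spike_profile and spike_distance follow the same dispatching scheme; keyword arguments such as interval are forwarded to the bivariate or multivariate implementation. A hedged usage sketch::

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([0.5, 1.5, 2.5], (0.0, 3.0))
    st2 = SpikeTrain([0.7, 1.4, 2.8], (0.0, 3.0))

    d_full = spk.spike_distance(st1, st2)                       # whole interval, fast Cython path
    d_part = spk.spike_distance(st1, st2, interval=(0.0, 1.5))  # profile averaged over a sub-interval
    print(d_full, d_part)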
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 7f1bce8..37590b4 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -3,6 +3,8 @@
# Copyright 2014-2015, Mario Mulansky <mario.mulansky@gmx.net>
# Distributed under the BSD License
+from __future__ import absolute_import
+
import numpy as np
from functools import partial
import pyspike
@@ -13,11 +15,48 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
############################################################
# spike_sync_profile
############################################################
-def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
- """ Computes the spike-synchronization profile S_sync(t) of the two given
- spike trains. Returns the profile as a DiscreteFunction object. The S_sync
- values are either 1 or 0, indicating the presence or absence of a
- coincidence.
+def spike_sync_profile(*args, **kwargs):
+ """ Computes the spike-synchronization profile S_sync(t) of the given
+ spike trains. Returns the profile as a DiscreteFunction object. In the
+ bivariate case, the S_sync values are either 1 or 0, indicating the presence
+ or absence of a coincidence. For multivariate cases, the profile value at
+ each spike is the number of coincidences of that spike divided by the number
+ of spike-train pairs involving the spike train containing it, which is the
+ number of spike trains minus one (N-1).
+
+ Valid call structures::
+
+ spike_sync_profile(st1, st2) # returns the bi-variate profile
+ spike_sync_profile(st1, st2, st3) # multi-variate profile of 3 sts
+
+ sts = [st1, st2, st3, st4] # list of spike trains
+ spike_sync_profile(sts) # profile of the list of spike trains
+ spike_sync_profile(sts, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ In the multivariate case, the profile value at each spike is the number of
+ coincidences of that spike divided by the number of spike-train pairs
+ involving the spike train containing it, which is the number of spike
+ trains minus one (N-1).
+
+ :returns: The spike-sync profile :math:`S_{sync}(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ if len(args) == 1:
+ return spike_sync_profile_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_sync_profile_bi(args[0], args[1])
+ else:
+ return spike_sync_profile_multi(args)
+
+
+############################################################
+# spike_sync_profile_bi
+############################################################
+def spike_sync_profile_bi(spike_train1, spike_train2, max_tau=None):
+ """ Specific function to compute a bivariate SPIKE-Sync-profile. This is a
+ deprecated function and should not be called directly. Use
+ :func:`.spike_sync_profile` to compute SPIKE-Sync-profiles.
:param spike_train1: First spike train.
:type spike_train1: :class:`pyspike.SpikeTrain`
@@ -25,7 +64,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
:type spike_train2: :class:`pyspike.SpikeTrain`
:param max_tau: Maximum coincidence window size. If 0 or `None`, the
coincidence window has no upper bound.
- :returns: The spike-distance profile :math:`S_{sync}(t)`.
+ :returns: The spike-sync profile :math:`S_{sync}(t)`.
:rtype: :class:`pyspike.function.DiscreteFunction`
"""
@@ -37,7 +76,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
# cython implementation
try:
- from cython.cython_profiles import coincidence_profile_cython \
+ from .cython.cython_profiles import coincidence_profile_cython \
as coincidence_profile_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -45,7 +84,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None):
PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import coincidence_python \
+ from .cython.python_backend import coincidence_python \
as coincidence_profile_impl
if max_tau is None:
@@ -60,6 +99,31 @@ Falling back to slow python backend.")
############################################################
+# spike_sync_profile_multi
+############################################################
+def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None):
+ """ Specific function to compute a multivariate SPIKE-Sync-profile.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync_profile` to compute SPIKE-Sync-profiles.
+
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+ :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+
+ """
+ prof_func = partial(spike_sync_profile_bi, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ # average_dist.mul_scalar(1.0/M) # no normalization here!
+ return average_prof
+
+
+############################################################
# _spike_sync_values
############################################################
def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
@@ -73,7 +137,7 @@ def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
# distance over the whole interval is requested: use specific function
# for optimal performance
try:
- from cython.cython_distances import coincidence_value_cython \
+ from .cython.cython_distances import coincidence_value_cython \
as coincidence_value_impl
if max_tau is None:
max_tau = 0.0
@@ -85,24 +149,58 @@ def _spike_sync_values(spike_train1, spike_train2, interval, max_tau):
return c, mp
except ImportError:
# Cython backend not available: fall back to profile averaging
- return spike_sync_profile(spike_train1, spike_train2,
- max_tau).integral(interval)
+ return spike_sync_profile_bi(spike_train1, spike_train2,
+ max_tau).integral(interval)
else:
# some specific interval is provided: use profile
- return spike_sync_profile(spike_train1, spike_train2,
- max_tau).integral(interval)
+ return spike_sync_profile_bi(spike_train1, spike_train2,
+ max_tau).integral(interval)
############################################################
# spike_sync
############################################################
-def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None):
+def spike_sync(*args, **kwargs):
""" Computes the spike synchronization value SYNC of the given spike
trains. The spike synchronization value is computed as the total number
of coincidences divided by the total number of spikes:
.. math:: SYNC = \sum_n C_n / N.
+
+ Valid call structures::
+
+ spike_sync(st1, st2) # returns the bi-variate spike synchronization
+ spike_sync(st1, st2, st3) # multi-variate result for 3 spike trains
+
+ spike_trains = [st1, st2, st3, st4] # list of spike trains
+ spike_sync(spike_trains) # spike-sync of the list of spike trains
+ spike_sync(spike_trains, indices=[0, 1]) # use only the spike trains
+ # given by the indices
+
+ The multivariate SPIKE-Sync is again defined as the overall ratio of all
+ coincidence values divided by the total number of spikes.
+
+ :returns: The spike synchronization value.
+ :rtype: `double`
+ """
+
+ if len(args) == 1:
+ return spike_sync_multi(args[0], **kwargs)
+ elif len(args) == 2:
+ return spike_sync_bi(args[0], args[1], **kwargs)
+ else:
+ return spike_sync_multi(args, **kwargs)
+
+
+############################################################
+# spike_sync_bi
+############################################################
+def spike_sync_bi(spike_train1, spike_train2, interval=None, max_tau=None):
+ """ Specific function to compute a bivariate SPIKE-Sync value.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync` to compute SPIKE-Sync values.
+
:param spike_train1: First spike train.
:type spike_train1: :class:`pyspike.SpikeTrain`
:param spike_train2: Second spike train.
@@ -121,38 +219,12 @@ def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None):
############################################################
-# spike_sync_profile_multi
-############################################################
-def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None):
- """ Computes the multi-variate spike synchronization profile for a set of
- spike trains. For each spike in the set of spike trains, the multi-variate
- profile is defined as the number of coincidences divided by the number of
- spike trains pairs involving the spike train of containing this spike,
- which is the number of spike trains minus one (N-1).
-
- :param spike_trains: list of :class:`pyspike.SpikeTrain`
- :param indices: list of indices defining which spike trains to use,
- if None all given spike trains are used (default=None)
- :type indices: list or None
- :param max_tau: Maximum coincidence window size. If 0 or `None`, the
- coincidence window has no upper bound.
- :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
- :rtype: :class:`pyspike.function.DiscreteFunction`
-
- """
- prof_func = partial(spike_sync_profile, max_tau=max_tau)
- average_prof, M = _generic_profile_multi(spike_trains, prof_func,
- indices)
- # average_dist.mul_scalar(1.0/M) # no normalization here!
- return average_prof
-
-
-############################################################
# spike_sync_multi
############################################################
def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None):
- """ Computes the multi-variate spike synchronization value for a set of
- spike trains.
+ """ Specific function to compute a multivariate SPIKE-Sync value.
+ This is a deprecated function and should not be called directly. Use
+ :func:`.spike_sync` to compute SPIKE-Sync values.
:param spike_trains: list of :class:`pyspike.SpikeTrain`
:param indices: list of indices defining which spike trains to use,
@@ -209,7 +281,7 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
:rtype: np.array
"""
- dist_func = partial(spike_sync, max_tau=max_tau)
+ dist_func = partial(spike_sync_bi, max_tau=max_tau)
return _generic_distance_matrix(spike_trains, dist_func,
indices, interval)
@@ -228,7 +300,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
# cython implementation
try:
- from cython.cython_profiles import coincidence_single_profile_cython \
+ from .cython.cython_profiles import coincidence_single_profile_cython \
as coincidence_impl
except ImportError:
if not(pyspike.disable_backend_warning):
@@ -237,7 +309,7 @@ sure that PySpike is installed by running\n \
'python setup.py build_ext --inplace'!\n \
Falling back to slow python backend.")
# use python backend
- from cython.python_backend import coincidence_single_python \
+ from .cython.python_backend import coincidence_single_python \
as coincidence_impl
if max_tau is None:
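spike_sync and spike_sync_profile are overloaded in the same way; max_tau is passed through the keyword arguments to the bivariate kernels. A hedged usage sketch::

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([0.50, 1.50, 2.50], (0.0, 3.0))
    st2 = SpikeTrain([0.52, 1.90, 2.51], (0.0, 3.0))
    st3 = SpikeTrain([0.51, 1.49], (0.0, 3.0))

    s_bi = spk.spike_sync(st1, st2, max_tau=0.1)             # bivariate SPIKE-Sync
    s_multi = spk.spike_sync([st1, st2, st3], max_tau=0.1)   # multivariate value
    profile = spk.spike_sync_profile(st1, st2)               # DiscreteFunc profile
    print(s_bi, s_multi)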