author     Mario Mulansky <mario.mulansky@gmx.net>   2015-08-26 17:40:09 +0200
committer  Mario Mulansky <mario.mulansky@gmx.net>   2018-06-02 12:58:46 -0700
commit     827be39a1646f1e518f7210b8943006f5741144d (patch)
tree       4c8be864244758fc10da5746ffe193de650f58f1
parent     0c710a2450a055091fe6de2313d6240671d1c74d (diff)
further refactoring of directionality
-rw-r--r--  pyspike/__init__.py                          7
-rw-r--r--  pyspike/cython/cython_directionality.pyx   109
-rw-r--r--  pyspike/spike_directionality.py            232
-rw-r--r--  test/test_directionality.py                 80
4 files changed, 311 insertions, 117 deletions
diff --git a/pyspike/__init__.py b/pyspike/__init__.py
index 7fa5265..10d2936 100644
--- a/pyspike/__init__.py
+++ b/pyspike/__init__.py
@@ -28,9 +28,10 @@ from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \
merge_spike_trains, generate_poisson_spikes
from spike_directionality import spike_directionality, \
- spike_directionality_matrix, spike_train_order_profile, \
- optimal_spike_train_order_from_matrix, optimal_spike_train_order, \
- permutate_matrix
+ spike_directionality_profiles, spike_directionality_matrix, \
+ spike_train_order_profile, spike_train_order, \
+ spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \
+ optimal_spike_train_order, permutate_matrix
# define the __version__ following
# http://stackoverflow.com/questions/17583443
diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx
index e1f63c4..ac37690 100644
--- a/pyspike/cython/cython_directionality.pyx
+++ b/pyspike/cython/cython_directionality.pyx
@@ -128,21 +128,68 @@ def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2,
return st, a, mp
+############################################################
+# spike_train_order_cython
+############################################################
+def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
+ double t_start, double t_end, double max_tau):
+
+ cdef int N1 = len(spikes1)
+ cdef int N2 = len(spikes2)
+ cdef int i = -1
+ cdef int j = -1
+ cdef int d = 0
+ cdef int mp = 0
+ cdef double interval = t_end - t_start
+ cdef double tau
+ while i + j < N1 + N2 - 2:
+ if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
+ i += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if j > -1 and spikes1[i]-spikes2[j] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 2 appeared before spike in spike train 1
+                # both spikes are marked with -1, giving a total contribution of -2
+ d -= 2
+ elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
+ j += 1
+ mp += 1
+ tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+ if i > -1 and spikes2[j]-spikes1[i] < tau:
+ # coincidence between the current spike and the previous spike
+ # spike in spike train 1 appeared before spike in spike train 2
+                # both spikes are marked with +1, giving a total contribution of +2
+ d += 2
+ else: # spikes1[i+1] = spikes2[j+1]
+ # advance in both spike trains
+ j += 1
+ i += 1
+ # add only one event with multiplicity 2, but no asymmetry counting
+ mp += 2
+
+ if d == 0 and mp == 0:
+        # empty spike trains -> spike train order = 1 by definition
+ d = 1
+ mp = 1
+
+ return d, mp
+
############################################################
-# spike_order_values_cython
+# spike_directionality_profiles_cython
############################################################
-def spike_order_values_cython(double[:] spikes1,
- double[:] spikes2,
- double t_start, double t_end,
- double max_tau):
+def spike_directionality_profiles_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
cdef int N1 = len(spikes1)
cdef int N2 = len(spikes2)
cdef int i = -1
cdef int j = -1
- cdef double[:] a1 = np.zeros(N1) # asymmetry values
- cdef double[:] a2 = np.zeros(N2) # asymmetry values
+ cdef double[:] d1 = np.zeros(N1) # directionality values
+ cdef double[:] d2 = np.zeros(N2) # directionality values
cdef double interval = t_end - t_start
cdef double tau
while i + j < N1 + N2 - 2:
@@ -153,8 +200,8 @@ def spike_order_values_cython(double[:] spikes1,
# coincidence between the current spike and the previous spike
# spike from spike train 1 after spike train 2
# leading spike gets +1, following spike -1
- a1[i] = -1
- a2[j] = +1
+ d1[i] = -1
+ d2[j] = +1
elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
j += 1
tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
@@ -162,62 +209,54 @@ def spike_order_values_cython(double[:] spikes1,
# coincidence between the current spike and the previous spike
# spike from spike train 1 before spike train 2
# leading spike gets +1, following spike -1
- a1[i] = +1
- a2[j] = -1
+ d1[i] = +1
+ d2[j] = -1
else: # spikes1[i+1] = spikes2[j+1]
# advance in both spike trains
j += 1
i += 1
# equal spike times: zero asymmetry value
- a1[i] = 0
- a2[j] = 0
+ d1[i] = 0
+ d2[j] = 0
- return a1, a2
+ return d1, d2
############################################################
-# spike_train_order_cython
+# spike_directionality_cython
############################################################
-def spike_train_order_cython(double[:] spikes1, double[:] spikes2,
- double t_start, double t_end, double max_tau):
+def spike_directionality_cython(double[:] spikes1,
+ double[:] spikes2,
+ double t_start, double t_end,
+ double max_tau):
cdef int N1 = len(spikes1)
cdef int N2 = len(spikes2)
cdef int i = -1
cdef int j = -1
- cdef int asym = 0
- cdef int mp = 0
+ cdef int d = 0 # directionality value
cdef double interval = t_end - t_start
cdef double tau
while i + j < N1 + N2 - 2:
if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]):
i += 1
- mp += 1
tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
if j > -1 and spikes1[i]-spikes2[j] < tau:
# coincidence between the current spike and the previous spike
- # spike in spike train 2 appeared before spike in spike train 1
- # mark with -1
- asym -= 2
+ # spike from spike train 1 after spike train 2
+ # leading spike gets +1, following spike -1
+ d -= 1
elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]):
j += 1
- mp += 1
tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
if i > -1 and spikes2[j]-spikes1[i] < tau:
# coincidence between the current spike and the previous spike
- # spike in spike train 1 appeared before spike in spike train 2
- # mark with +1
- asym += 2
+ # spike from spike train 1 before spike train 2
+ # leading spike gets +1, following spike -1
+ d += 1
else: # spikes1[i+1] = spikes2[j+1]
# advance in both spike trains
j += 1
i += 1
- # add only one event with multiplicity 2, but no asymmetry counting
- mp += 2
-
- if asym == 0 and mp == 0:
- # empty spike trains -> spike sync = 1 by definition
- asym = 1
- mp = 1
- return asym, mp
+ return d
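The hunk above splits the old asymmetry kernel in two: spike_train_order_cython accumulates +2/-2 per coincidence (both spikes of a coincident pair carry the same sign) together with the multiplicity mp, while spike_directionality_cython sums only the +1/-1 contributions of the first spike train. As a reading aid, a rough pure-Python sketch of the order-counting loop follows; the helper _coincidence_window is a hypothetical stand-in for the Cython get_tau (assumed to be the usual adaptive window of half the minimum surrounding inter-spike interval, capped by max_tau when positive) and is not the actual backend code.

    def _coincidence_window(s1, s2, i, j, interval, max_tau):
        # assumed window: half the minimum surrounding inter-spike interval,
        # falling back to the whole interval at the edges; capped by max_tau
        m = interval
        if 0 <= i < len(s1) - 1:
            m = min(m, s1[i + 1] - s1[i])
        if i > 0:
            m = min(m, s1[i] - s1[i - 1])
        if 0 <= j < len(s2) - 1:
            m = min(m, s2[j + 1] - s2[j])
        if j > 0:
            m = min(m, s2[j] - s2[j - 1])
        m *= 0.5
        return min(m, max_tau) if max_tau > 0.0 else m

    def spike_train_order_sketch(s1, s2, t_start, t_end, max_tau=0.0):
        # mirror of spike_train_order_cython: walk both trains in time order,
        # add +2 when train 1 leads a coincidence and -2 when it follows,
        # and accumulate the multiplicity mp of all events
        N1, N2 = len(s1), len(s2)
        i = j = -1
        d = mp = 0
        interval = t_end - t_start
        while i + j < N1 + N2 - 2:
            if i < N1 - 1 and (j == N2 - 1 or s1[i + 1] < s2[j + 1]):
                i += 1
                mp += 1
                tau = _coincidence_window(s1, s2, i, j, interval, max_tau)
                if j > -1 and s1[i] - s2[j] < tau:
                    d -= 2              # train 2 led, train 1 followed
            elif j < N2 - 1 and (i == N1 - 1 or s1[i + 1] > s2[j + 1]):
                j += 1
                mp += 1
                tau = _coincidence_window(s1, s2, i, j, interval, max_tau)
                if i > -1 and s2[j] - s1[i] < tau:
                    d += 2              # train 1 led, train 2 followed
            else:                       # exactly equal spike times
                i += 1
                j += 1
                mp += 2                 # one event of multiplicity 2, no order term
        if d == 0 and mp == 0:          # empty spike trains
            d = mp = 1
        return d, mp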
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
index 0e69cb5..f608ecc 100644
--- a/pyspike/spike_directionality.py
+++ b/pyspike/spike_directionality.py
@@ -7,6 +7,8 @@ import numpy as np
from math import exp
import pyspike
from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
############################################################
@@ -21,23 +23,21 @@ def spike_directionality(spike_train1, spike_train2, normalize=True,
# for optimal performance
try:
from cython.cython_directionality import \
- spike_train_order_cython as spike_train_order_impl
+ spike_directionality_cython as spike_directionality_impl
if max_tau is None:
max_tau = 0.0
- c, mp = spike_train_order_impl(spike_train1.spikes,
- spike_train2.spikes,
- spike_train1.t_start,
- spike_train1.t_end,
- max_tau)
+ d = spike_directionality_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ c = len(spike_train1.spikes)
except ImportError:
- # Cython backend not available: fall back to profile averaging
- c, mp = _spike_directionality_profile(spike_train1,
- spike_train2,
- max_tau).integral(interval)
+ raise NotImplementedError()
if normalize:
- return 1.0*c/mp
+ return 1.0*d/c
else:
- return c
+ return d
else:
# some specific interval is provided: not yet implemented
raise NotImplementedError()
@@ -70,11 +70,11 @@ def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
############################################################
-# spike_train_order_profile
+# spike_directionality_profiles
############################################################
-def spike_train_order_profile(spike_trains, indices=None,
- interval=None, max_tau=None):
- """ Computes the spike train symmetry value for each spike in each spike
+def spike_directionality_profiles(spike_trains, indices=None,
+ interval=None, max_tau=None):
+ """ Computes the spike directionality value for each spike in each spike
train.
"""
if indices is None:
@@ -92,7 +92,7 @@ def spike_train_order_profile(spike_trains, indices=None,
# cython implementation
try:
from cython.cython_directionality import \
- spike_order_values_cython as spike_order_values_impl
+ spike_directionality_profiles_cython as profile_impl
except ImportError:
raise NotImplementedError()
# if not(pyspike.disable_backend_warning):
@@ -107,11 +107,9 @@ def spike_train_order_profile(spike_trains, indices=None,
max_tau = 0.0
for i, j in pairs:
- a1, a2 = spike_order_values_impl(spike_trains[i].spikes,
- spike_trains[j].spikes,
- spike_trains[i].t_start,
- spike_trains[i].t_end,
- max_tau)
+ a1, a2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+ spike_trains[i].t_start, spike_trains[i].t_end,
+ max_tau)
asymmetry_list[i] += a1
asymmetry_list[j] += a2
for a in asymmetry_list:
@@ -120,6 +118,114 @@ def spike_train_order_profile(spike_trains, indices=None,
############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(spike_train1, spike_train2, max_tau=None):
+ """ Computes the spike train order profile P(t) of the two given
+ spike trains. Returns the profile as a DiscreteFunction object.
+ :param spike_train1: First spike train.
+ :type spike_train1: :class:`pyspike.SpikeTrain`
+ :param spike_train2: Second spike train.
+ :type spike_train2: :class:`pyspike.SpikeTrain`
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+    :returns: The spike train order profile :math:`P(t)`.
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ # check whether the spike trains are defined for the same interval
+ assert spike_train1.t_start == spike_train2.t_start, \
+ "Given spike trains are not defined on the same interval!"
+ assert spike_train1.t_end == spike_train2.t_end, \
+ "Given spike trains are not defined on the same interval!"
+
+ # cython implementation
+ try:
+ from cython.cython_directionality import \
+ spike_train_order_profile_cython as \
+ spike_train_order_profile_impl
+ except ImportError:
+ # raise NotImplementedError()
+ if not(pyspike.disable_backend_warning):
+        print("Warning: cython_directionality not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+ # use python backend
+ from cython.directionality_python_backend import \
+ spike_train_order_python as spike_train_order_profile_impl
+
+ if max_tau is None:
+ max_tau = 0.0
+
+ times, coincidences, multiplicity \
+ = spike_train_order_profile_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+
+ return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(spike_train1, spike_train2, normalize=True,
+ interval=None, max_tau=None):
+ """ Computes the overall spike delay asymmetry value for two spike trains.
+ """
+ if interval is None:
+ # distance over the whole interval is requested: use specific function
+ # for optimal performance
+ try:
+ from cython.cython_directionality import \
+ spike_train_order_cython as spike_train_order_impl
+ if max_tau is None:
+ max_tau = 0.0
+ c, mp = spike_train_order_impl(spike_train1.spikes,
+ spike_train2.spikes,
+ spike_train1.t_start,
+ spike_train1.t_end,
+ max_tau)
+ except ImportError:
+ # Cython backend not available: fall back to profile averaging
+ c, mp = spike_train_order_profile(spike_train1, spike_train2,
+ max_tau).integral(interval)
+ if normalize:
+ return 1.0*c/mp
+ else:
+ return c
+ else:
+ # some specific interval is provided: not yet implemented
+ raise NotImplementedError()
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+ max_tau=None):
+ """ Computes the multi-variate spike delay asymmetry profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+ :param spike_trains: list of :class:`pyspike.SpikeTrain`
+ :param indices: list of indices defining which spike trains to use,
+ if None all given spike trains are used (default=None)
+ :type indices: list or None
+ :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+ coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`<P>(t)`
+ :rtype: :class:`pyspike.function.DiscreteFunction`
+ """
+ prof_func = partial(spike_train_order_profile, max_tau=max_tau)
+ average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+ indices)
+ # average_dist.mul_scalar(1.0/M) # no normalization here!
+ return average_prof
+
+
+############################################################
# optimal_spike_train_order_from_matrix
############################################################
def optimal_spike_train_order_from_matrix(D, full_output=False):
@@ -195,50 +301,50 @@ def permutate_matrix(D, p):
############################################################
# _spike_directionality_profile
############################################################
-def _spike_directionality_profile(spike_train1, spike_train2,
- max_tau=None):
- """ Computes the spike delay asymmetry profile A(t) of the two given
- spike trains. Returns the profile as a DiscreteFunction object.
+# def _spike_directionality_profile(spike_train1, spike_train2,
+# max_tau=None):
+# """ Computes the spike delay asymmetry profile A(t) of the two given
+# spike trains. Returns the profile as a DiscreteFunction object.
- :param spike_train1: First spike train.
- :type spike_train1: :class:`pyspike.SpikeTrain`
- :param spike_train2: Second spike train.
- :type spike_train2: :class:`pyspike.SpikeTrain`
- :param max_tau: Maximum coincidence window size. If 0 or `None`, the
- coincidence window has no upper bound.
- :returns: The spike-distance profile :math:`S_{sync}(t)`.
- :rtype: :class:`pyspike.function.DiscreteFunction`
+# :param spike_train1: First spike train.
+# :type spike_train1: :class:`pyspike.SpikeTrain`
+# :param spike_train2: Second spike train.
+# :type spike_train2: :class:`pyspike.SpikeTrain`
+# :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+# coincidence window has no upper bound.
+# :returns: The spike-distance profile :math:`S_{sync}(t)`.
+# :rtype: :class:`pyspike.function.DiscreteFunction`
- """
- # check whether the spike trains are defined for the same interval
- assert spike_train1.t_start == spike_train2.t_start, \
- "Given spike trains are not defined on the same interval!"
- assert spike_train1.t_end == spike_train2.t_end, \
- "Given spike trains are not defined on the same interval!"
+# """
+# # check whether the spike trains are defined for the same interval
+# assert spike_train1.t_start == spike_train2.t_start, \
+# "Given spike trains are not defined on the same interval!"
+# assert spike_train1.t_end == spike_train2.t_end, \
+# "Given spike trains are not defined on the same interval!"
- # cython implementation
- try:
- from cython.cython_directionality import \
- spike_train_order_profile_cython as \
- spike_train_order_profile_impl
- except ImportError:
- # raise NotImplementedError()
- if not(pyspike.disable_backend_warning):
- print("Warning: spike_distance_cython not found. Make sure that \
-PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
-Falling back to slow python backend.")
- # use python backend
- from cython.directionality_python_backend import \
- spike_train_order_python as spike_train_order_profile_impl
+# # cython implementation
+# try:
+# from cython.cython_directionality import \
+# spike_train_order_profile_cython as \
+# spike_train_order_profile_impl
+# except ImportError:
+# # raise NotImplementedError()
+# if not(pyspike.disable_backend_warning):
+# print("Warning: spike_distance_cython not found. Make sure that \
+# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+# Falling back to slow python backend.")
+# # use python backend
+# from cython.directionality_python_backend import \
+# spike_train_order_python as spike_train_order_profile_impl
- if max_tau is None:
- max_tau = 0.0
+# if max_tau is None:
+# max_tau = 0.0
- times, coincidences, multiplicity \
- = spike_train_order_profile_impl(spike_train1.spikes,
- spike_train2.spikes,
- spike_train1.t_start,
- spike_train1.t_end,
- max_tau)
+# times, coincidences, multiplicity \
+# = spike_train_order_profile_impl(spike_train1.spikes,
+# spike_train2.spikes,
+# spike_train1.t_start,
+# spike_train1.t_end,
+# max_tau)
- return DiscreteFunc(times, coincidences, multiplicity)
+# return DiscreteFunc(times, coincidences, multiplicity)
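With the old underscore helper commented out, the profile machinery is now exposed as spike_train_order_profile, spike_train_order and spike_train_order_profile_multi, next to the per-spike directionality functions. A minimal usage sketch of the names exported in __init__.py above; the spike trains and expected values are copied from the tests below, and the snippet assumes the Cython extensions have been built.

    import pyspike as spk
    from pyspike import SpikeTrain

    st1 = SpikeTrain([100, 200, 300], [0, 1000])
    st2 = SpikeTrain([105, 205, 300], [0, 1000])
    st3 = SpikeTrain([105, 195, 500], [0, 1000])

    # scalar measures
    print(spk.spike_directionality(st1, st2))    # 2/3: st1 mostly leads st2
    print(spk.spike_train_order(st1, st2))       # 2/3, i.e. 4 order counts over multiplicity 6

    # bivariate profile as a DiscreteFunc
    f = spk.spike_train_order_profile(st1, st2)
    print(f.avrg())                              # 2/3

    # multivariate profile over all pairs of a list of spike trains
    g = spk.spike_train_order_profile_multi([st1, st2, st3])

    # pairwise directionality matrix and per-spike directionality profiles
    D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
    profs = spk.spike_directionality_profiles([st1, st2, st3])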
diff --git a/test/test_directionality.py b/test/test_directionality.py
index 5c3da00..7ca5d58 100644
--- a/test/test_directionality.py
+++ b/test/test_directionality.py
@@ -14,28 +14,76 @@ from numpy.testing import assert_equal, assert_almost_equal, \
import pyspike as spk
from pyspike import SpikeTrain, DiscreteFunc
-from pyspike.spike_directionality import _spike_directionality_profile
+# from pyspike.spike_directionality import _spike_directionality_profile
-def test_profile():
+def test_spike_directionality():
st1 = SpikeTrain([100, 200, 300], [0, 1000])
st2 = SpikeTrain([105, 205, 300], [0, 1000])
- expected_x = np.array([0, 100, 105, 200, 205, 300, 1000])
- expected_y = np.array([1, 1, 1, 1, 1, 0, 0])
- expected_mp = np.array([1, 1, 1, 1, 1, 2, 2])
-
- f = _spike_directionality_profile(st1, st2)
-
- assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp))
- assert_almost_equal(f.avrg(), 2.0/3.0)
assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0)
assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False),
- 4.0)
+ 2.0)
+
+    # exchanging the order of the spike trains should give the exact negative value
+ assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0)
+ assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False),
+ -2.0)
+
+ st3 = SpikeTrain([105, 195, 500], [0, 1000])
+ assert_almost_equal(spk.spike_directionality(st1, st3), 0.0)
+ assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False),
+ 0.0)
+ assert_almost_equal(spk.spike_directionality(st3, st1), 0.0)
+
+ D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+ D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]])
+ assert_array_equal(D, D_expected)
+
+ dir_profs = spk.spike_directionality_profiles([st1, st2, st3])
+ assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0])
+ assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0])
+
+def test_spike_train_order():
+ st1 = SpikeTrain([100, 200, 300], [0, 1000])
+ st2 = SpikeTrain([105, 205, 300], [0, 1000])
st3 = SpikeTrain([105, 195, 500], [0, 1000])
- expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000])
- expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0])
- expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1])
- f = _spike_directionality_profile(st1, st3)
- assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp))
+ expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000])
+ expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0])
+ expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2])
+
+ f = spk.spike_train_order_profile(st1, st2)
+
+ assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12,
+ expected_mp12))
+ assert_almost_equal(f.avrg(), 2.0/3.0)
+ assert_almost_equal(f.avrg(normalize=False), 4.0)
+ assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0)
+ assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0)
+
+ expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000])
+ expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0])
+ expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1])
+
+ f = spk.spike_train_order_profile(st2, st3)
+
+ assert_array_equal(f.x, expected_x23)
+ assert_array_equal(f.y, expected_y23)
+ assert_array_equal(f.mp, expected_mp23)
+ assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23,
+ expected_mp23))
+ assert_almost_equal(f.avrg(), -1.0/3.0)
+ assert_almost_equal(f.avrg(normalize=False), -2.0)
+ assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0)
+ assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0)
+
+ f = spk.spike_train_order_profile_multi([st1, st2, st3])
+
+ expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000])
+ expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0])
+ expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4])
+
+ assert_array_equal(f.x, expected_x)
+ assert_array_equal(f.y, expected_y)
+ assert_array_equal(f.mp, expected_mp)
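As a cross-check on the expected profiles, the scalar values in these tests are consistent with the integral of a DiscreteFunc ignoring the two auxiliary edge points at t_start and t_end; this is an inference from the numbers above, not a statement about the DiscreteFunc implementation.

    import numpy as np

    # expected st1-vs-st2 profile from test_spike_train_order
    y12 = np.array([1, 1, 1, 1, 1, 0, 0], dtype=float)
    mp12 = np.array([1, 1, 1, 1, 1, 2, 2], dtype=float)
    print(y12[1:-1].sum())                      # 4.0 -> spike_train_order(st1, st2, normalize=False)
    print(y12[1:-1].sum() / mp12[1:-1].sum())   # 0.666... -> normalized value 2/3

    # expected st2-vs-st3 profile
    y23 = np.array([0, 0, -1, -1, 0, 0, 0], dtype=float)
    mp23 = np.array([2, 2, 1, 1, 1, 1, 1], dtype=float)
    print(y23[1:-1].sum())                      # -2.0
    print(y23[1:-1].sum() / mp23[1:-1].sum())   # -0.333... -> -1/3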