From 53fdbb7ddb2a3a5e8d6f75ad69f0da90d3b6b5e6 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 12:10:01 +0200 Subject: reorganized directionality module --- pyspike/__init__.py | 9 +- pyspike/cython/cython_directionality.pyx | 223 +++++++++++++++++ pyspike/cython/directionality_python_backend.py | 89 +++++++ pyspike/directionality/__init__.py | 13 - pyspike/directionality/cython/__init__.py | 0 .../cython/cython_directionality.pyx | 223 ----------------- .../cython/directionality_python_backend.py | 89 ------- pyspike/directionality/spike_train_order.py | 264 --------------------- pyspike/spike_directionality.py | 244 +++++++++++++++++++ setup.py | 12 +- test/test_directionality.py | 41 ++++ test/test_spike_delay_asymmetry.py | 40 ---- 12 files changed, 609 insertions(+), 638 deletions(-) create mode 100644 pyspike/cython/cython_directionality.pyx create mode 100644 pyspike/cython/directionality_python_backend.py delete mode 100644 pyspike/directionality/__init__.py delete mode 100644 pyspike/directionality/cython/__init__.py delete mode 100644 pyspike/directionality/cython/cython_directionality.pyx delete mode 100644 pyspike/directionality/cython/directionality_python_backend.py delete mode 100644 pyspike/directionality/spike_train_order.py create mode 100644 pyspike/spike_directionality.py create mode 100644 test/test_directionality.py delete mode 100644 test/test_spike_delay_asymmetry.py diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 8d92ea4..a37174c 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -5,8 +5,8 @@ Distributed under the BSD License """ __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc", "directionality"] + "spikes", "spike_directionality", "SpikeTrain", + "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from PieceWiseConstFunc import PieceWiseConstFunc from PieceWiseLinFunc import PieceWiseLinFunc @@ -24,7 +24,10 @@ from psth import psth from spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes -import directionality as drct +from spike_directionality import spike_directionality, \ + spike_directionality_matrix, spike_train_order_profile, \ + optimal_spike_train_order_from_matrix, optimal_spike_train_order, \ + permutate_matrix # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx new file mode 100644 index 0000000..e1f63c4 --- /dev/null +++ b/pyspike/cython/cython_directionality.pyx @@ -0,0 +1,223 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double 
get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# spike_train_order_profile_cython +############################################################ +def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + + +############################################################ +# spike_order_values_cython +############################################################ +def spike_order_values_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef double[:] a1 = np.zeros(N1) # asymmetry values + cdef double[:] a2 = np.zeros(N2) # asymmetry values + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + a1[i] = -1 + a2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + 
# coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + a1[i] = +1 + a2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # equal spike times: zero asymmetry value + a1[i] = 0 + a2[j] = 0 + + return a1, a2 + + +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int asym = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + asym -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + asym += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if asym == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + asym = 1 + mp = 1 + + return asym, mp diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py new file mode 100644 index 0000000..e14238f --- /dev/null +++ b/pyspike/cython/directionality_python_backend.py @@ -0,0 +1,89 @@ +""" directionality_python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. 
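+
+The functions here mirror the signatures of their cython counterparts, so
+callers can pick whichever backend is importable. A minimal sketch of that
+fallback pattern (assuming the module layout introduced by this patch)::
+
+    try:
+        from pyspike.cython.cython_directionality import \
+            spike_train_order_profile_cython as profile_impl
+    except ImportError:
+        # compiled backend missing: use the pure-python version below
+        from pyspike.cython.directionality_python_backend import \
+            spike_train_order_python as profile_impl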
+ +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# spike_train_order_python +############################################################ +def spike_train_order_python(spikes1, spikes2, t_start, t_end, max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + n = 0 + st = np.zeros(N1 + N2 + 2) # spike times + a = np.zeros(N1 + N2 + 2) # coincidences + mp = np.ones(N1 + N2 + 2) # multiplicity + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp diff --git a/pyspike/directionality/__init__.py b/pyspike/directionality/__init__.py deleted file mode 100644 index 6f74c50..0000000 --- a/pyspike/directionality/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -Copyright 2015, Mario Mulansky - -Distributed under the BSD License -""" - -__all__ = ["spike_train_order"] - -from spike_train_order import spike_train_order_profile, \ - spike_train_order, spike_train_order_profile_multi, \ - spike_train_order_matrix, spike_order_values, \ - optimal_spike_train_order, optimal_spike_train_order_from_matrix, \ - permutate_matrix diff --git a/pyspike/directionality/cython/__init__.py b/pyspike/directionality/cython/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/pyspike/directionality/cython/cython_directionality.pyx b/pyspike/directionality/cython/cython_directionality.pyx deleted file mode 100644 index e1f63c4..0000000 --- a/pyspike/directionality/cython/cython_directionality.pyx +++ /dev/null @@ -1,223 +0,0 @@ -#cython: boundscheck=False -#cython: wraparound=False -#cython: cdivision=True - -""" -cython_directionality.pyx - -cython implementation of the spike delay asymmetry measures - -Copyright 2015, Mario Mulansky - -Distributed under the BSD License - -""" - -""" -To test whether things can be optimized: remove all yellow stuff -in the html output:: - - cython -a cython_directionality.pyx - -which gives:: - - cython_directionality.html - -""" - -import numpy as np -cimport numpy as np - -from 
libc.math cimport fabs -from libc.math cimport fmax -from libc.math cimport fmin - -# from pyspike.cython.cython_distances cimport get_tau - -DTYPE = np.float -ctypedef np.float_t DTYPE_t - - -############################################################ -# get_tau -############################################################ -cdef inline double get_tau(double[:] spikes1, double[:] spikes2, - int i, int j, double interval, double max_tau): - cdef double m = interval # use interval length as initial tau - cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 - cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 - if i < N1 and i > -1: - m = fmin(m, spikes1[i+1]-spikes1[i]) - if j < N2 and j > -1: - m = fmin(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = fmin(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = fmin(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = fmin(m, max_tau) - return m - - -############################################################ -# spike_train_order_profile_cython -############################################################ -def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, - double t_start, double t_end, - double max_tau): - - cdef int N1 = len(spikes1) - cdef int N2 = len(spikes2) - cdef int i = -1 - cdef int j = -1 - cdef int n = 0 - cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times - cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values - cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity - cdef double interval = t_end - t_start - cdef double tau - while i + j < N1 + N2 - 2: - if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - st[n] = spikes1[i] - if j > -1 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # spike from spike train 1 after spike train 2 - # both get marked with -1 - a[n] = -1 - a[n-1] = -1 - elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - st[n] = spikes2[j] - if i > -1 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # spike from spike train 1 before spike train 2 - # both get marked with 1 - a[n] = 1 - a[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - n += 1 - # add only one event with zero asymmetry value and multiplicity 2 - st[n] = spikes1[i] - a[n] = 0 - mp[n] = 2 - - st = st[:n+2] - a = a[:n+2] - mp = mp[:n+2] - - st[0] = t_start - st[len(st)-1] = t_end - if N1 + N2 > 0: - a[0] = a[1] - a[len(a)-1] = a[len(a)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] - else: - a[0] = 1 - a[1] = 1 - - return st, a, mp - - - -############################################################ -# spike_order_values_cython -############################################################ -def spike_order_values_cython(double[:] spikes1, - double[:] spikes2, - double t_start, double t_end, - double max_tau): - - cdef int N1 = len(spikes1) - cdef int N2 = len(spikes2) - cdef int i = -1 - cdef int j = -1 - cdef double[:] a1 = np.zeros(N1) # asymmetry values - cdef double[:] a2 = np.zeros(N2) # asymmetry values - cdef double interval = t_end - t_start - cdef double tau - while i + j < N1 + N2 - 2: - if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): - i += 1 - tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - if j > -1 and spikes1[i]-spikes2[j] < tau: - # coincidence 
between the current spike and the previous spike - # spike from spike train 1 after spike train 2 - # leading spike gets +1, following spike -1 - a1[i] = -1 - a2[j] = +1 - elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): - j += 1 - tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - if i > -1 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # spike from spike train 1 before spike train 2 - # leading spike gets +1, following spike -1 - a1[i] = +1 - a2[j] = -1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - # equal spike times: zero asymmetry value - a1[i] = 0 - a2[j] = 0 - - return a1, a2 - - -############################################################ -# spike_train_order_cython -############################################################ -def spike_train_order_cython(double[:] spikes1, double[:] spikes2, - double t_start, double t_end, double max_tau): - - cdef int N1 = len(spikes1) - cdef int N2 = len(spikes2) - cdef int i = -1 - cdef int j = -1 - cdef int asym = 0 - cdef int mp = 0 - cdef double interval = t_end - t_start - cdef double tau - while i + j < N1 + N2 - 2: - if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): - i += 1 - mp += 1 - tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - if j > -1 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # spike in spike train 2 appeared before spike in spike train 1 - # mark with -1 - asym -= 2 - elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): - j += 1 - mp += 1 - tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - if i > -1 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # spike in spike train 1 appeared before spike in spike train 2 - # mark with +1 - asym += 2 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - # add only one event with multiplicity 2, but no asymmetry counting - mp += 2 - - if asym == 0 and mp == 0: - # empty spike trains -> spike sync = 1 by definition - asym = 1 - mp = 1 - - return asym, mp diff --git a/pyspike/directionality/cython/directionality_python_backend.py b/pyspike/directionality/cython/directionality_python_backend.py deleted file mode 100644 index e14238f..0000000 --- a/pyspike/directionality/cython/directionality_python_backend.py +++ /dev/null @@ -1,89 +0,0 @@ -""" directionality_python_backend.py - -Collection of python functions that can be used instead of the cython -implementation. 
- -Copyright 2015, Mario Mulansky - -Distributed under the BSD License - -""" - -import numpy as np - - -############################################################ -# spike_train_order_python -############################################################ -def spike_train_order_python(spikes1, spikes2, t_start, t_end, max_tau): - - def get_tau(spikes1, spikes2, i, j, max_tau): - m = t_end - t_start # use interval as initial tau - if i < len(spikes1)-1 and i > -1: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-1 and j > -1: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = min(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = min(m, max_tau) - return m - - N1 = len(spikes1) - N2 = len(spikes2) - i = -1 - j = -1 - n = 0 - st = np.zeros(N1 + N2 + 2) # spike times - a = np.zeros(N1 + N2 + 2) # coincidences - mp = np.ones(N1 + N2 + 2) # multiplicity - while i + j < N1 + N2 - 2: - if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) - st[n] = spikes1[i] - if j > -1 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - a[n] = -1 - a[n-1] = -1 - elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) - st[n] = spikes2[j] - if i > -1 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - a[n] = 1 - a[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - n += 1 - # add only one event with zero asymmetry value and multiplicity 2 - st[n] = spikes1[i] - a[n] = 0 - mp[n] = 2 - - st = st[:n+2] - a = a[:n+2] - mp = mp[:n+2] - - st[0] = t_start - st[len(st)-1] = t_end - if N1 + N2 > 0: - a[0] = a[1] - a[len(a)-1] = a[len(a)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] - else: - a[0] = 1 - a[1] = 1 - - return st, a, mp diff --git a/pyspike/directionality/spike_train_order.py b/pyspike/directionality/spike_train_order.py deleted file mode 100644 index 892ffd0..0000000 --- a/pyspike/directionality/spike_train_order.py +++ /dev/null @@ -1,264 +0,0 @@ -# Module containing functions to compute multivariate spike train order -# Copyright 2015, Mario Mulansky -# Distributed under the BSD License - -import numpy as np -from math import exp -from functools import partial -import pyspike -from pyspike import DiscreteFunc -from pyspike.generic import _generic_profile_multi - - -############################################################ -# spike_train_order_profile -############################################################ -def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): - """ Computes the spike delay asymmetry profile A(t) of the two given - spike trains. Returns the profile as a DiscreteFunction object. - - :param spike_train1: First spike train. - :type spike_train1: :class:`pyspike.SpikeTrain` - :param spike_train2: Second spike train. - :type spike_train2: :class:`pyspike.SpikeTrain` - :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. - :returns: The spike-distance profile :math:`S_{sync}(t)`. 
- :rtype: :class:`pyspike.function.DiscreteFunction` - - """ - # check whether the spike trains are defined for the same interval - assert spike_train1.t_start == spike_train2.t_start, \ - "Given spike trains are not defined on the same interval!" - assert spike_train1.t_end == spike_train2.t_end, \ - "Given spike trains are not defined on the same interval!" - - # cython implementation - try: - from cython.cython_directionality import \ - spike_train_order_profile_cython as \ - spike_train_order_profile_impl - except ImportError: - # raise NotImplementedError() - if not(pyspike.disable_backend_warning): - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from cython.directionality_python_backend import \ - spike_train_order_python as spike_train_order_profile_impl - - if max_tau is None: - max_tau = 0.0 - - times, coincidences, multiplicity \ - = spike_train_order_profile_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) - - return DiscreteFunc(times, coincidences, multiplicity) - - -############################################################ -# spike_train_order -############################################################ -def spike_train_order(spike_train1, spike_train2, normalize=True, - interval=None, max_tau=None): - """ Computes the overall spike delay asymmetry value for two spike trains. - """ - if interval is None: - # distance over the whole interval is requested: use specific function - # for optimal performance - try: - from cython.cython_directionality import \ - spike_train_order_cython as spike_train_order_impl - if max_tau is None: - max_tau = 0.0 - c, mp = spike_train_order_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) - except ImportError: - # Cython backend not available: fall back to profile averaging - c, mp = spike_train_order_profile(spike_train1, spike_train2, - max_tau).integral(interval) - if normalize: - return 1.0*c/mp - else: - return c - else: - # some specific interval is provided: not yet implemented - raise NotImplementedError() - - -############################################################ -# spike_train_order_profile_multi -############################################################ -def spike_train_order_profile_multi(spike_trains, indices=None, - max_tau=None): - """ Computes the multi-variate spike delay asymmetry profile for a set of - spike trains. For each spike in the set of spike trains, the multi-variate - profile is defined as the sum of asymmetry values divided by the number of - spike trains pairs involving the spike train of containing this spike, - which is the number of spike trains minus one (N-1). - - :param spike_trains: list of :class:`pyspike.SpikeTrain` - :param indices: list of indices defining which spike trains to use, - if None all given spike trains are used (default=None) - :type indices: list or None - :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. - :returns: The multi-variate spike sync profile :math:`(t)` - :rtype: :class:`pyspike.function.DiscreteFunction` - - """ - prof_func = partial(spike_train_order_profile, max_tau=max_tau) - average_prof, M = _generic_profile_multi(spike_trains, prof_func, - indices) - # average_dist.mul_scalar(1.0/M) # no normalization here! 
- return average_prof - - -############################################################ -# spike_train_order_matrix -############################################################ -def spike_train_order_matrix(spike_trains, normalize=True, indices=None, - interval=None, max_tau=None): - """ Computes the spike delay asymmetry matrix for the given spike trains. - """ - if indices is None: - indices = np.arange(len(spike_trains)) - indices = np.array(indices) - # check validity of indices - assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." - # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) - for j in indices[i+1:]] - - distance_matrix = np.zeros((len(indices), len(indices))) - for i, j in pairs: - d = spike_train_order(spike_trains[i], spike_trains[j], normalize, - interval, max_tau=max_tau) - distance_matrix[i, j] = d - distance_matrix[j, i] = -d - return distance_matrix - - -############################################################ -# spike_order_values -############################################################ -def spike_order_values(spike_trains, indices=None, - interval=None, max_tau=None): - """ Computes the spike train symmetry value for each spike in each spike - train. - """ - if indices is None: - indices = np.arange(len(spike_trains)) - indices = np.array(indices) - # check validity of indices - assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ - "Invalid index list." - # list of arrays for reulting asymmetry values - asymmetry_list = [np.zeros_like(st.spikes) for st in spike_trains] - # generate a list of possible index pairs - pairs = [(indices[i], j) for i in range(len(indices)) - for j in indices[i+1:]] - - # cython implementation - try: - from cython.cython_directionality import \ - spike_order_values_cython as spike_order_values_impl - except ImportError: - raise NotImplementedError() -# if not(pyspike.disable_backend_warning): -# print("Warning: spike_distance_cython not found. Make sure that \ -# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -# Falling back to slow python backend.") -# # use python backend -# from cython.python_backend import coincidence_python \ -# as coincidence_profile_impl - - if max_tau is None: - max_tau = 0.0 - - for i, j in pairs: - a1, a2 = spike_order_values_impl(spike_trains[i].spikes, - spike_trains[j].spikes, - spike_trains[i].t_start, - spike_trains[i].t_end, - max_tau) - asymmetry_list[i] += a1 - asymmetry_list[j] += a2 - for a in asymmetry_list: - a /= len(spike_trains)-1 - return asymmetry_list - - -############################################################ -# optimal_asymmetry_order_from_matrix -############################################################ -def optimal_spike_train_order_from_matrix(D, full_output=False): - """ finds the best sorting via simulated annealing. - Returns the optimal permutation p and A value. - Internal function, don't call directly! Use optimal_asymmetry_order - instead. 
- """ - N = len(D) - A = np.sum(np.triu(D, 0)) - - p = np.arange(N) - - T = 2*np.max(D) # starting temperature - T_end = 1E-5 * T # final temperature - alpha = 0.9 # cooling factor - total_iter = 0 - while T > T_end: - iterations = 0 - succ_iter = 0 - while iterations < 100*N and succ_iter < 10*N: - # exchange two rows and cols - ind1 = np.random.randint(N-1) - delta_A = -2*D[p[ind1], p[ind1+1]] - if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): - # swap indices - p[ind1], p[ind1+1] = p[ind1+1], p[ind1] - A += delta_A - succ_iter += 1 - iterations += 1 - total_iter += iterations - T *= alpha # cool down - if succ_iter == 0: - break - if full_output: - return p, A, total_iter - else: - return p, A - - -############################################################ -# optimal_spike_train_order -############################################################ -def optimal_spike_train_order(spike_trains, indices=None, interval=None, - max_tau=None, full_output=False): - """ finds the best sorting of the given spike trains via simulated - annealing. - Returns the optimal permutation p and A value. - """ - D = spike_train_order_matrix(spike_trains, indices, interval, max_tau) - return optimal_spike_train_order_from_matrix(D, full_output) - - -############################################################ -# permutate_matrix -############################################################ -def permutate_matrix(D, p): - N = len(D) - D_p = np.empty_like(D) - for n in xrange(N): - for m in xrange(N): - D_p[n, m] = D[p[n], p[m]] - return D_p diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py new file mode 100644 index 0000000..0e69cb5 --- /dev/null +++ b/pyspike/spike_directionality.py @@ -0,0 +1,244 @@ +# Module containing functions to compute the SPIKE directionality and the +# spike train order profile +# Copyright 2015, Mario Mulansky +# Distributed under the BSD License + +import numpy as np +from math import exp +import pyspike +from pyspike import DiscreteFunc + + +############################################################ +# spike_directionality +############################################################ +def spike_directionality(spike_train1, spike_train2, normalize=True, + interval=None, max_tau=None): + """ Computes the overall spike directionality for two spike trains. + """ + if interval is None: + # distance over the whole interval is requested: use specific function + # for optimal performance + try: + from cython.cython_directionality import \ + spike_train_order_cython as spike_train_order_impl + if max_tau is None: + max_tau = 0.0 + c, mp = spike_train_order_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + except ImportError: + # Cython backend not available: fall back to profile averaging + c, mp = _spike_directionality_profile(spike_train1, + spike_train2, + max_tau).integral(interval) + if normalize: + return 1.0*c/mp + else: + return c + else: + # some specific interval is provided: not yet implemented + raise NotImplementedError() + + +############################################################ +# spike_directionality_matrix +############################################################ +def spike_directionality_matrix(spike_trains, normalize=True, indices=None, + interval=None, max_tau=None): + """ Computes the spike directionaity matrix for the given spike trains. 
+ """ + if indices is None: + indices = np.arange(len(spike_trains)) + indices = np.array(indices) + # check validity of indices + assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ + "Invalid index list." + # generate a list of possible index pairs + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] + + distance_matrix = np.zeros((len(indices), len(indices))) + for i, j in pairs: + d = spike_directionality(spike_trains[i], spike_trains[j], normalize, + interval, max_tau=max_tau) + distance_matrix[i, j] = d + distance_matrix[j, i] = -d + return distance_matrix + + +############################################################ +# spike_train_order_profile +############################################################ +def spike_train_order_profile(spike_trains, indices=None, + interval=None, max_tau=None): + """ Computes the spike train symmetry value for each spike in each spike + train. + """ + if indices is None: + indices = np.arange(len(spike_trains)) + indices = np.array(indices) + # check validity of indices + assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \ + "Invalid index list." + # list of arrays for reulting asymmetry values + asymmetry_list = [np.zeros_like(st.spikes) for st in spike_trains] + # generate a list of possible index pairs + pairs = [(indices[i], j) for i in range(len(indices)) + for j in indices[i+1:]] + + # cython implementation + try: + from cython.cython_directionality import \ + spike_order_values_cython as spike_order_values_impl + except ImportError: + raise NotImplementedError() +# if not(pyspike.disable_backend_warning): +# print("Warning: spike_distance_cython not found. Make sure that \ +# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +# Falling back to slow python backend.") +# # use python backend +# from cython.python_backend import coincidence_python \ +# as coincidence_profile_impl + + if max_tau is None: + max_tau = 0.0 + + for i, j in pairs: + a1, a2 = spike_order_values_impl(spike_trains[i].spikes, + spike_trains[j].spikes, + spike_trains[i].t_start, + spike_trains[i].t_end, + max_tau) + asymmetry_list[i] += a1 + asymmetry_list[j] += a2 + for a in asymmetry_list: + a /= len(spike_trains)-1 + return asymmetry_list + + +############################################################ +# optimal_spike_train_order_from_matrix +############################################################ +def optimal_spike_train_order_from_matrix(D, full_output=False): + """ finds the best sorting via simulated annealing. + Returns the optimal permutation p and A value. + Internal function, don't call directly! Use optimal_asymmetry_order + instead. 
+ """ + N = len(D) + A = np.sum(np.triu(D, 0)) + + p = np.arange(N) + + T = 2*np.max(D) # starting temperature + T_end = 1E-5 * T # final temperature + alpha = 0.9 # cooling factor + total_iter = 0 + while T > T_end: + iterations = 0 + succ_iter = 0 + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + ind1 = np.random.randint(N-1) + delta_A = -2*D[p[ind1], p[ind1+1]] + if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): + # swap indices + p[ind1], p[ind1+1] = p[ind1+1], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + break + if full_output: + return p, A, total_iter + else: + return p, A + + +############################################################ +# optimal_spike_train_order +############################################################ +def optimal_spike_train_order(spike_trains, indices=None, interval=None, + max_tau=None, full_output=False): + """ finds the best sorting of the given spike trains via simulated + annealing. + Returns the optimal permutation p and A value. + """ + D = spike_directionality_matrix(spike_trains, normalize=False, + indices=indices, interval=interval, + max_tau=max_tau) + return optimal_spike_train_order_from_matrix(D, full_output) + + +############################################################ +# permutate_matrix +############################################################ +def permutate_matrix(D, p): + """ Applies the permutation p to the columns and rows of matrix D. + Return the new permutated matrix. + """ + N = len(D) + D_p = np.empty_like(D) + for n in xrange(N): + for m in xrange(N): + D_p[n, m] = D[p[n], p[m]] + return D_p + + +# internal helper functions + +############################################################ +# _spike_directionality_profile +############################################################ +def _spike_directionality_profile(spike_train1, spike_train2, + max_tau=None): + """ Computes the spike delay asymmetry profile A(t) of the two given + spike trains. Returns the profile as a DiscreteFunction object. + + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The spike-distance profile :math:`S_{sync}(t)`. + :rtype: :class:`pyspike.function.DiscreteFunction` + + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_train_order_profile_cython as \ + spike_train_order_profile_impl + except ImportError: + # raise NotImplementedError() + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. 
Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_train_order_python as spike_train_order_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_train_order_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) diff --git a/setup.py b/setup.py index 960c684..c130fbd 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ else: if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ os.path.isfile("pyspike/cython/cython_distances.c") and \ - os.path.isfile("pyspike/directionality/cython/cython_directionality.c"): + os.path.isfile("pyspike/cython/cython_directionality.c"): use_c = True else: use_c = False @@ -40,8 +40,8 @@ if use_cython: # Cython is available, compile .pyx -> .c ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), - Extension("pyspike.directionality.cython.cython_directionality", - ["pyspike/directionality/cython/cython_directionality.pyx"]) + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -52,8 +52,8 @@ elif use_c: # c files are there, compile to binaries ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), - Extension("pyspike.directionality.cython.cython_directionality", - ["pyspike/directionality/cython/cython_directionality.c"]) + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -93,7 +93,7 @@ train similarity', package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', 'cython/cython_distances.c', - 'directionality/cython/cython_directionality.c'], + 'cython/cython_directionality.c'], 'test': ['Spike_testdata.txt'] } ) diff --git a/test/test_directionality.py b/test/test_directionality.py new file mode 100644 index 0000000..5c3da00 --- /dev/null +++ b/test/test_directionality.py @@ -0,0 +1,41 @@ +""" test_spike_delay_asymmetry.py + +Tests the asymmetry functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc +from pyspike.spike_directionality import _spike_directionality_profile + + +def test_profile(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = _spike_directionality_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 4.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + expected_x = np.array([0, 100, 105, 
195, 200, 300, 500, 1000]) + expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) + expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) + + f = _spike_directionality_profile(st1, st3) + assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) diff --git a/test/test_spike_delay_asymmetry.py b/test/test_spike_delay_asymmetry.py deleted file mode 100644 index 9de16e5..0000000 --- a/test/test_spike_delay_asymmetry.py +++ /dev/null @@ -1,40 +0,0 @@ -""" test_spike_delay_asymmetry.py - -Tests the asymmetry functions - -Copyright 2015, Mario Mulansky - -Distributed under the BSD License - -""" - -import numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ - assert_array_equal - -import pyspike as spk -from pyspike import SpikeTrain, DiscreteFunc - - -def test_profile(): - st1 = SpikeTrain([100, 200, 300], [0, 1000]) - st2 = SpikeTrain([105, 205, 300], [0, 1000]) - expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) - expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) - - f = spk.drct.spike_train_order_profile(st1, st2) - - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) - assert_almost_equal(f.avrg(), 2.0/3.0) - assert_almost_equal(spk.drct.spike_train_order(st1, st2), 2.0/3.0) - assert_almost_equal(spk.drct.spike_train_order(st1, st2, normalize=False), - 4.0) - - st3 = SpikeTrain([105, 195, 500], [0, 1000]) - expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000]) - expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) - - f = spk.drct.spike_train_order_profile(st1, st3) - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) -- cgit v1.2.3 From d955d7bd96a578113ff89c1322d2aef15ba71cfc Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 17:40:09 +0200 Subject: further refactoring of directionality --- pyspike/__init__.py | 7 +- pyspike/cython/cython_directionality.pyx | 109 ++++++++++----- pyspike/spike_directionality.py | 232 ++++++++++++++++++++++--------- test/test_directionality.py | 80 ++++++++--- 4 files changed, 311 insertions(+), 117 deletions(-) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index a37174c..0558ce0 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -25,9 +25,10 @@ from spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes from spike_directionality import spike_directionality, \ - spike_directionality_matrix, spike_train_order_profile, \ - optimal_spike_train_order_from_matrix, optimal_spike_train_order, \ - permutate_matrix + spike_directionality_profiles, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order, \ + spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \ + optimal_spike_train_order, permutate_matrix # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx index e1f63c4..ac37690 100644 --- a/pyspike/cython/cython_directionality.pyx +++ b/pyspike/cython/cython_directionality.pyx @@ -128,21 +128,68 @@ def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, return st, a, mp +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, 
double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + ############################################################ -# spike_order_values_cython +# spike_directionality_profiles_cython ############################################################ -def spike_order_values_cython(double[:] spikes1, - double[:] spikes2, - double t_start, double t_end, - double max_tau): +def spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): cdef int N1 = len(spikes1) cdef int N2 = len(spikes2) cdef int i = -1 cdef int j = -1 - cdef double[:] a1 = np.zeros(N1) # asymmetry values - cdef double[:] a2 = np.zeros(N2) # asymmetry values + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: @@ -153,8 +200,8 @@ def spike_order_values_cython(double[:] spikes1, # coincidence between the current spike and the previous spike # spike from spike train 1 after spike train 2 # leading spike gets +1, following spike -1 - a1[i] = -1 - a2[j] = +1 + d1[i] = -1 + d2[j] = +1 elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) @@ -162,62 +209,54 @@ def spike_order_values_cython(double[:] spikes1, # coincidence between the current spike and the previous spike # spike from spike train 1 before spike train 2 # leading spike gets +1, following spike -1 - a1[i] = +1 - a2[j] = -1 + d1[i] = +1 + d2[j] = -1 else: # spikes1[i+1] = spikes2[j+1] # advance in both spike trains j += 1 i += 1 # equal spike times: zero asymmetry value - a1[i] = 0 - a2[j] = 0 + d1[i] = 0 + d2[j] = 0 - return a1, a2 + return d1, d2 ############################################################ -# spike_train_order_cython +# spike_directionality_cython ############################################################ -def spike_train_order_cython(double[:] spikes1, double[:] spikes2, - double t_start, double t_end, double max_tau): +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): cdef int N1 = len(spikes1) cdef int N2 = len(spikes2) cdef int i = -1 cdef int j = -1 - cdef int asym = 0 - cdef int mp = 0 + cdef int d = 0 # directionality value cdef double 
interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 - mp += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike - # spike in spike train 2 appeared before spike in spike train 1 - # mark with -1 - asym -= 2 + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 - mp += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike - # spike in spike train 1 appeared before spike in spike train 2 - # mark with +1 - asym += 2 + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 else: # spikes1[i+1] = spikes2[j+1] # advance in both spike trains j += 1 i += 1 - # add only one event with multiplicity 2, but no asymmetry counting - mp += 2 - - if asym == 0 and mp == 0: - # empty spike trains -> spike sync = 1 by definition - asym = 1 - mp = 1 - return asym, mp + return d diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index 0e69cb5..f608ecc 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -7,6 +7,8 @@ import numpy as np from math import exp import pyspike from pyspike import DiscreteFunc +from functools import partial +from pyspike.generic import _generic_profile_multi ############################################################ @@ -21,23 +23,21 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, # for optimal performance try: from cython.cython_directionality import \ - spike_train_order_cython as spike_train_order_impl + spike_directionality_cython as spike_directionality_impl if max_tau is None: max_tau = 0.0 - c, mp = spike_train_order_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) + d = spike_directionality_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + c = len(spike_train1.spikes) except ImportError: - # Cython backend not available: fall back to profile averaging - c, mp = _spike_directionality_profile(spike_train1, - spike_train2, - max_tau).integral(interval) + raise NotImplementedError() if normalize: - return 1.0*c/mp + return 1.0*d/c else: - return c + return d else: # some specific interval is provided: not yet implemented raise NotImplementedError() @@ -70,11 +70,11 @@ def spike_directionality_matrix(spike_trains, normalize=True, indices=None, ############################################################ -# spike_train_order_profile +# spike_directionality_profiles ############################################################ -def spike_train_order_profile(spike_trains, indices=None, - interval=None, max_tau=None): - """ Computes the spike train symmetry value for each spike in each spike +def spike_directionality_profiles(spike_trains, indices=None, + interval=None, max_tau=None): + """ Computes the spike directionality value for each spike in each spike train. 
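+
+    Each returned array holds one value per spike of the corresponding
+    train: +1 if that spike leads a coincidence, -1 if it follows one, and
+    0 otherwise, averaged over the N-1 partner trains. A usage sketch
+    (values taken from the test added in this patch)::
+
+        st1 = SpikeTrain([100, 200, 300], [0, 1000])
+        st2 = SpikeTrain([105, 205, 300], [0, 1000])
+        st3 = SpikeTrain([105, 195, 500], [0, 1000])
+        profs = spike_directionality_profiles([st1, st2, st3])
+        # profs[0] == [1.0, 0.0, 0.0], profs[1] == [-0.5, -1.0, 0.0]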
""" if indices is None: @@ -92,7 +92,7 @@ def spike_train_order_profile(spike_trains, indices=None, # cython implementation try: from cython.cython_directionality import \ - spike_order_values_cython as spike_order_values_impl + spike_directionality_profiles_cython as profile_impl except ImportError: raise NotImplementedError() # if not(pyspike.disable_backend_warning): @@ -107,11 +107,9 @@ def spike_train_order_profile(spike_trains, indices=None, max_tau = 0.0 for i, j in pairs: - a1, a2 = spike_order_values_impl(spike_trains[i].spikes, - spike_trains[j].spikes, - spike_trains[i].t_start, - spike_trains[i].t_end, - max_tau) + a1, a2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, + spike_trains[i].t_start, spike_trains[i].t_end, + max_tau) asymmetry_list[i] += a1 asymmetry_list[j] += a2 for a in asymmetry_list: @@ -119,6 +117,114 @@ def spike_train_order_profile(spike_trains, indices=None, return asymmetry_list +############################################################ +# spike_train_order_profile +############################################################ +def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): + """ Computes the spike train order profile P(t) of the two given + spike trains. Returns the profile as a DiscreteFunction object. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The spike-distance profile :math:`S_{sync}(t)`. + :rtype: :class:`pyspike.function.DiscreteFunction` + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_train_order_profile_cython as \ + spike_train_order_profile_impl + except ImportError: + # raise NotImplementedError() + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_train_order_python as spike_train_order_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_train_order_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) + + +############################################################ +# spike_train_order +############################################################ +def spike_train_order(spike_train1, spike_train2, normalize=True, + interval=None, max_tau=None): + """ Computes the overall spike delay asymmetry value for two spike trains. 
+ """ + if interval is None: + # distance over the whole interval is requested: use specific function + # for optimal performance + try: + from cython.cython_directionality import \ + spike_train_order_cython as spike_train_order_impl + if max_tau is None: + max_tau = 0.0 + c, mp = spike_train_order_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + except ImportError: + # Cython backend not available: fall back to profile averaging + c, mp = spike_train_order_profile(spike_train1, spike_train2, + max_tau).integral(interval) + if normalize: + return 1.0*c/mp + else: + return c + else: + # some specific interval is provided: not yet implemented + raise NotImplementedError() + + +############################################################ +# spike_train_order_profile_multi +############################################################ +def spike_train_order_profile_multi(spike_trains, indices=None, + max_tau=None): + """ Computes the multi-variate spike delay asymmetry profile for a set of + spike trains. For each spike in the set of spike trains, the multi-variate + profile is defined as the sum of asymmetry values divided by the number of + spike trains pairs involving the spike train of containing this spike, + which is the number of spike trains minus one (N-1). + :param spike_trains: list of :class:`pyspike.SpikeTrain` + :param indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + :type indices: list or None + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The multi-variate spike sync profile :math:`(t)` + :rtype: :class:`pyspike.function.DiscreteFunction` + """ + prof_func = partial(spike_train_order_profile, max_tau=max_tau) + average_prof, M = _generic_profile_multi(spike_trains, prof_func, + indices) + # average_dist.mul_scalar(1.0/M) # no normalization here! + return average_prof + + ############################################################ # optimal_spike_train_order_from_matrix ############################################################ @@ -195,50 +301,50 @@ def permutate_matrix(D, p): ############################################################ # _spike_directionality_profile ############################################################ -def _spike_directionality_profile(spike_train1, spike_train2, - max_tau=None): - """ Computes the spike delay asymmetry profile A(t) of the two given - spike trains. Returns the profile as a DiscreteFunction object. +# def _spike_directionality_profile(spike_train1, spike_train2, +# max_tau=None): +# """ Computes the spike delay asymmetry profile A(t) of the two given +# spike trains. Returns the profile as a DiscreteFunction object. - :param spike_train1: First spike train. - :type spike_train1: :class:`pyspike.SpikeTrain` - :param spike_train2: Second spike train. - :type spike_train2: :class:`pyspike.SpikeTrain` - :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. - :returns: The spike-distance profile :math:`S_{sync}(t)`. - :rtype: :class:`pyspike.function.DiscreteFunction` +# :param spike_train1: First spike train. +# :type spike_train1: :class:`pyspike.SpikeTrain` +# :param spike_train2: Second spike train. +# :type spike_train2: :class:`pyspike.SpikeTrain` +# :param max_tau: Maximum coincidence window size. If 0 or `None`, the +# coincidence window has no upper bound. 
+# :returns: The spike-distance profile :math:`S_{sync}(t)`. +# :rtype: :class:`pyspike.function.DiscreteFunction` - """ - # check whether the spike trains are defined for the same interval - assert spike_train1.t_start == spike_train2.t_start, \ - "Given spike trains are not defined on the same interval!" - assert spike_train1.t_end == spike_train2.t_end, \ - "Given spike trains are not defined on the same interval!" +# """ +# # check whether the spike trains are defined for the same interval +# assert spike_train1.t_start == spike_train2.t_start, \ +# "Given spike trains are not defined on the same interval!" +# assert spike_train1.t_end == spike_train2.t_end, \ +# "Given spike trains are not defined on the same interval!" - # cython implementation - try: - from cython.cython_directionality import \ - spike_train_order_profile_cython as \ - spike_train_order_profile_impl - except ImportError: - # raise NotImplementedError() - if not(pyspike.disable_backend_warning): - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from cython.directionality_python_backend import \ - spike_train_order_python as spike_train_order_profile_impl +# # cython implementation +# try: +# from cython.cython_directionality import \ +# spike_train_order_profile_cython as \ +# spike_train_order_profile_impl +# except ImportError: +# # raise NotImplementedError() +# if not(pyspike.disable_backend_warning): +# print("Warning: spike_distance_cython not found. Make sure that \ +# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +# Falling back to slow python backend.") +# # use python backend +# from cython.directionality_python_backend import \ +# spike_train_order_python as spike_train_order_profile_impl - if max_tau is None: - max_tau = 0.0 +# if max_tau is None: +# max_tau = 0.0 - times, coincidences, multiplicity \ - = spike_train_order_profile_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) +# times, coincidences, multiplicity \ +# = spike_train_order_profile_impl(spike_train1.spikes, +# spike_train2.spikes, +# spike_train1.t_start, +# spike_train1.t_end, +# max_tau) - return DiscreteFunc(times, coincidences, multiplicity) +# return DiscreteFunc(times, coincidences, multiplicity) diff --git a/test/test_directionality.py b/test/test_directionality.py index 5c3da00..7ca5d58 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -14,28 +14,76 @@ from numpy.testing import assert_equal, assert_almost_equal, \ import pyspike as spk from pyspike import SpikeTrain, DiscreteFunc -from pyspike.spike_directionality import _spike_directionality_profile +# from pyspike.spike_directionality import _spike_directionality_profile -def test_profile(): +def test_spike_directionality(): st1 = SpikeTrain([100, 200, 300], [0, 1000]) st2 = SpikeTrain([105, 205, 300], [0, 1000]) - expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) - expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) - - f = _spike_directionality_profile(st1, st2) - - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) - assert_almost_equal(f.avrg(), 2.0/3.0) assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), - 4.0) + 2.0) + + # exchange order of spike 
trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_profiles([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) st3 = SpikeTrain([105, 195, 500], [0, 1000]) - expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000]) - expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) - f = _spike_directionality_profile(st1, st3) - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) -- cgit v1.2.3 From f1f1bfa2653986c7b8b5ae4d27e7013218293b00 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 18:26:58 +0200 Subject: completed python directionality backend --- pyspike/cython/directionality_python_backend.py | 57 ++++++++++++++++++++++++- pyspike/spike_directionality.py | 29 +++++++------ 2 files changed, 72 insertions(+), 14 deletions(-) diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py index e14238f..c1d820b 100644 --- a/pyspike/cython/directionality_python_backend.py +++ b/pyspike/cython/directionality_python_backend.py @@ -15,7 +15,62 @@ import numpy as np ############################################################ # 
spike_train_order_python ############################################################ -def spike_train_order_python(spikes1, spikes2, t_start, t_end, max_tau): +def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + d1 = np.zeros(N1) # directionality values + d2 = np.zeros(N2) # directionality values + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in first spike train occurs after second + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in second spike train occurs after first + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_train_order_python +############################################################ +def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): def get_tau(spikes1, spikes2, i, j, max_tau): m = t_end - t_start # use interval as initial tau diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index f608ecc..cda7fe3 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -33,7 +33,11 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, max_tau) c = len(spike_train1.spikes) except ImportError: - raise NotImplementedError() + d1, x = spike_directionality_profiles([spike_train1, spike_train2], + interval=interval, + max_tau=max_tau) + d = np.sum(d1) + c = len(spike_train1.spikes) if normalize: return 1.0*d/c else: @@ -94,24 +98,23 @@ def spike_directionality_profiles(spike_trains, indices=None, from cython.cython_directionality import \ spike_directionality_profiles_cython as profile_impl except ImportError: - raise NotImplementedError() -# if not(pyspike.disable_backend_warning): -# print("Warning: spike_distance_cython not found. Make sure that \ -# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -# Falling back to slow python backend.") -# # use python backend -# from cython.python_backend import coincidence_python \ -# as coincidence_profile_impl + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. 
Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_directionality_profile_python as profile_impl if max_tau is None: max_tau = 0.0 for i, j in pairs: - a1, a2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, + d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, spike_trains[i].t_start, spike_trains[i].t_end, max_tau) - asymmetry_list[i] += a1 - asymmetry_list[j] += a2 + asymmetry_list[i] += d1 + asymmetry_list[j] += d2 for a in asymmetry_list: a /= len(spike_trains)-1 return asymmetry_list @@ -151,7 +154,7 @@ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend from cython.directionality_python_backend import \ - spike_train_order_python as spike_train_order_profile_impl + spike_train_order_profile_python as spike_train_order_profile_impl if max_tau is None: max_tau = 0.0 -- cgit v1.2.3 From 81c6609191786d96c9feaf40767a1615e9a0565c Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 9 Sep 2015 17:51:03 +0200 Subject: added SPIKE-Sync based filtering new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold not yet fully tested, python backend missing. --- pyspike/cython/cython_profiles.pyx | 31 +++++++++++++++++++++++++++ pyspike/spike_sync.py | 40 ++++++++++++++++++++++++++++++++++- test/test_sync_filter.py | 43 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 test/test_sync_filter.py diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index f9893eb..3b33c5a 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -426,3 +426,34 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, c[1] = 1 return st, c, mp + + +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int j = -1 + cdef double[:] c = np.zeros(N1) # coincidences + cdef double interval = t_end - t_start + cdef double tau + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + print i, j, spikes1[i], spikes2[j], tau + if j > -1 and spikes1[i]-spikes2[j] < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1: + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + print i, j, spikes1[i], spikes2[j], tau + if spikes2[j]-spikes1[i] < tau: + # current spike in st1 is coincident + c[i] = 1 + + return c diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 10ebdc7..f99018a 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -6,7 +6,7 @@ import numpy as np from functools import partial import pyspike -from pyspike import DiscreteFunc +from pyspike import DiscreteFunc, SpikeTrain from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -212,3 +212,41 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): dist_func = partial(spike_sync, max_tau=max_tau) return 
_generic_distance_matrix(spike_trains, dist_func, indices, interval) + + +############################################################ +# filter_by_spike_sync +############################################################ +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): + """ Removes the spikes with a multi-variate spike_sync value below + threshold. + """ + N = len(spike_trains) + filtered_spike_trains = [] + + # cython implementation + try: + from cython.cython_profiles import coincidence_single_profile_cython \ + as coincidence_impl + except ImportError: + if not(pyspike.disable_backend_warning): + print("Warning: coincidence_single_profile_cytho not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.python_backend import coincidence_single_profile_python \ + as coincidence_impl + + if max_tau is None: + max_tau = 0.0 + + for i, st in enumerate(spike_trains): + coincidences = np.zeros_like(st) + for j in range(N).remove(i): + coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, + st.t_start, st.t_end, max_tau) + filtered_spikes = st[coincidences > threshold*(N-1)] + filtered_spike_trains.append(SpikeTrain(filtered_spikes, + [st.t_start, st.t_end])) + return filtered_spike_trains diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py new file mode 100644 index 0000000..ce03b23 --- /dev/null +++ b/test/test_sync_filter.py @@ -0,0 +1,43 @@ +""" test_sync_filter.py + +Tests the spike sync based filtering + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +from __future__ import print_function +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_almost_equal + +import pyspike as spk +from pyspike import SpikeTrain + + +def test_cython(): + st1 = np.array([1.0, 2.0, 3.0, 4.0]) + st2 = np.array([1.1, 2.1, 3.8]) + + # cython implementation + try: + from pyspike.cython.cython_profiles import coincidence_single_profile_cython \ + as coincidence_impl + except ImportError: + from pyspike.cython.python_backend import coincidence_single_profile_python \ + as coincidence_impl + + sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), + SpikeTrain(st2, 5.0)) + + coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) + for i, t in enumerate(st1): + assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], + "At index %d" % i) + + coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0)) + for i, t in enumerate(st2): + assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], + "At index %d" % i) -- cgit v1.2.3 From b9850d8c3e35290858e4ef0b2f05f66b0f7397b2 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 10 Oct 2015 20:45:09 +0200 Subject: spike sync filtering, cython sim ann Added function for filtering out events based on a threshold for the spike sync values. Usefull for focusing on synchronous events during directionality analysis. Also added cython version of simulated annealing for performance. 
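
As a minimal usage sketch (spike times are borrowed from the test cases
in this series; the 0.75 threshold is arbitrary)::

    import numpy as np
    import pyspike as spk
    from pyspike import SpikeTrain

    # three spike trains on [0, 5] with mostly synchronous events
    spike_trains = [SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0),
                    SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0),
                    SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)]

    # keep only those spikes that are coincident with more than 75% of
    # the other spike trains
    filtered = spk.filter_by_spike_sync(spike_trains, 0.75)
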
--- pyspike/__init__.py | 3 +- pyspike/cython/cython_distances.pyx | 200 ++++++++++++++++++++++++++ pyspike/cython/cython_profiles.pyx | 14 +- pyspike/cython/cython_simulated_annealing.pyx | 82 +++++++++++ pyspike/spike_directionality.py | 54 ++++--- pyspike/spike_sync.py | 19 ++- setup.py | 14 +- test/test_sync_filter.py | 61 +++++++- 8 files changed, 406 insertions(+), 41 deletions(-) create mode 100644 pyspike/cython/cython_simulated_annealing.pyx diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 0558ce0..4c1e47e 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -18,7 +18,8 @@ from isi_distance import isi_profile, isi_distance, isi_profile_multi,\ from spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync from psth import psth from spikes import load_spike_trains_from_txt, spike_train_from_string, \ diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index c4f2349..c017bf9 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -164,6 +164,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -218,6 +220,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -237,6 +241,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -253,6 +259,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1*(t_end-t1[N1-1])/isi1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value @@ -268,6 +276,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -284,6 +294,9 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2*(t_end-t2[N2-1])/isi2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event index1 += 
1 index2 += 1 @@ -320,6 +333,193 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_f1*(t_end-t1[N1-1])/isi1 s2 = dt_f2*(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) + + +############################################################ +# isi_avrg_rf_cython +############################################################ +cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil: + # rate free version + return (isi1+isi2) + + +############################################################ +# spike_distance_rf_cython +############################################################ +def spike_distance_rf_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # 
alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr + # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 3b33c5a..61d1b18 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -442,18 +442,20 @@ def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, cdef double tau for i in xrange(N1): while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - print i, j, spikes1[i], spikes2[j], tau - if j > -1 and spikes1[i]-spikes2[j] < tau: + if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau: # current spike in st1 is coincident c[i] = 1 - if j < N2-1: + if j < N2-1 and spikes2[j] < spikes1[i]: + # in case spikes2[j] is before spikes1[i] it has to be the one + # right before (see above), hence we move one forward and also + # check the next spike j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - print i, j, spikes1[i], spikes2[j], tau - if 
spikes2[j]-spikes1[i] < tau: + if fabs(spikes2[j]-spikes1[i]) < tau: # current spike in st1 is coincident c[i] = 1 - return c diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx new file mode 100644 index 0000000..be9423c --- /dev/null +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -0,0 +1,82 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_simulated_annealing.pyx + +cython implementation of a simulated annealing algorithm to find the optimal +spike train order + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! 
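+                # unreachable, since rand() % (N-1) is always < N-1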
+ ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index cda7fe3..e1f5f16 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -242,27 +242,39 @@ def optimal_spike_train_order_from_matrix(D, full_output=False): p = np.arange(N) - T = 2*np.max(D) # starting temperature - T_end = 1E-5 * T # final temperature - alpha = 0.9 # cooling factor - total_iter = 0 - while T > T_end: - iterations = 0 - succ_iter = 0 - while iterations < 100*N and succ_iter < 10*N: - # exchange two rows and cols - ind1 = np.random.randint(N-1) - delta_A = -2*D[p[ind1], p[ind1+1]] - if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): - # swap indices - p[ind1], p[ind1+1] = p[ind1+1], p[ind1] - A += delta_A - succ_iter += 1 - iterations += 1 - total_iter += iterations - T *= alpha # cool down - if succ_iter == 0: - break + T_start = 2*np.max(D) # starting temperature + T_end = 1E-5 * T_start # final temperature + alpha = 0.9 # cooling factor + + from cython.cython_simulated_annealing import sim_ann_cython as sim_ann + + p, A, total_iter = sim_ann(D, T_start, T_end, alpha) + + # T = T_start + # total_iter = 0 + # while T > T_end: + # iterations = 0 + # succ_iter = 0 + # # equilibrate for 100*N steps or 10*N successful steps + # while iterations < 100*N and succ_iter < 10*N: + # # exchange two rows and cols + # ind1 = np.random.randint(N-1) + # if ind1 < N-1: + # ind2 = ind1+1 + # else: # this can never happend + # ind2 = 0 + # delta_A = -2*D[p[ind1], p[ind2]] + # if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): + # # swap indices + # p[ind1], p[ind2] = p[ind2], p[ind1] + # A += delta_A + # succ_iter += 1 + # iterations += 1 + # total_iter += iterations + # T *= alpha # cool down + # if succ_iter == 0: + # break + if full_output: return p, A, total_iter else: diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index f99018a..4dfea6a 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -217,12 +217,14 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): ############################################################ # filter_by_spike_sync ############################################################ -def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None, + return_removed_spikes=False): """ Removes the spikes with a multi-variate spike_sync value below threshold. """ N = len(spike_trains) filtered_spike_trains = [] + removed_spike_trains = [] # cython implementation try: @@ -230,7 +232,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): as coincidence_impl except ImportError: if not(pyspike.disable_backend_warning): - print("Warning: coincidence_single_profile_cytho not found. Make \ + print("Warning: coincidence_single_profile_cython not found. 
Make \ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") @@ -243,10 +245,19 @@ Falling back to slow python backend.") for i, st in enumerate(spike_trains): coincidences = np.zeros_like(st) - for j in range(N).remove(i): + for j in xrange(N): + if i == j: + continue coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, st.t_start, st.t_end, max_tau) filtered_spikes = st[coincidences > threshold*(N-1)] filtered_spike_trains.append(SpikeTrain(filtered_spikes, [st.t_start, st.t_end])) - return filtered_spike_trains + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains diff --git a/setup.py b/setup.py index c130fbd..ce35773 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,8 @@ else: if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ os.path.isfile("pyspike/cython/cython_distances.c") and \ - os.path.isfile("pyspike/cython/cython_directionality.c"): + os.path.isfile("pyspike/cython/cython_directionality.c") and \ + os.path.isfile("pyspike/cython/cython_simulated_annealing.c"): use_c = True else: use_c = False @@ -41,7 +42,9 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), Extension("pyspike.cython.cython_directionality", - ["pyspike/cython/cython_directionality.pyx"]) + ["pyspike/cython/cython_directionality.pyx"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -53,7 +56,9 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), Extension("pyspike.cython.cython_directionality", - ["pyspike/cython/cython_directionality.c"]) + ["pyspike/cython/cython_directionality.c"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -93,7 +98,8 @@ train similarity', package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', 'cython/cython_distances.c', - 'cython/cython_directionality.c'], + 'cython/cython_directionality.c', + 'cython/cython_simulated_annealing.c'], 'test': ['Spike_testdata.txt'] } ) diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index ce03b23..66ffcb6 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -17,17 +17,18 @@ import pyspike as spk from pyspike import SpikeTrain -def test_cython(): +def test_single_prof(): st1 = np.array([1.0, 2.0, 3.0, 4.0]) st2 = np.array([1.1, 2.1, 3.8]) + st3 = np.array([0.9, 3.1, 4.1]) # cython implementation try: - from pyspike.cython.cython_profiles import coincidence_single_profile_cython \ - as coincidence_impl + from pyspike.cython.cython_profiles import \ + coincidence_single_profile_cython as coincidence_impl except ImportError: - from pyspike.cython.python_backend import coincidence_single_profile_python \ - as coincidence_impl + from pyspike.cython.python_backend import \ + coincidence_single_profile_python as coincidence_impl sync_prof 
= spk.spike_sync_profile(SpikeTrain(st1, 5.0),
                                       SpikeTrain(st2, 5.0))
@@ -41,3 +42,53 @@
     for i, t in enumerate(st2):
         assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
                      "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
--
cgit v1.2.3

From 1fa8493ce15af8cd4c057eece155f1557fc241ea Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Mon, 12 Oct 2015 11:20:03 +0200
Subject: added coincidence single profile to python backend

missing function in python backend added, identified and fixed a bug in the
implementation as well
---
 pyspike/cython/cython_profiles.pyx |  2 +-
 pyspike/cython/python_backend.py   | 67 ++++++++++++++++++++++++++++----------
 pyspike/spike_sync.py              |  2 +-
 test/test_sync_filter.py           |  3 +-
 4 files changed, 53 insertions(+), 21 deletions(-)

diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 61d1b18..4663f2e 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -449,7 +449,7 @@ def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
         if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
             # current spike in st1 is coincident
             c[i] = 1
-        if j < N2-1 and spikes2[j] < spikes1[i]:
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
             # in case spikes2[j] is before spikes1[i] it has to be the one
             # right before (see above), hence we move one forward and also
             # check the next spike
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 69a420f..5c4c75d 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
 Collection of python functions that can be used instead of the cython
 implementation.
-Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License @@ -334,26 +334,27 @@ def cumulative_sync_python(spikes1, spikes2): return st, c +def get_tau(spikes1, spikes2, i, j, max_tau, init_tau): + m = init_tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + ############################################################ # coincidence_python ############################################################ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): - def get_tau(spikes1, spikes2, i, j, max_tau): - m = t_end - t_start # use interval as initial tau - if i < len(spikes1)-1 and i > -1: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-1 and j > -1: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = min(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = min(m, max_tau) - return m - N1 = len(spikes1) N2 = len(spikes2) i = -1 @@ -366,7 +367,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -376,7 +377,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike @@ -411,6 +412,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): return st, c, mp +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): + + N1 = len(spikes1) + N2 = len(spikes2) + j = -1 + c = np.zeros(N1) # coincidences + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if j > -1 and abs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the first or + # the one right before (see above), hence we move one forward and + # also check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if abs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c + + ############################################################ # add_piece_wise_const_python ############################################################ diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 4dfea6a..7f1bce8 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ 
-237,7 +237,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import coincidence_single_profile_python \ + from cython.python_backend import coincidence_single_python \ as coincidence_impl if max_tau is None: diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index 66ffcb6..e259903 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -28,12 +28,13 @@ def test_single_prof(): coincidence_single_profile_cython as coincidence_impl except ImportError: from pyspike.cython.python_backend import \ - coincidence_single_profile_python as coincidence_impl + coincidence_single_python as coincidence_impl sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), SpikeTrain(st2, 5.0)) coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) + print(coincidences) for i, t in enumerate(st1): assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], "At index %d" % i) -- cgit v1.2.3 From 78f1404168447b3173302fa98817a0d964d8f290 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 12:43:21 +0200 Subject: updated test case to new spike sync behavior --- test/test_directionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_directionality.py b/test/test_directionality.py index 7ca5d58..3e12177 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -82,7 +82,7 @@ def test_spike_train_order(): expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) - expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) assert_array_equal(f.x, expected_x) assert_array_equal(f.y, expected_y) -- cgit v1.2.3 From 177c97bd11c4b8dca87ca70e8a44857e14b6d87c Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 14:14:39 +0200 Subject: python3 fixes --- pyspike/spike_directionality.py | 21 +++++++++++---------- pyspike/spike_sync.py | 2 +- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index e1f5f16..cd2bf04 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -3,8 +3,9 @@ # Copyright 2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import numpy as np -from math import exp import pyspike from pyspike import DiscreteFunc from functools import partial @@ -22,7 +23,7 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_directionality_cython as spike_directionality_impl if max_tau is None: max_tau = 0.0 @@ -95,7 +96,7 @@ def spike_directionality_profiles(spike_trains, indices=None, # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_directionality_profiles_cython as profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -103,7 +104,7 @@ def spike_directionality_profiles(spike_trains, indices=None, PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.directionality_python_backend import \ + from 
.cython.directionality_python_backend import \ spike_directionality_profile_python as profile_impl if max_tau is None: @@ -143,7 +144,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_profile_cython as \ spike_train_order_profile_impl except ImportError: @@ -153,7 +154,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.directionality_python_backend import \ + from .cython.directionality_python_backend import \ spike_train_order_profile_python as spike_train_order_profile_impl if max_tau is None: @@ -180,7 +181,7 @@ def spike_train_order(spike_train1, spike_train2, normalize=True, # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_cython as spike_train_order_impl if max_tau is None: max_tau = 0.0 @@ -246,7 +247,7 @@ def optimal_spike_train_order_from_matrix(D, full_output=False): T_end = 1E-5 * T_start # final temperature alpha = 0.9 # cooling factor - from cython.cython_simulated_annealing import sim_ann_cython as sim_ann + from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann p, A, total_iter = sim_ann(D, T_start, T_end, alpha) @@ -305,8 +306,8 @@ def permutate_matrix(D, p): """ N = len(D) D_p = np.empty_like(D) - for n in xrange(N): - for m in xrange(N): + for n in range(N): + for m in range(N): D_p[n, m] = D[p[n], p[m]] return D_p diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 28e252c..32e6bf4 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -323,7 +323,7 @@ Falling back to slow python backend.") for i, st in enumerate(spike_trains): coincidences = np.zeros_like(st) - for j in xrange(N): + for j in range(N): if i == j: continue coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, -- cgit v1.2.3 From 1db8afcf2265bf1d03eff89cd61db9fe9ff72064 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 14:46:49 +0200 Subject: another python3 fix --- pyspike/cython/python_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a4a0c9e..e75f181 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -443,7 +443,7 @@ def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): N2 = len(spikes2) j = -1 c = np.zeros(N1) # coincidences - for i in xrange(N1): + for i in range(N1): while j < N2-1 and spikes2[j+1] < spikes1[i]: # move forward until spikes2[j] is the last spike before spikes1[i] # note that if spikes2[j] is after spikes1[i] we dont do anything -- cgit v1.2.3 From 0c710a2450a055091fe6de2313d6240671d1c74d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 12:10:01 +0200 Subject: reorganized directionality module --- pyspike/__init__.py | 9 +- pyspike/cython/cython_directionality.pyx | 223 ++++++++++++++++++++++ pyspike/cython/directionality_python_backend.py | 89 +++++++++ pyspike/spike_directionality.py | 244 ++++++++++++++++++++++++ setup.py | 19 +- test/test_directionality.py | 41 ++++ 6 files changed, 619 insertions(+), 6 deletions(-) create mode 100644 
pyspike/cython/cython_directionality.pyx create mode 100644 pyspike/cython/directionality_python_backend.py create mode 100644 pyspike/spike_directionality.py create mode 100644 test/test_directionality.py diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 08253fb..7fa5265 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -7,8 +7,8 @@ Distributed under the BSD License from __future__ import absolute_import __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc", "directionality"] + "spikes", "spike_directionality", "SpikeTrain", + "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from .PieceWiseConstFunc import PieceWiseConstFunc from .PieceWiseLinFunc import PieceWiseLinFunc @@ -27,6 +27,11 @@ from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ merge_spike_trains, generate_poisson_spikes +from spike_directionality import spike_directionality, \ + spike_directionality_matrix, spike_train_order_profile, \ + optimal_spike_train_order_from_matrix, optimal_spike_train_order, \ + permutate_matrix + # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx new file mode 100644 index 0000000..e1f63c4 --- /dev/null +++ b/pyspike/cython/cython_directionality.pyx @@ -0,0 +1,223 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = fmin(m, max_tau) + return m + + +############################################################ +# spike_train_order_profile_cython +############################################################ +def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 
2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + + +############################################################ +# spike_order_values_cython +############################################################ +def spike_order_values_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef double[:] a1 = np.zeros(N1) # asymmetry values + cdef double[:] a2 = np.zeros(N2) # asymmetry values + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + a1[i] = -1 + a2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + a1[i] = +1 + a2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # equal spike times: zero asymmetry value + a1[i] = 0 + a2[j] = 0 + + return a1, a2 + + +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int asym = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in 
spike train 2 appeared before spike in spike train 1 + # mark with -1 + asym -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + asym += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if asym == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + asym = 1 + mp = 1 + + return asym, mp diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py new file mode 100644 index 0000000..e14238f --- /dev/null +++ b/pyspike/cython/directionality_python_backend.py @@ -0,0 +1,89 @@ +""" directionality_python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# spike_train_order_python +############################################################ +def spike_train_order_python(spikes1, spikes2, t_start, t_end, max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + n = 0 + st = np.zeros(N1 + N2 + 2) # spike times + a = np.zeros(N1 + N2 + 2) # coincidences + mp = np.ones(N1 + N2 + 2) # multiplicity + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py new file mode 100644 index 0000000..0e69cb5 --- /dev/null +++ b/pyspike/spike_directionality.py @@ -0,0 +1,244 @@ +# Module containing functions to compute the SPIKE directionality and the +# spike train order profile +# Copyright 2015, Mario Mulansky +# Distributed under the BSD License + +import numpy as np +from math 
import exp
+import pyspike
+from pyspike import DiscreteFunc
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike directionality for two spike trains.
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_impl
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_impl(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = _spike_directionality_profile(spike_train1,
+                                                  spike_train2,
+                                                  max_tau).integral(interval)
+        if normalize:
+            return 1.0*c/mp
+        else:
+            return c
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError()
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+                                interval=None, max_tau=None):
+    """ Computes the spike directionality matrix for the given spike trains.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+                                 interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(spike_trains, indices=None,
+                              interval=None, max_tau=None):
+    """ Computes the spike train symmetry value for each spike in each spike
+    train.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(st.spikes) for st in spike_trains]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from cython.cython_directionality import \
+            spike_order_values_cython as spike_order_values_impl
+    except ImportError:
+        raise NotImplementedError()
+#        if not(pyspike.disable_backend_warning):
+#            print("Warning: spike_distance_cython not found. 
Make sure that \
+# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+# Falling back to slow python backend.")
+#        # use python backend
+#        from cython.python_backend import coincidence_python \
+#            as coincidence_profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        a1, a2 = spike_order_values_impl(spike_trains[i].spikes,
+                                         spike_trains[j].spikes,
+                                         spike_trains[i].t_start,
+                                         spike_trains[i].t_end,
+                                         max_tau)
+        asymmetry_list[i] += a1
+        asymmetry_list[j] += a2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
+############################################################
+# optimal_spike_train_order_from_matrix
+############################################################
+def optimal_spike_train_order_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and A value.
+    Internal function, don't call directly! Use optimal_spike_train_order
+    instead.
+    """
+    N = len(D)
+    A = np.sum(np.triu(D, 0))
+
+    p = np.arange(N)
+
+    T = 2*np.max(D)  # starting temperature
+    T_end = 1E-5 * T  # final temperature
+    alpha = 0.9  # cooling factor
+    total_iter = 0
+    while T > T_end:
+        iterations = 0
+        succ_iter = 0
+        while iterations < 100*N and succ_iter < 10*N:
+            # exchange two rows and cols
+            ind1 = np.random.randint(N-1)
+            delta_A = -2*D[p[ind1], p[ind1+1]]
+            if delta_A > 0.0 or exp(delta_A/T) > np.random.random():
+                # swap indices
+                p[ind1], p[ind1+1] = p[ind1+1], p[ind1]
+                A += delta_A
+                succ_iter += 1
+            iterations += 1
+        total_iter += iterations
+        T *= alpha  # cool down
+        if succ_iter == 0:
+            break
+    if full_output:
+        return p, A, total_iter
+    else:
+        return p, A
+
+
+############################################################
+# optimal_spike_train_order
+############################################################
+def optimal_spike_train_order(spike_trains, indices=None, interval=None,
+                              max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains via simulated
+    annealing.
+    Returns the optimal permutation p and A value.
+    """
+    D = spike_directionality_matrix(spike_trains, normalize=False,
+                                    indices=indices, interval=interval,
+                                    max_tau=max_tau)
+    return optimal_spike_train_order_from_matrix(D, full_output)
+
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+    """ Applies the permutation p to the columns and rows of matrix D.
+    Returns the new permuted matrix.
+    """
+    N = len(D)
+    D_p = np.empty_like(D)
+    for n in xrange(N):
+        for m in xrange(N):
+            D_p[n, m] = D[p[n], p[m]]
+    return D_p
+
+
+# internal helper functions
+
+############################################################
+# _spike_directionality_profile
+############################################################
+def _spike_directionality_profile(spike_train1, spike_train2,
+                                  max_tau=None):
+    """ Computes the spike delay asymmetry profile A(t) of the two given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-distance profile :math:`S_{sync}(t)`. 
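+              (This is the spike delay asymmetry profile :math:`A(t)`: it
+              attains +1 where the first spike train leads, -1 where it
+              follows, and 0 at coincidences with equal spike times.)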
+ :rtype: :class:`pyspike.function.DiscreteFunction` + + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_train_order_profile_cython as \ + spike_train_order_profile_impl + except ImportError: + # raise NotImplementedError() + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_train_order_python as spike_train_order_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_train_order_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) diff --git a/setup.py b/setup.py index 5b9e677..9ba1da6 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,8 @@ class numpy_include(object): if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c"): + os.path.isfile("pyspike/cython/cython_distances.c") and \ + os.path.isfile("pyspike/cython/cython_directionality.c"): use_c = True else: use_c = False @@ -45,7 +46,9 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.pyx"]) + ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -55,7 +58,9 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.c"]) + ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -96,5 +101,11 @@ train similarity', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' - ] + ], + package_data={ + 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', + 'cython/cython_distances.c', + 'cython/cython_directionality.c'], + 'test': ['Spike_testdata.txt'] + } ) diff --git a/test/test_directionality.py b/test/test_directionality.py new file mode 100644 index 0000000..5c3da00 --- /dev/null +++ b/test/test_directionality.py @@ -0,0 +1,41 @@ +""" test_spike_delay_asymmetry.py + +Tests the asymmetry functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc +from pyspike.spike_directionality import 
_spike_directionality_profile + + +def test_profile(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = _spike_directionality_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 4.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000]) + expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) + expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) + + f = _spike_directionality_profile(st1, st3) + assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) -- cgit v1.2.3 From 827be39a1646f1e518f7210b8943006f5741144d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 17:40:09 +0200 Subject: further refactoring of directionality --- pyspike/__init__.py | 7 +- pyspike/cython/cython_directionality.pyx | 109 ++++++++++----- pyspike/spike_directionality.py | 232 ++++++++++++++++++++++--------- test/test_directionality.py | 80 ++++++++--- 4 files changed, 311 insertions(+), 117 deletions(-) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 7fa5265..10d2936 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -28,9 +28,10 @@ from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ merge_spike_trains, generate_poisson_spikes from spike_directionality import spike_directionality, \ - spike_directionality_matrix, spike_train_order_profile, \ - optimal_spike_train_order_from_matrix, optimal_spike_train_order, \ - permutate_matrix + spike_directionality_profiles, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order, \ + spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \ + optimal_spike_train_order, permutate_matrix # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx index e1f63c4..ac37690 100644 --- a/pyspike/cython/cython_directionality.pyx +++ b/pyspike/cython/cython_directionality.pyx @@ -128,21 +128,68 @@ def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, return st, a, mp +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and 
spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + ############################################################ -# spike_order_values_cython +# spike_directionality_profiles_cython ############################################################ -def spike_order_values_cython(double[:] spikes1, - double[:] spikes2, - double t_start, double t_end, - double max_tau): +def spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): cdef int N1 = len(spikes1) cdef int N2 = len(spikes2) cdef int i = -1 cdef int j = -1 - cdef double[:] a1 = np.zeros(N1) # asymmetry values - cdef double[:] a2 = np.zeros(N2) # asymmetry values + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: @@ -153,8 +200,8 @@ def spike_order_values_cython(double[:] spikes1, # coincidence between the current spike and the previous spike # spike from spike train 1 after spike train 2 # leading spike gets +1, following spike -1 - a1[i] = -1 - a2[j] = +1 + d1[i] = -1 + d2[j] = +1 elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) @@ -162,62 +209,54 @@ def spike_order_values_cython(double[:] spikes1, # coincidence between the current spike and the previous spike # spike from spike train 1 before spike train 2 # leading spike gets +1, following spike -1 - a1[i] = +1 - a2[j] = -1 + d1[i] = +1 + d2[j] = -1 else: # spikes1[i+1] = spikes2[j+1] # advance in both spike trains j += 1 i += 1 # equal spike times: zero asymmetry value - a1[i] = 0 - a2[j] = 0 + d1[i] = 0 + d2[j] = 0 - return a1, a2 + return d1, d2 ############################################################ -# spike_train_order_cython +# spike_directionality_cython ############################################################ -def spike_train_order_cython(double[:] spikes1, double[:] spikes2, - double t_start, double t_end, double max_tau): +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): cdef int N1 = len(spikes1) cdef int N2 = len(spikes2) cdef int i = -1 cdef int j = -1 - cdef int asym = 0 - cdef int mp = 0 + cdef int d = 0 # directionality value cdef double interval = t_end - t_start cdef double tau while i + j < N1 + N2 - 2: if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 - mp += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike - # spike in spike train 2 appeared before spike in spike train 1 - # mark with -1 - asym -= 2 + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 - mp += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike - # 
spike in spike train 1 appeared before spike in spike train 2 - # mark with +1 - asym += 2 + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 else: # spikes1[i+1] = spikes2[j+1] # advance in both spike trains j += 1 i += 1 - # add only one event with multiplicity 2, but no asymmetry counting - mp += 2 - - if asym == 0 and mp == 0: - # empty spike trains -> spike sync = 1 by definition - asym = 1 - mp = 1 - return asym, mp + return d diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index 0e69cb5..f608ecc 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -7,6 +7,8 @@ import numpy as np from math import exp import pyspike from pyspike import DiscreteFunc +from functools import partial +from pyspike.generic import _generic_profile_multi ############################################################ @@ -21,23 +23,21 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, # for optimal performance try: from cython.cython_directionality import \ - spike_train_order_cython as spike_train_order_impl + spike_directionality_cython as spike_directionality_impl if max_tau is None: max_tau = 0.0 - c, mp = spike_train_order_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) + d = spike_directionality_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + c = len(spike_train1.spikes) except ImportError: - # Cython backend not available: fall back to profile averaging - c, mp = _spike_directionality_profile(spike_train1, - spike_train2, - max_tau).integral(interval) + raise NotImplementedError() if normalize: - return 1.0*c/mp + return 1.0*d/c else: - return c + return d else: # some specific interval is provided: not yet implemented raise NotImplementedError() @@ -70,11 +70,11 @@ def spike_directionality_matrix(spike_trains, normalize=True, indices=None, ############################################################ -# spike_train_order_profile +# spike_directionality_profiles ############################################################ -def spike_train_order_profile(spike_trains, indices=None, - interval=None, max_tau=None): - """ Computes the spike train symmetry value for each spike in each spike +def spike_directionality_profiles(spike_trains, indices=None, + interval=None, max_tau=None): + """ Computes the spike directionality value for each spike in each spike train. 
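+    Each spike receives +1 if it leads a coincidence and -1 if it follows
+    one; the values are averaged over the N-1 pairs involving its train.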
""" if indices is None: @@ -92,7 +92,7 @@ def spike_train_order_profile(spike_trains, indices=None, # cython implementation try: from cython.cython_directionality import \ - spike_order_values_cython as spike_order_values_impl + spike_directionality_profiles_cython as profile_impl except ImportError: raise NotImplementedError() # if not(pyspike.disable_backend_warning): @@ -107,11 +107,9 @@ def spike_train_order_profile(spike_trains, indices=None, max_tau = 0.0 for i, j in pairs: - a1, a2 = spike_order_values_impl(spike_trains[i].spikes, - spike_trains[j].spikes, - spike_trains[i].t_start, - spike_trains[i].t_end, - max_tau) + a1, a2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, + spike_trains[i].t_start, spike_trains[i].t_end, + max_tau) asymmetry_list[i] += a1 asymmetry_list[j] += a2 for a in asymmetry_list: @@ -119,6 +117,114 @@ def spike_train_order_profile(spike_trains, indices=None, return asymmetry_list +############################################################ +# spike_train_order_profile +############################################################ +def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): + """ Computes the spike train order profile P(t) of the two given + spike trains. Returns the profile as a DiscreteFunction object. + :param spike_train1: First spike train. + :type spike_train1: :class:`pyspike.SpikeTrain` + :param spike_train2: Second spike train. + :type spike_train2: :class:`pyspike.SpikeTrain` + :param max_tau: Maximum coincidence window size. If 0 or `None`, the + coincidence window has no upper bound. + :returns: The spike-distance profile :math:`S_{sync}(t)`. + :rtype: :class:`pyspike.function.DiscreteFunction` + """ + # check whether the spike trains are defined for the same interval + assert spike_train1.t_start == spike_train2.t_start, \ + "Given spike trains are not defined on the same interval!" + assert spike_train1.t_end == spike_train2.t_end, \ + "Given spike trains are not defined on the same interval!" + + # cython implementation + try: + from cython.cython_directionality import \ + spike_train_order_profile_cython as \ + spike_train_order_profile_impl + except ImportError: + # raise NotImplementedError() + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_train_order_python as spike_train_order_profile_impl + + if max_tau is None: + max_tau = 0.0 + + times, coincidences, multiplicity \ + = spike_train_order_profile_impl(spike_train1.spikes, + spike_train2.spikes, + spike_train1.t_start, + spike_train1.t_end, + max_tau) + + return DiscreteFunc(times, coincidences, multiplicity) + + +############################################################ +# spike_train_order +############################################################ +def spike_train_order(spike_train1, spike_train2, normalize=True, + interval=None, max_tau=None): + """ Computes the overall spike delay asymmetry value for two spike trains. 
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_impl
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_impl(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau).integral(interval)
+        if normalize:
+            return 1.0*c/mp
+        else:
+            return c
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError()
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike delay asymmetry profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike sync profile :math:`<S_{sync}>(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    # average_dist.mul_scalar(1.0/M)  # no normalization here!
+    return average_prof
+
+
 ############################################################
 # optimal_spike_train_order_from_matrix
 ############################################################
@@ -195,50 +301,50 @@ def permutate_matrix(D, p):
 ############################################################
 # _spike_directionality_profile
 ############################################################
-def _spike_directionality_profile(spike_train1, spike_train2,
-                                  max_tau=None):
-    """ Computes the spike delay asymmetry profile A(t) of the two given
-    spike trains. Returns the profile as a DiscreteFunction object.
+# def _spike_directionality_profile(spike_train1, spike_train2,
+#                                   max_tau=None):
+#     """ Computes the spike delay asymmetry profile A(t) of the two given
+#     spike trains. Returns the profile as a DiscreteFunction object.
 
-    :param spike_train1: First spike train.
-    :type spike_train1: :class:`pyspike.SpikeTrain`
-    :param spike_train2: Second spike train.
-    :type spike_train2: :class:`pyspike.SpikeTrain`
-    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
-                    coincidence window has no upper bound.
+# :returns: The spike-distance profile :math:`S_{sync}(t)`. +# :rtype: :class:`pyspike.function.DiscreteFunction` - """ - # check whether the spike trains are defined for the same interval - assert spike_train1.t_start == spike_train2.t_start, \ - "Given spike trains are not defined on the same interval!" - assert spike_train1.t_end == spike_train2.t_end, \ - "Given spike trains are not defined on the same interval!" +# """ +# # check whether the spike trains are defined for the same interval +# assert spike_train1.t_start == spike_train2.t_start, \ +# "Given spike trains are not defined on the same interval!" +# assert spike_train1.t_end == spike_train2.t_end, \ +# "Given spike trains are not defined on the same interval!" - # cython implementation - try: - from cython.cython_directionality import \ - spike_train_order_profile_cython as \ - spike_train_order_profile_impl - except ImportError: - # raise NotImplementedError() - if not(pyspike.disable_backend_warning): - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from cython.directionality_python_backend import \ - spike_train_order_python as spike_train_order_profile_impl +# # cython implementation +# try: +# from cython.cython_directionality import \ +# spike_train_order_profile_cython as \ +# spike_train_order_profile_impl +# except ImportError: +# # raise NotImplementedError() +# if not(pyspike.disable_backend_warning): +# print("Warning: spike_distance_cython not found. Make sure that \ +# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +# Falling back to slow python backend.") +# # use python backend +# from cython.directionality_python_backend import \ +# spike_train_order_python as spike_train_order_profile_impl - if max_tau is None: - max_tau = 0.0 +# if max_tau is None: +# max_tau = 0.0 - times, coincidences, multiplicity \ - = spike_train_order_profile_impl(spike_train1.spikes, - spike_train2.spikes, - spike_train1.t_start, - spike_train1.t_end, - max_tau) +# times, coincidences, multiplicity \ +# = spike_train_order_profile_impl(spike_train1.spikes, +# spike_train2.spikes, +# spike_train1.t_start, +# spike_train1.t_end, +# max_tau) - return DiscreteFunc(times, coincidences, multiplicity) +# return DiscreteFunc(times, coincidences, multiplicity) diff --git a/test/test_directionality.py b/test/test_directionality.py index 5c3da00..7ca5d58 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -14,28 +14,76 @@ from numpy.testing import assert_equal, assert_almost_equal, \ import pyspike as spk from pyspike import SpikeTrain, DiscreteFunc -from pyspike.spike_directionality import _spike_directionality_profile +# from pyspike.spike_directionality import _spike_directionality_profile -def test_profile(): +def test_spike_directionality(): st1 = SpikeTrain([100, 200, 300], [0, 1000]) st2 = SpikeTrain([105, 205, 300], [0, 1000]) - expected_x = np.array([0, 100, 105, 200, 205, 300, 1000]) - expected_y = np.array([1, 1, 1, 1, 1, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 2, 2]) - - f = _spike_directionality_profile(st1, st2) - - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) - assert_almost_equal(f.avrg(), 2.0/3.0) assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), - 4.0) + 2.0) + + # exchange order of spike 
trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_profiles([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) st3 = SpikeTrain([105, 195, 500], [0, 1000]) - expected_x = np.array([0, 100, 105, 195, 200, 300, 500, 1000]) - expected_y = np.array([1, 1, 1, -1, -1, 0, 0, 0]) - expected_mp = np.array([1, 1, 1, 1, 1, 1, 1, 1]) - f = _spike_directionality_profile(st1, st3) - assert f.almost_equal(DiscreteFunc(expected_x, expected_y, expected_mp)) + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) -- cgit v1.2.3 From 66bf08417c651d1c0d96cb980571efb2043d7f1a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 26 Aug 2015 18:26:58 +0200 Subject: completed python directionality backend --- pyspike/cython/directionality_python_backend.py | 57 ++++++++++++++++++++++++- pyspike/spike_directionality.py | 29 +++++++------ 2 files changed, 72 insertions(+), 14 deletions(-) diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py index e14238f..c1d820b 100644 --- a/pyspike/cython/directionality_python_backend.py +++ b/pyspike/cython/directionality_python_backend.py @@ -15,7 +15,62 @@ import numpy as np ############################################################ # 
spike_train_order_python ############################################################ -def spike_train_order_python(spikes1, spikes2, t_start, t_end, max_tau): +def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + d1 = np.zeros(N1) # directionality values + d2 = np.zeros(N2) # directionality values + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in first spike train occurs after second + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in second spike train occurs after first + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_train_order_python +############################################################ +def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): def get_tau(spikes1, spikes2, i, j, max_tau): m = t_end - t_start # use interval as initial tau diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index f608ecc..cda7fe3 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -33,7 +33,11 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, max_tau) c = len(spike_train1.spikes) except ImportError: - raise NotImplementedError() + d1, x = spike_directionality_profiles([spike_train1, spike_train2], + interval=interval, + max_tau=max_tau) + d = np.sum(d1) + c = len(spike_train1.spikes) if normalize: return 1.0*d/c else: @@ -94,24 +98,23 @@ def spike_directionality_profiles(spike_trains, indices=None, from cython.cython_directionality import \ spike_directionality_profiles_cython as profile_impl except ImportError: - raise NotImplementedError() -# if not(pyspike.disable_backend_warning): -# print("Warning: spike_distance_cython not found. Make sure that \ -# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -# Falling back to slow python backend.") -# # use python backend -# from cython.python_backend import coincidence_python \ -# as coincidence_profile_impl + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. 
Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.directionality_python_backend import \ + spike_directionality_profile_python as profile_impl if max_tau is None: max_tau = 0.0 for i, j in pairs: - a1, a2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, + d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, spike_trains[i].t_start, spike_trains[i].t_end, max_tau) - asymmetry_list[i] += a1 - asymmetry_list[j] += a2 + asymmetry_list[i] += d1 + asymmetry_list[j] += d2 for a in asymmetry_list: a /= len(spike_trains)-1 return asymmetry_list @@ -151,7 +154,7 @@ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend from cython.directionality_python_backend import \ - spike_train_order_python as spike_train_order_profile_impl + spike_train_order_profile_python as spike_train_order_profile_impl if max_tau is None: max_tau = 0.0 -- cgit v1.2.3 From a5e6a12a619cb9528a4cf7f3ef8f082e5eb877c2 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 9 Sep 2015 17:51:03 +0200 Subject: added SPIKE-Sync based filtering new function filter_by_spike_sync removes spikes that have a multi-variate Spike Sync value below some threshold not yet fully tested, python backend missing. --- pyspike/cython/cython_profiles.pyx | 31 +++++++++++++++++++++++++++ pyspike/spike_sync.py | 40 ++++++++++++++++++++++++++++++++++- test/test_sync_filter.py | 43 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 test/test_sync_filter.py diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index 4a42cdb..fe08cb7 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -450,3 +450,34 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2, c[1] = 1 return st, c, mp + + +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int j = -1 + cdef double[:] c = np.zeros(N1) # coincidences + cdef double interval = t_end - t_start + cdef double tau + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + print i, j, spikes1[i], spikes2[j], tau + if j > -1 and spikes1[i]-spikes2[j] < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1: + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + print i, j, spikes1[i], spikes2[j], tau + if spikes2[j]-spikes1[i] < tau: + # current spike in st1 is coincident + c[i] = 1 + + return c diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 80f7805..d37731f 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -8,7 +8,7 @@ from __future__ import absolute_import import numpy as np from functools import partial import pyspike -from pyspike import DiscreteFunc +from pyspike import DiscreteFunc, SpikeTrain from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -290,3 +290,41 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): dist_func = partial(spike_sync_bi, 
max_tau=max_tau) return _generic_distance_matrix(spike_trains, dist_func, indices, interval) + + +############################################################ +# filter_by_spike_sync +############################################################ +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): + """ Removes the spikes with a multi-variate spike_sync value below + threshold. + """ + N = len(spike_trains) + filtered_spike_trains = [] + + # cython implementation + try: + from cython.cython_profiles import coincidence_single_profile_cython \ + as coincidence_impl + except ImportError: + if not(pyspike.disable_backend_warning): + print("Warning: coincidence_single_profile_cytho not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from cython.python_backend import coincidence_single_profile_python \ + as coincidence_impl + + if max_tau is None: + max_tau = 0.0 + + for i, st in enumerate(spike_trains): + coincidences = np.zeros_like(st) + for j in range(N).remove(i): + coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, + st.t_start, st.t_end, max_tau) + filtered_spikes = st[coincidences > threshold*(N-1)] + filtered_spike_trains.append(SpikeTrain(filtered_spikes, + [st.t_start, st.t_end])) + return filtered_spike_trains diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py new file mode 100644 index 0000000..ce03b23 --- /dev/null +++ b/test/test_sync_filter.py @@ -0,0 +1,43 @@ +""" test_sync_filter.py + +Tests the spike sync based filtering + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +from __future__ import print_function +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_almost_equal + +import pyspike as spk +from pyspike import SpikeTrain + + +def test_cython(): + st1 = np.array([1.0, 2.0, 3.0, 4.0]) + st2 = np.array([1.1, 2.1, 3.8]) + + # cython implementation + try: + from pyspike.cython.cython_profiles import coincidence_single_profile_cython \ + as coincidence_impl + except ImportError: + from pyspike.cython.python_backend import coincidence_single_profile_python \ + as coincidence_impl + + sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), + SpikeTrain(st2, 5.0)) + + coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) + for i, t in enumerate(st1): + assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], + "At index %d" % i) + + coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0)) + for i, t in enumerate(st2): + assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], + "At index %d" % i) -- cgit v1.2.3 From 18ea80e2d01e9eb4ceee17219f91098efbcdf67c Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 10 Oct 2015 20:45:09 +0200 Subject: spike sync filtering, cython sim ann Added function for filtering out events based on a threshold for the spike sync values. Usefull for focusing on synchronous events during directionality analysis. Also added cython version of simulated annealing for performance. 
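
A minimal usage sketch of the new filter (hypothetical spike times; assumes
the compiled cython backends are available; note that in this version the
loop over partner trains, `for j in range(N).remove(i)`, still returns None
since list.remove mutates in place, so the sketch shows the intended
interface rather than tested behavior)::

    import pyspike as spk
    from pyspike import SpikeTrain

    # three spike trains on [0, 1000] sharing one synchronous event at t=100
    spike_trains = [SpikeTrain([100.0, 205.0, 300.0], [0, 1000]),
                    SpikeTrain([100.0, 200.0, 305.0], [0, 1000]),
                    SpikeTrain([100.0, 500.0], [0, 1000])]

    # keep only spikes whose multi-variate SPIKE-Sync value exceeds 0.5
    filtered = spk.filter_by_spike_sync(spike_trains, 0.5)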
--- pyspike/__init__.py | 7 +- pyspike/cython/cython_distances.pyx | 200 ++++++++++++++++++++++++++ pyspike/cython/cython_profiles.pyx | 14 +- pyspike/cython/cython_simulated_annealing.pyx | 82 +++++++++++ pyspike/spike_directionality.py | 54 ++++--- pyspike/spike_sync.py | 19 ++- setup.py | 14 +- test/test_sync_filter.py | 61 +++++++- 8 files changed, 408 insertions(+), 43 deletions(-) create mode 100644 pyspike/cython/cython_simulated_annealing.pyx diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 10d2936..61c5c4f 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -19,9 +19,10 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ isi_distance_multi, isi_distance_matrix from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix -from .spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix -from .psth import psth +from spike_sync import spike_sync_profile, spike_sync,\ + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync +from psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index ac5f226..d4070ae 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -267,6 +271,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p1 s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_p1 # s2 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): index2 += 1 # first calculate the previous interval end value @@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, t_curr = t_p2 s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) @@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2, s2 = dt_p2 # s1 is the same as above, thus we can compute y2 immediately y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative 
definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) + else: # t_f1 == t_f2 - generate only one event index1 += 1 index2 += 1 @@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2, s1 = dt_f1 # *(t_end-t1[N1-1])/isi1 s2 = dt_f2 # *(t_end-t2[N2-1])/isi2 y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_value / (t_end-t_start) + + +############################################################ +# isi_avrg_rf_cython +############################################################ +cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil: + # rate free version + return (isi1+isi2) + + +############################################################ +# spike_distance_rf_cython +############################################################ +def spike_distance_rf_cython(double[:] t1, double[:] t2, + double t_start, double t_end): + + cdef int N1, N2, index1, index2, index + cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2 + cdef double isi1, isi2, s1, s2 + cdef double y_start, y_end, t_last, t_current, spike_value + + spike_value = 0.0 + + N1 = len(t1) + N2 = len(t2) + + with nogil: # release the interpreter to allow multithreading + t_last = t_start + t_p1 = t_start + t_p2 = t_start + if t1[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f1 = t1[0] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + isi1 = fmax(t_f1-t_start, t1[1]-t1[0]) + dt_p1 = dt_f1 + s1 = dt_p1*(t_f1-t_start)/isi1 + index1 = -1 + else: + t_f1 = t1[1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end) + dt_p1 = 0.0 + isi1 = t1[1]-t1[0] + s1 = dt_p1 + index1 = 0 + if t2[0] > t_start: + # dt_p1 = t2[0]-t_start + t_f2 = t2[0] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = dt_f2 + isi2 = fmax(t_f2-t_start, t2[1]-t2[0]) + s2 = dt_p2*(t_f2-t_start)/isi2 + index2 = -1 + else: + t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = 
dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr + # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + s1 = dt_f1*(t_end-t1[N1-1])/isi1 + s2 = dt_f2*(t_end-t2[N2-1])/isi2 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + spike_value += 0.5*(y_start + y_end) * (t_end - t_last) # end nogil diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index fe08cb7..eb4d157 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -466,18 +466,20 @@ def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2, cdef double tau for i in xrange(N1): while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - print i, j, spikes1[i], spikes2[j], tau - if j > -1 and spikes1[i]-spikes2[j] < tau: + if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau: # current spike in st1 is coincident c[i] = 1 - if j < N2-1: + if j < N2-1 and spikes2[j] < spikes1[i]: + # in case spikes2[j] is before spikes1[i] it has to be the one + # right before (see above), hence we move one 
forward and also + # check the next spike j += 1 tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) - print i, j, spikes1[i], spikes2[j], tau - if spikes2[j]-spikes1[i] < tau: + if fabs(spikes2[j]-spikes1[i]) < tau: # current spike in st1 is coincident c[i] = 1 - return c diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx new file mode 100644 index 0000000..be9423c --- /dev/null +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -0,0 +1,82 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_simulated_annealing.pyx + +cython implementation of a simulated annealing algorithm to find the optimal +spike train order + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! 
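+                # note: this branch is unreachable in practice, since
+                # rand() % (N-1) always yields a value in [0, N-2] and thus
+                # ind1 < N-1 holds; it is kept only so ind2 is always defined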
+ ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index cda7fe3..e1f5f16 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -242,27 +242,39 @@ def optimal_spike_train_order_from_matrix(D, full_output=False): p = np.arange(N) - T = 2*np.max(D) # starting temperature - T_end = 1E-5 * T # final temperature - alpha = 0.9 # cooling factor - total_iter = 0 - while T > T_end: - iterations = 0 - succ_iter = 0 - while iterations < 100*N and succ_iter < 10*N: - # exchange two rows and cols - ind1 = np.random.randint(N-1) - delta_A = -2*D[p[ind1], p[ind1+1]] - if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): - # swap indices - p[ind1], p[ind1+1] = p[ind1+1], p[ind1] - A += delta_A - succ_iter += 1 - iterations += 1 - total_iter += iterations - T *= alpha # cool down - if succ_iter == 0: - break + T_start = 2*np.max(D) # starting temperature + T_end = 1E-5 * T_start # final temperature + alpha = 0.9 # cooling factor + + from cython.cython_simulated_annealing import sim_ann_cython as sim_ann + + p, A, total_iter = sim_ann(D, T_start, T_end, alpha) + + # T = T_start + # total_iter = 0 + # while T > T_end: + # iterations = 0 + # succ_iter = 0 + # # equilibrate for 100*N steps or 10*N successful steps + # while iterations < 100*N and succ_iter < 10*N: + # # exchange two rows and cols + # ind1 = np.random.randint(N-1) + # if ind1 < N-1: + # ind2 = ind1+1 + # else: # this can never happend + # ind2 = 0 + # delta_A = -2*D[p[ind1], p[ind2]] + # if delta_A > 0.0 or exp(delta_A/T) > np.random.random(): + # # swap indices + # p[ind1], p[ind2] = p[ind2], p[ind1] + # A += delta_A + # succ_iter += 1 + # iterations += 1 + # total_iter += iterations + # T *= alpha # cool down + # if succ_iter == 0: + # break + if full_output: return p, A, total_iter else: diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index d37731f..1d2ecdb 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -295,12 +295,14 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): ############################################################ # filter_by_spike_sync ############################################################ -def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): +def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None, + return_removed_spikes=False): """ Removes the spikes with a multi-variate spike_sync value below threshold. """ N = len(spike_trains) filtered_spike_trains = [] + removed_spike_trains = [] # cython implementation try: @@ -308,7 +310,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None): as coincidence_impl except ImportError: if not(pyspike.disable_backend_warning): - print("Warning: coincidence_single_profile_cytho not found. Make \ + print("Warning: coincidence_single_profile_cython not found. 
Make \ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") @@ -321,10 +323,19 @@ Falling back to slow python backend.") for i, st in enumerate(spike_trains): coincidences = np.zeros_like(st) - for j in range(N).remove(i): + for j in xrange(N): + if i == j: + continue coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, st.t_start, st.t_end, max_tau) filtered_spikes = st[coincidences > threshold*(N-1)] filtered_spike_trains.append(SpikeTrain(filtered_spikes, [st.t_start, st.t_end])) - return filtered_spike_trains + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains diff --git a/setup.py b/setup.py index 9ba1da6..808a122 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,8 @@ class numpy_include(object): if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ os.path.isfile("pyspike/cython/cython_distances.c") and \ - os.path.isfile("pyspike/cython/cython_directionality.c"): + os.path.isfile("pyspike/cython/cython_directionality.c") and \ + os.path.isfile("pyspike/cython/cython_simulated_annealing.c"): use_c = True else: use_c = False @@ -48,7 +49,9 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.pyx"]), Extension("pyspike.cython.cython_directionality", - ["pyspike/cython/cython_directionality.pyx"]) + ["pyspike/cython/cython_directionality.pyx"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -60,7 +63,9 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_distances", ["pyspike/cython/cython_distances.c"]), Extension("pyspike.cython.cython_directionality", - ["pyspike/cython/cython_directionality.c"]) + ["pyspike/cython/cython_directionality.c"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend @@ -105,7 +110,8 @@ train similarity', package_data={ 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', 'cython/cython_distances.c', - 'cython/cython_directionality.c'], + 'cython/cython_directionality.c', + 'cython/cython_simulated_annealing.c'], 'test': ['Spike_testdata.txt'] } ) diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index ce03b23..66ffcb6 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -17,17 +17,18 @@ import pyspike as spk from pyspike import SpikeTrain -def test_cython(): +def test_single_prof(): st1 = np.array([1.0, 2.0, 3.0, 4.0]) st2 = np.array([1.1, 2.1, 3.8]) + st3 = np.array([0.9, 3.1, 4.1]) # cython implementation try: - from pyspike.cython.cython_profiles import coincidence_single_profile_cython \ - as coincidence_impl + from pyspike.cython.cython_profiles import \ + coincidence_single_profile_cython as coincidence_impl except ImportError: - from pyspike.cython.python_backend import coincidence_single_profile_python \ - as coincidence_impl + from pyspike.cython.python_backend import \ + coincidence_single_profile_python as 
coincidence_impl

     sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
                                        SpikeTrain(st2, 5.0))

@@ -41,3 +42,53 @@ def test_cython():
     for i, t in enumerate(st2):
         assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
                      "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
-- cgit v1.2.3


From 6c68690b992a6dfabf8259e265cf427fc6f193eb Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Mon, 12 Oct 2015 11:20:03 +0200
Subject: added coincidence single profile to python backend

missing function in python backend added, identified and fixed a bug in
the implementation as well
---
 pyspike/cython/cython_profiles.pyx |  2 +-
 pyspike/cython/python_backend.py   | 67 ++++++++++++++++++++++++++++----------
 pyspike/spike_sync.py              |  2 +-
 test/test_sync_filter.py           |  3 +-
 4 files changed, 53 insertions(+), 21 deletions(-)

diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index eb4d157..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -473,7 +473,7 @@ def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
         if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
             # current spike in st1 is coincident
             c[i] = 1
-        if j < N2-1 and spikes2[j] < spikes1[i]:
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
             # in case spikes2[j] is before spikes1[i] it has to be the one
             # right before (see above), hence we move one forward and also
             # check the next spike
diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py
index 6b7209a..a4a0c9e 100644
--- a/pyspike/cython/python_backend.py
+++ b/pyspike/cython/python_backend.py
@@ -3,7 +3,7 @@
 Collection of python functions that can be used instead of the cython
 implementation.
-Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License @@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2): return st, c +def get_tau(spikes1, spikes2, i, j, max_tau, init_tau): + m = init_tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + ############################################################ # coincidence_python ############################################################ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): - def get_tau(spikes1, spikes2, i, j, max_tau): - m = t_end - t_start # use interval as initial tau - if i < len(spikes1)-1 and i > -1: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-1 and j > -1: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = min(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = min(m, max_tau) - return m - N1 = len(spikes1) N2 = len(spikes2) i = -1 @@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike @@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): return st, c, mp +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): + + N1 = len(spikes1) + N2 = len(spikes2) + j = -1 + c = np.zeros(N1) # coincidences + for i in xrange(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if j > -1 and abs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the first or + # the one right before (see above), hence we move one forward and + # also check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if abs(spikes2[j]-spikes1[i]) < tau: + # current spike in st1 is coincident + c[i] = 1 + return c + + ############################################################ # add_piece_wise_const_python ############################################################ diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 1d2ecdb..66aa906 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ 
-315,7 +315,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import coincidence_single_profile_python \ + from cython.python_backend import coincidence_single_python \ as coincidence_impl if max_tau is None: diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index 66ffcb6..e259903 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py @@ -28,12 +28,13 @@ def test_single_prof(): coincidence_single_profile_cython as coincidence_impl except ImportError: from pyspike.cython.python_backend import \ - coincidence_single_profile_python as coincidence_impl + coincidence_single_python as coincidence_impl sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), SpikeTrain(st2, 5.0)) coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) + print(coincidences) for i, t in enumerate(st1): assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], "At index %d" % i) -- cgit v1.2.3 From 67f20e486b9d7da6a9c9e1cf51a1f4138b642e61 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 12:43:21 +0200 Subject: updated test case to new spike sync behavior --- test/test_directionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_directionality.py b/test/test_directionality.py index 7ca5d58..3e12177 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -82,7 +82,7 @@ def test_spike_train_order(): expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) - expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 4]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) assert_array_equal(f.x, expected_x) assert_array_equal(f.y, expected_y) -- cgit v1.2.3 From 029c5716977bb058114339bcadf285c738209776 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 14:14:39 +0200 Subject: python3 fixes --- pyspike/spike_directionality.py | 21 +++++++++++---------- pyspike/spike_sync.py | 2 +- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index e1f5f16..cd2bf04 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -3,8 +3,9 @@ # Copyright 2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import numpy as np -from math import exp import pyspike from pyspike import DiscreteFunc from functools import partial @@ -22,7 +23,7 @@ def spike_directionality(spike_train1, spike_train2, normalize=True, # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_directionality_cython as spike_directionality_impl if max_tau is None: max_tau = 0.0 @@ -95,7 +96,7 @@ def spike_directionality_profiles(spike_trains, indices=None, # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_directionality_profiles_cython as profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -103,7 +104,7 @@ def spike_directionality_profiles(spike_trains, indices=None, PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.directionality_python_backend import \ + from 
.cython.directionality_python_backend import \ spike_directionality_profile_python as profile_impl if max_tau is None: @@ -143,7 +144,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_profile_cython as \ spike_train_order_profile_impl except ImportError: @@ -153,7 +154,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.directionality_python_backend import \ + from .cython.directionality_python_backend import \ spike_train_order_profile_python as spike_train_order_profile_impl if max_tau is None: @@ -180,7 +181,7 @@ def spike_train_order(spike_train1, spike_train2, normalize=True, # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_cython as spike_train_order_impl if max_tau is None: max_tau = 0.0 @@ -246,7 +247,7 @@ def optimal_spike_train_order_from_matrix(D, full_output=False): T_end = 1E-5 * T_start # final temperature alpha = 0.9 # cooling factor - from cython.cython_simulated_annealing import sim_ann_cython as sim_ann + from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann p, A, total_iter = sim_ann(D, T_start, T_end, alpha) @@ -305,8 +306,8 @@ def permutate_matrix(D, p): """ N = len(D) D_p = np.empty_like(D) - for n in xrange(N): - for m in xrange(N): + for n in range(N): + for m in range(N): D_p[n, m] = D[p[n], p[m]] return D_p diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 66aa906..6c6f9c6 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -323,7 +323,7 @@ Falling back to slow python backend.") for i, st in enumerate(spike_trains): coincidences = np.zeros_like(st) - for j in xrange(N): + for j in range(N): if i == j: continue coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, -- cgit v1.2.3 From d3350f7419898c10f72d62e0c63d044804c4a470 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 29 Mar 2016 14:46:49 +0200 Subject: another python3 fix --- pyspike/cython/python_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index a4a0c9e..e75f181 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -443,7 +443,7 @@ def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): N2 = len(spikes2) j = -1 c = np.zeros(N1) # coincidences - for i in xrange(N1): + for i in range(N1): while j < N2-1 and spikes2[j+1] < spikes1[i]: # move forward until spikes2[j] is the last spike before spikes1[i] # note that if spikes2[j] is after spikes1[i] we dont do anything -- cgit v1.2.3 From 76906fbe479a4f6319f79f5c7fd32745b293240e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 2 Jun 2018 13:20:08 -0700 Subject: Fix absolute imports in directionality measures --- pyspike/__init__.py | 6 +++--- pyspike/spike_sync.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 61c5c4f..0250abc 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -19,16 +19,16 @@ from .isi_distance import isi_profile, isi_distance, 
isi_profile_multi,\ isi_distance_multi, isi_distance_matrix from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix -from spike_sync import spike_sync_profile, spike_sync,\ +from .spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ filter_by_spike_sync -from psth import psth +from .psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ merge_spike_trains, generate_poisson_spikes -from spike_directionality import spike_directionality, \ +from .spike_directionality import spike_directionality, \ spike_directionality_profiles, spike_directionality_matrix, \ spike_train_order_profile, spike_train_order, \ spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \ diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 6c6f9c6..32e6bf4 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -306,7 +306,7 @@ def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None, # cython implementation try: - from cython.cython_profiles import coincidence_single_profile_cython \ + from .cython.cython_profiles import coincidence_single_profile_cython \ as coincidence_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -315,7 +315,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import coincidence_single_python \ + from .cython.python_backend import coincidence_single_python \ as coincidence_impl if max_tau is None: -- cgit v1.2.3 From 110d90117f048a28ed6f9efd00176477dd54c3a6 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 9 Jun 2018 13:13:41 -0700 Subject: remove commented code --- pyspike/spike_directionality.py | 54 ----------------------------------------- test/test_directionality.py | 4 +-- 2 files changed, 2 insertions(+), 56 deletions(-) diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index cd2bf04..d1a525e 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -310,57 +310,3 @@ def permutate_matrix(D, p): for m in range(N): D_p[n, m] = D[p[n], p[m]] return D_p - - -# internal helper functions - -############################################################ -# _spike_directionality_profile -############################################################ -# def _spike_directionality_profile(spike_train1, spike_train2, -# max_tau=None): -# """ Computes the spike delay asymmetry profile A(t) of the two given -# spike trains. Returns the profile as a DiscreteFunction object. - -# :param spike_train1: First spike train. -# :type spike_train1: :class:`pyspike.SpikeTrain` -# :param spike_train2: Second spike train. -# :type spike_train2: :class:`pyspike.SpikeTrain` -# :param max_tau: Maximum coincidence window size. If 0 or `None`, the -# coincidence window has no upper bound. -# :returns: The spike-distance profile :math:`S_{sync}(t)`. -# :rtype: :class:`pyspike.function.DiscreteFunction` - -# """ -# # check whether the spike trains are defined for the same interval -# assert spike_train1.t_start == spike_train2.t_start, \ -# "Given spike trains are not defined on the same interval!" -# assert spike_train1.t_end == spike_train2.t_end, \ -# "Given spike trains are not defined on the same interval!" 
- -# # cython implementation -# try: -# from cython.cython_directionality import \ -# spike_train_order_profile_cython as \ -# spike_train_order_profile_impl -# except ImportError: -# # raise NotImplementedError() -# if not(pyspike.disable_backend_warning): -# print("Warning: spike_distance_cython not found. Make sure that \ -# PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -# Falling back to slow python backend.") -# # use python backend -# from cython.directionality_python_backend import \ -# spike_train_order_python as spike_train_order_profile_impl - -# if max_tau is None: -# max_tau = 0.0 - -# times, coincidences, multiplicity \ -# = spike_train_order_profile_impl(spike_train1.spikes, -# spike_train2.spikes, -# spike_train1.t_start, -# spike_train1.t_end, -# max_tau) - -# return DiscreteFunc(times, coincidences, multiplicity) diff --git a/test/test_directionality.py b/test/test_directionality.py index 3e12177..63865cc 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -1,6 +1,6 @@ -""" test_spike_delay_asymmetry.py +""" test_directionality.py -Tests the asymmetry functions +Tests the directionality functions Copyright 2015, Mario Mulansky -- cgit v1.2.3 From 4fe9f4373a83902cedd297bd44bf8a97b7357df0 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 9 Jun 2018 13:36:50 -0700 Subject: Add directionality to docs, bump version --- Changelog | 3 +++ Readme.rst | 9 ++------- doc/pyspike.rst | 5 +++++ setup.py | 2 +- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/Changelog b/Changelog index 21b7cb0..88e16cc 100644 --- a/Changelog +++ b/Changelog @@ -1,3 +1,6 @@ +PySpike v0.6: + * Support for computing spike directionality and spike train order + PySpike v0.5: * First beta release * Python 2.6 support removed diff --git a/Readme.rst b/Readme.rst index 0422dad..74b014b 100644 --- a/Readme.rst +++ b/Readme.rst @@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2 Important Changelog ----------------------------- +With version 0.6.0, the spike directionality and spike train order function have been added. + With version 0.5.0, the interfaces have been unified and the specific functions for multivariate computations have become deprecated. With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent spike trains. This is a breaking change in the function interfaces. Hence, programs written for older versions of PySpike (0.1.x) will not run with newer versions. - -Upcoming Functionality -------------------------- - -In an upcoming release, new functionality for analyzing Synfire patterns based on the new measures SPIKE-Order and Spike-Train-Order method will become part of the PySpike library. -The new measures and algorithms are described in `this preprint `_. - Requirements and Installation ----------------------------- diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 74ab439..9552fa6 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -64,6 +64,11 @@ PSTH :undoc-members: :show-inheritance: +Directionality +.. automodule:: pyspike.spike_directionality + :members: + :undoc-members: + :show-inheritance: Helper functions ........................................ 
diff --git a/setup.py b/setup.py index 52bebd9..b5b01a6 100644 --- a/setup.py +++ b/setup.py @@ -72,7 +72,7 @@ elif use_c: # c files are there, compile to binaries setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.5.3', + version='0.6.0', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy_include()], -- cgit v1.2.3 From aed1f8185cf2a3b3f9330e93681fa12b365d287b Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sun, 15 Jul 2018 15:33:22 -0700 Subject: Clean up directionality module, add doxy. --- doc/pyspike.rst | 1 + pyspike/__init__.py | 17 +- pyspike/spike_directionality.py | 446 +++++++++++++++++++++++++++++----------- pyspike/spike_sync.py | 4 +- test/test_directionality.py | 14 +- 5 files changed, 349 insertions(+), 133 deletions(-) diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 9552fa6..3b10d2a 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -65,6 +65,7 @@ PSTH :show-inheritance: Directionality +........................................ .. automodule:: pyspike.spike_directionality :members: :undoc-members: diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 3cf416f..3897d18 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014-2015, Mario Mulansky +Copyright 2014-2018, Mario Mulansky Distributed under the BSD License """ @@ -29,16 +29,11 @@ from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ merge_spike_trains, generate_poisson_spikes from .spike_directionality import spike_directionality, \ - spike_directionality_profiles, spike_directionality_matrix, \ - spike_train_order_profile, spike_train_order, \ - spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \ - optimal_spike_train_order, permutate_matrix - -from .spike_directionality import spike_directionality, \ - spike_directionality_profiles, spike_directionality_matrix, \ - spike_train_order_profile, spike_train_order, \ - spike_train_order_profile_multi, optimal_spike_train_order_from_matrix, \ - optimal_spike_train_order, permutate_matrix + spike_directionality_values, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order_profile_bi, \ + spike_train_order_profile_multi, spike_train_order, \ + spike_train_order_bi, spike_train_order_multi, \ + optimal_spike_train_sorting, permutate_matrix # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index d1a525e..3a71f0b 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -12,12 +12,109 @@ from functools import partial from pyspike.generic import _generic_profile_multi +############################################################ +# spike_directionality_values +############################################################ +def spike_directionality_values(*args, **kwargs): + """ Computes the spike directionality value for each spike in + each spike train. Returns a list containing an array of spike directionality + values for every given spike train. 
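+    A positive value indicates that the spike tends to lead the coincident
+    spikes of the other spike trains, a negative value that it tends to
+    follow them (cf. the +1/-1 assignment in the coincidence detection).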
+
+    Valid call structures::
+
+        spike_directionality_values(st1, st2)  # returns the bi-variate profile
+        spike_directionality_values(st1, st2, st3)  # multi-variate profile of 3
+                                                    # spike trains
+
+        spike_trains = [st1, st2, st3, st4]  # list of spike trains
+        spike_directionality_values(spike_trains)  # profile of the list of spike trains
+        spike_directionality_values(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                                   # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
 ############################################################
 # spike_directionality
 ############################################################
 def spike_directionality(spike_train1, spike_train2, normalize=True,
                          interval=None, max_tau=None):
-    """ Computes the overall spike directionality for two spike trains.
+    """ Computes the overall spike directionality of the first spike train with
+    respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality value.
+    """
     if interval is None:
         # distance over the whole interval is requested: use specific function
@@ -34,9 +131,14 @@ def spike_directionality(spike_train1, spike_train2, normalize=True,
                                            max_tau)
             c = len(spike_train1.spikes)
         except ImportError:
-            d1, x = spike_directionality_profiles([spike_train1, spike_train2],
-                                                  interval=interval,
-                                                  max_tau=max_tau)
+            if not(pyspike.disable_backend_warning):
+                print("Warning: spike_distance_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # fall back to the profile function
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
             d = np.sum(d1)
             c = len(spike_train1.spikes)
         if normalize:
@@ -45,7 +147,7 @@
             return d
     else:
         # some specific interval is provided: not yet implemented
-        raise NotImplementedError()
+        raise NotImplementedError("Parameter `interval` not supported.")


 ############################################################
@@ -53,7 +155,17 @@
 def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
                                 interval=None, max_tau=None):
-    """ Computes the spike directionaity matrix for the given spike trains.
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The matrix of spike-directionality values.
+    """
     if indices is None:
         indices = np.arange(len(spike_trains))
@@ -75,65 +187,53 @@

 ############################################################
-# spike_directionality_profiles
+# spike_train_order_profile
 ############################################################
-def spike_directionality_profiles(spike_trains, indices=None,
-                                  interval=None, max_tau=None):
-    """ Computes the spike directionality value for each spike in each spike
-    train.
-    """
-    if indices is None:
-        indices = np.arange(len(spike_trains))
-    indices = np.array(indices)
-    # check validity of indices
-    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
-        "Invalid index list."
-    # list of arrays for reulting asymmetry values
-    asymmetry_list = [np.zeros_like(st.spikes) for st in spike_trains]
-    # generate a list of possible index pairs
-    pairs = [(indices[i], j) for i in range(len(indices))
-             for j in indices[i+1:]]
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.
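+    The order profile takes values between -1 and +1: :math:`E(t) = +1` for
+    coincidences in which the spike trains fire in ascending order (the
+    first spike train leads), :math:`E(t) = -1` for the reverse order, and
+    0 for perfectly coincident spikes.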
- # cython implementation - try: - from .cython.cython_directionality import \ - spike_directionality_profiles_cython as profile_impl - except ImportError: - if not(pyspike.disable_backend_warning): - print("Warning: spike_distance_cython not found. Make sure that \ -PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ -Falling back to slow python backend.") - # use python backend - from .cython.directionality_python_backend import \ - spike_directionality_profile_python as profile_impl + Valid call structures:: - if max_tau is None: - max_tau = 0.0 + spike_train_order_profile(st1, st2) # returns the bi-variate profile + spike_train_order_profile(st1, st2, st3) # multi-variate profile of 3 + # spike trains - for i, j in pairs: - d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes, - spike_trains[i].t_start, spike_trains[i].t_end, - max_tau) - asymmetry_list[i] += d1 - asymmetry_list[j] += d2 - for a in asymmetry_list: - a /= len(spike_trains)-1 - return asymmetry_list + spike_trains = [st1, st2, st3, st4] # list of spike trains + spike_train_order_profile(spike_trains) # profile of the list of spike trains + spike_train_order_profile(spike_trains, indices=[0, 1]) # use only the spike trains + # given by the indices + + Additonal arguments: + :param max_tau: Upper bound for coincidence window, `default=None`. + :param indices: list of indices defining which spike trains to use, + if None all given spike trains are used (default=None) + + :returns: The spike train order profile :math:`E(t)` + :rtype: :class:`.DiscreteFunction` + """ + if len(args) == 1: + return spike_train_order_profile_multi(args[0], **kwargs) + elif len(args) == 2: + return spike_train_order_profile_bi(args[0], args[1], **kwargs) + else: + return spike_train_order_profile_multi(args, **kwargs) ############################################################ -# spike_train_order_profile +# spike_train_order_profile_bi ############################################################ -def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): +def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None): """ Computes the spike train order profile P(t) of the two given spike trains. Returns the profile as a DiscreteFunction object. + :param spike_train1: First spike train. :type spike_train1: :class:`pyspike.SpikeTrain` :param spike_train2: Second spike train. :type spike_train2: :class:`pyspike.SpikeTrain` :param max_tau: Maximum coincidence window size. If 0 or `None`, the coincidence window has no upper bound. - :returns: The spike-distance profile :math:`S_{sync}(t)`. + :returns: The spike train order profile :math:`E(t)`. :rtype: :class:`pyspike.function.DiscreteFunction` """ # check whether the spike trains are defined for the same interval @@ -171,21 +271,56 @@ Falling back to slow python backend.") ############################################################ -# spike_train_order +# spike_train_order_profile_multi +############################################################ +def spike_train_order_profile_multi(spike_trains, indices=None, + max_tau=None): + """ Computes the multi-variate spike train order profile for a set of + spike trains. For each spike in the set of spike trains, the multi-variate + profile is defined as the sum of asymmetry values divided by the number of + spike trains pairs involving the spike train of containing this spike, + which is the number of spike trains minus one (N-1). 
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
+
+
+
+############################################################
+# _spike_train_order_impl
 ############################################################
-def spike_train_order(spike_train1, spike_train2, normalize=True,
-                      interval=None, max_tau=None):
-    """ Computes the overall spike delay asymmetry value for two spike trains.
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of bi-variate spike train order value (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
     if interval is None:
         # distance over the whole interval is requested: use specific function
         # for optimal performance
         try:
             from .cython.cython_directionality import \
-                spike_train_order_cython as spike_train_order_impl
+                spike_train_order_cython as spike_train_order_func
             if max_tau is None:
                 max_tau = 0.0
-            c, mp = spike_train_order_impl(spike_train1.spikes,
+            c, mp = spike_train_order_func(spike_train1.spikes,
                                            spike_train2.spikes,
                                            spike_train1.t_start,
                                            spike_train1.t_end,
@@ -194,49 +329,127 @@ def spike_train_order(spike_train1, spike_train2, normalize=True,
             # Cython backend not available: fall back to profile averaging
             c, mp = spike_train_order_profile(spike_train1, spike_train2,
                                               max_tau).integral(interval)
-        if normalize:
-            return 1.0*c/mp
-        else:
-            return c
+        return c, mp
     else:
         # some specific interval is provided: not yet implemented
-        raise NotImplementedError()
+        raise NotImplementedError("Parameter `interval` not supported.")


 ############################################################
-# spike_train_order_profile_multi
+# spike_train_order
 ############################################################
-def spike_train_order_profile_multi(spike_trains, indices=None,
-                                    max_tau=None):
-    """ Computes the multi-variate spike delay asymmetry profile for a set of
-    spike trains. For each spike in the set of spike trains, the multi-variate
-    profile is defined as the sum of asymmetry values divided by the number of
-    spike trains pairs involving the spike train of containing this spike,
-    which is the number of spike trains minus one (N-1).
-    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
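+    The normalized result is a scalar in the interval :math:`[-1, 1]`; values
+    close to 1 indicate synfire-like activity in which the spike trains
+    consistently fire in the given order.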
+
+    Valid call structures::
+
+        spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                     # spike train order
+        spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike trains
+
+        spike_trains = [st1, st2, st3, st4]  # list of spike trains
+        spike_train_order(spike_trains)  # result for the list of spike trains
+        spike_train_order(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                         # given by the indices
+
+    Additional arguments:
+     - `max_tau` Upper bound for coincidence window, `default=None`.
+     - `normalize` Flag indicating if the result should be normalized by the
+       number of spikes, default=`False`
+
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
     :param indices: list of indices defining which spike trains to use,
                     if None all given spike trains are used (default=None)
-    :type indices: list or None
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
     :param max_tau: Maximum coincidence window size. If 0 or `None`, the
                     coincidence window has no upper bound.
-    :returns: The multi-variate spike sync profile :math:`(t)`
-    :rtype: :class:`pyspike.function.DiscreteFunction`
+    :returns: Spike train order values (Synfire Indicator) F for the given spike trains.
+    :rtype: double
     """
-    prof_func = partial(spike_train_order_profile, max_tau=max_tau)
-    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
-                                             indices)
-    # average_dist.mul_scalar(1.0/M)  # no normalization here!
-    return average_prof
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
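+    # The Synfire Indicator is computed pair-wise: the unnormalized order
+    # values and multiplicities are accumulated over all pairs (i, j) with
+    # i < j and normalized by the total multiplicity only at the very end.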
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        return 1.0
+    else:
+        return e_total/m_total
+
+
 ############################################################
-# optimal_spike_train_order_from_matrix
+# optimal_spike_train_sorting_from_matrix
 ############################################################
-def optimal_spike_train_order_from_matrix(D, full_output=False):
-    """ finds the best sorting via simulated annealing.
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
     Returns the optimal permutation p and A value.
-    Internal function, don't call directly! Use optimal_asymmetry_order
-    instead.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (Spike-ORDER) matrix.
+    :param full_output: If true, the function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
     """
     N = len(D)
     A = np.sum(np.triu(D, 0))
@@ -247,35 +460,14 @@ def optimal_spike_train_order_from_matrix(D, full_output=False):
     T_end = 1E-5 * T_start  # final temperature
     alpha = 0.9  # cooling factor

-    from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing spike train"
+                                  " sorting!")

     p, A, total_iter = sim_ann(D, T_start, T_end, alpha)

-    # T = T_start
-    # total_iter = 0
-    # while T > T_end:
-    #     iterations = 0
-    #     succ_iter = 0
-    #     # equilibrate for 100*N steps or 10*N successful steps
-    #     while iterations < 100*N and succ_iter < 10*N:
-    #         # exchange two rows and cols
-    #         ind1 = np.random.randint(N-1)
-    #         if ind1 < N-1:
-    #             ind2 = ind1+1
-    #         else:  # this can never happend
-    #             ind2 = 0
-    #         delta_A = -2*D[p[ind1], p[ind2]]
-    #         if delta_A > 0.0 or exp(delta_A/T) > np.random.random():
-    #             # swap indices
-    #             p[ind1], p[ind2] = p[ind2], p[ind1]
-    #             A += delta_A
-    #             succ_iter += 1
-    #         iterations += 1
-    #     total_iter += iterations
-    #     T *= alpha  # cool down
-    #     if succ_iter == 0:
-    #         break
-
     if full_output:
         return p, A, total_iter
     else:
@@ -283,26 +475,44 @@ def optimal_spike_train_order_from_matrix(D, full_output=False):

 ############################################################
-# optimal_spike_train_order
+# optimal_spike_train_sorting
 ############################################################
-def optimal_spike_train_order(spike_trains, indices=None, interval=None,
-                              max_tau=None, full_output=False):
-    """ finds the best sorting of the given spike trains via simulated
-    annealing.
-    Returns the optimal permutation p and A value.
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the spike
+    directionality matrix and optimizing the order using simulated annealing.
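+    The returned permutation maximizes the overall Synfire Indicator, i.e. the
+    upper-triangular sum of the permutated directionality matrix, so that
+    leading spike trains are sorted to the front. Sketch of a typical use
+    (assuming a list `spike_trains` of :class:`.SpikeTrain` objects)::
+
+        p, F = optimal_spike_train_sorting(spike_trains)
+        D = spike_directionality_matrix(spike_trains, normalize=False)
+        D_sorted = permutate_matrix(D, p)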
+
+    For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If true, the function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
     D = spike_directionality_matrix(spike_trains, normalize=False,
                                     indices=indices, interval=interval,
                                     max_tau=max_tau)
-    return optimal_spike_train_order_from_matrix(D, full_output)
-
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)

 ############################################################
 # permutate_matrix
 ############################################################
 def permutate_matrix(D, p):
-    """ Applies the permutation p to the columns and rows of matrix D.
-    Return the new permutated matrix.
+    """ Helper function that applies the permutation p to the columns and rows
+    of matrix D. Return the permutated matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permutated matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
     """
     N = len(D)
     D_p = np.empty_like(D)
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 32e6bf4..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)


 ############################################################
diff --git a/test/test_directionality.py b/test/test_directionality.py
index 63865cc..5c7e917 100644
--- a/test/test_directionality.py
+++ b/test/test_directionality.py
@@ -14,7 +14,6 @@ from numpy.testing import assert_equal, assert_almost_equal, \

 import pyspike as spk
 from pyspike import SpikeTrain, DiscreteFunc
-# from pyspike.spike_directionality import _spike_directionality_profile


 def test_spike_directionality():
@@ -39,7 +38,7 @@ def test_spike_directionality():
     D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]])
     assert_array_equal(D, D_expected)

-    dir_profs = spk.spike_directionality_profiles([st1, st2, st3])
+    dir_profs = spk.spike_directionality_values([st1, st2, st3])

     assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0])
     assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0])
@@ -87,3 +86,14 @@ def test_spike_train_order():
     assert_array_equal(f.x, expected_x)
     assert_array_equal(f.y, expected_y)
     assert_array_equal(f.mp, expected_mp)
+
+    # Averaging the profile should be the same as computing the synfire indicator directly.
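+    # (DiscreteFunc.avrg() divides the summed order values by the summed
+    # multiplicities, which matches the normalization of spike_train_order.)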
+ assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3])) + + # We can also compute the synfire indicator from the Directionality Matrix: + D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + print D_matrix + num_spikes = np.sum(len(st) for st in [st1, st2, st3]) + print f.avrg(), num_spikes + syn_fire = np.sum(np.triu(D_matrix)) / num_spikes + assert_almost_equal(f.avrg(), syn_fire) -- cgit v1.2.3 From 5ff5cfa0641490fb63509e0cac30f86562f7ee16 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sun, 15 Jul 2018 15:47:03 -0700 Subject: Remove debug print from tests --- test/test_directionality.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/test_directionality.py b/test/test_directionality.py index 5c7e917..c2e9bfe 100644 --- a/test/test_directionality.py +++ b/test/test_directionality.py @@ -92,8 +92,6 @@ def test_spike_train_order(): # We can also compute the synfire indicator from the Directionality Matrix: D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) - print D_matrix num_spikes = np.sum(len(st) for st in [st1, st2, st3]) - print f.avrg(), num_spikes syn_fire = np.sum(np.triu(D_matrix)) / num_spikes assert_almost_equal(f.avrg(), syn_fire) -- cgit v1.2.3 From 8d707522865627dfb74b11cb1a36464a6047be56 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sat, 21 Jul 2018 17:29:57 -0700 Subject: Fix bug in calling Python backend --- pyspike/spike_directionality.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index 3a71f0b..2cb2bf1 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -328,7 +328,7 @@ def _spike_train_order_impl(spike_train1, spike_train2, except ImportError: # Cython backend not available: fall back to profile averaging c, mp = spike_train_order_profile(spike_train1, spike_train2, - max_tau).integral(interval) + max_tau=max_tau).integral(interval) return c, mp else: # some specific interval is provided: not yet implemented -- cgit v1.2.3 From 5178df8d74bcdde310f8007ac891dd8a1bf4a9d2 Mon Sep 17 00:00:00 2001 From: Jonathan Jouty Date: Sun, 22 Jul 2018 02:50:17 +0200 Subject: Fix incorrect integrals in PieceWiseConstFunc (#36) * Add (some currently failing) tests for PieceWiseConstFunc.integral * Fix implementation of PieceWiseConstFunc.integral Just by adding a special condition for when we are only taking an integral "between" two edges of a PieceWiseConstFunc All tests now pass. Fixes #33. 
* Add PieceWiseConstFunc.integral tests for ValueError

* Add testing bounds of integral

* Raise ValueError in function implementation
---
 pyspike/PieceWiseConstFunc.py | 32 ++++++++++++++++++++++----------
 test/test_function.py         | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 72 insertions(+), 10 deletions(-)

diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index 5ce5f27..17fdd3f 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -129,19 +129,31 @@ class PieceWiseConstFunc(object):
             # no interval given, integrate over the whole spike train
             a = np.sum((self.x[1:]-self.x[:-1]) * self.y)
         else:
+            if interval[0]>interval[1]:
+                raise ValueError("Invalid averaging interval: interval[0]>=interval[1]")
+            if interval[0]<self.x[0]:
+                raise ValueError("Invalid averaging interval: interval[0]<self.x[0]")
+            if interval[1]>self.x[-1]:
+                raise ValueError("Invalid averaging interval: interval[1]>self.x[-1]")
             # find the indices corresponding to the interval
             start_ind = np.searchsorted(self.x, interval[0], side='right')
             end_ind = np.searchsorted(self.x, interval[1], side='left')-1
-            assert start_ind > 0 and end_ind < len(self.x), \
-                "Invalid averaging interval"
-            # first the contribution from between the indices
-            a = np.sum((self.x[start_ind+1:end_ind+1] -
-                        self.x[start_ind:end_ind]) *
-                       self.y[start_ind:end_ind])
-            # correction from start to first index
-            a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
-            # correction from last index to end
-            a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
+            if start_ind > end_ind:
+                # contribution from between two closest edges
+                a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind]
+                # minus the part that is not within the interval
+                a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind]
+            else:
+                assert start_ind > 0 and end_ind < len(self.x), \
+                    "Invalid averaging interval"
+                # first the contribution from between the indices
+                a = np.sum((self.x[start_ind+1:end_ind+1] -
+                            self.x[start_ind:end_ind]) *
+                           self.y[start_ind:end_ind])
+                # correction from start to first index
+                a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
+                # correction from last index to end
+                a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
         return a
 
     def avrg(self, interval=None):
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..ddeb4b1 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,6 +10,7 @@ Distributed under the BSD License
 from __future__ import print_function
 import numpy as np
 from copy import copy
+from nose.tools import raises
 from numpy.testing import assert_equal, assert_almost_equal, \
     assert_array_equal, assert_array_almost_equal
 
@@ -49,6 +50,8 @@ def test_pwc():
     assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
     a = f.avrg([1.5, 3.5])
     assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
     a = f.avrg([1.0, 3.5])
     assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
     a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
     assert_array_almost_equal(f1.x, x_expected, decimal=16)
     assert_array_almost_equal(f1.y, y_expected, decimal=16)
 
+def test_pwc_integral():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+
+    # test full interval
+    full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75;
+    assert_equal(f1.integral(), full)
+    assert_equal(f1.integral((np.min(x),np.max(x))), full)
+    # test part interval, spanning an edge
+    assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5)
+    # test part interval, just over two edges
+    assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=16)
+    # test part interval, between 
two edges + assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5) + assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) + # test part interval, start to before and after edge + assert_equal(f1.integral((0.0,0.7)), 0.7*1.0) + assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) + # test part interval, before and after edge till end + assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) + assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_inv(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((3,2)) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_oob_1(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((1,6)) + +@raises(ValueError) +def test_pwc_integral_bad_bounds_oob_2(): + # some random data + x = [0.0, 1.0, 2.0, 2.5, 4.0] + y = [1.0, -0.5, 1.5, 0.75] + f1 = spk.PieceWiseConstFunc(x, y) + f1.integral((-1,3)) def test_pwl(): x = [0.0, 1.0, 2.0, 2.5, 4.0] -- cgit v1.2.3 From 50b85d976f2f5ec7e40faec1ede047cf45b10bc1 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 19 Sep 2018 16:53:50 -0700 Subject: Fix incorrect integrals in PieceWiseLinFunc (#38) Integrals of piece-wise linear functions were incorrect if the requested interval lies completely between two support points. This has been fixed, and a unit test exercising this behavior was added. Fixes #38 --- pyspike/PieceWiseLinFunc.py | 42 +++++++++++++++++++++++++++++------------- test/test_function.py | 14 +++++++++++++- 2 files changed, 42 insertions(+), 14 deletions(-) diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8145e63..8faaec4 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -146,31 +146,47 @@ class PieceWiseLinFunc: if interval is None: # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + + # find the indices corresponding to the interval + start_ind = np.searchsorted(self.x, interval[0], side='right') + end_ind = np.searchsorted(self.x, interval[1], side='left')-1 + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + if start_ind > end_ind: + print(start_ind, end_ind, self.x[start_ind]) + # contribution from between two closest edges + y_x0 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[0]) + y_x1 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[1]) + print(y_x0, y_x1, interval[1] - interval[0]) + integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0]) + print(integral) else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" # first the contribution from between the indices integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) + self.x[start_ind:end_ind]) * + 0.5*(self.y1[start_ind:end_ind] + + self.y2[start_ind:end_ind])) # correction from start to first index integral += 
(self.x[start_ind]-interval[0]) * 0.5 * \
                     (self.y2[start_ind-1] +
-                     intermediate_value(self.x[start_ind-1],
+                      intermediate_value(self.x[start_ind-1],
                                         self.x[start_ind],
                                         self.y1[start_ind-1],
                                         self.y2[start_ind-1],
-                                        interval[0]
-                                        ))
+                                        interval[0]))
         # correction from last index to end
         integral += (interval[1]-self.x[end_ind]) * 0.5 * \
                     (self.y1[end_ind] +
-                     intermediate_value(self.x[end_ind], self.x[end_ind+1],
+                      intermediate_value(self.x[end_ind], self.x[end_ind+1],
                                         self.y1[end_ind], self.y2[end_ind],
                                         interval[1]
                                         ))
diff --git a/test/test_function.py b/test/test_function.py
index ddeb4b1..6c04839 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -136,7 +136,7 @@ def test_pwc_integral():
     # test part interval, spanning an edge
     assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5)
     # test part interval, just over two edges
-    assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=16)
+    assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14)
     # test part interval, between two edges
     assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5)
     assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5)
@@ -212,6 +212,18 @@ def test_pwl():
     a = f.avrg([1.0, 4.0])
     assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)
 
+    # interval between support points
+    a = f.avrg([1.1, 1.5])
+    assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+    # starting at a support point
+    a = f.avrg([1.0, 1.5])
+    assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+    # start and end at support point
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14)
+
     # averaging over multiple intervals
     a = f.avrg([(0.5, 1.5), (1.5, 2.5)])
     assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
-- 
cgit v1.2.3

From c3986d6ac5b95f6250e6090dfd7a094249dabccf Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Thu, 20 Sep 2018 09:50:53 -0700
Subject: Add Spike Order example and Tutorial section

Adds an example computing spike order profile and the optimal spike train
order. Also adds a section on spike train order to the tutorial.
---
 doc/tutorial.rst                | 66 +++++++++++++++++++++++++++++++++++++++++
 examples/spike_train_order.py   | 52 ++++++++++++++++++++++++++++++++
 pyspike/spike_directionality.py |  2 +-
 3 files changed, 119 insertions(+), 1 deletion(-)
 create mode 100644 examples/spike_train_order.py

diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index aff03a8..377c0a2 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w
     plt.title("SPIKE-Sync")
 
     plt.show()
+
+
+Quantifying Leaders and Followers: Spike Train Order
+----------------------------------------------------
+
+PySpike provides functionality to quantify how much a set of spike trains
+resembles a synfire pattern (ie perfect leader-follower pattern). For details
+on the algorithms please see
+`our article in NJP <http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta>`_.
+
+The following example computes the Spike Order profile and Synfire Indicator
+of two Poissonian spike trains.
+
+.. 
code:: python + import numpy as np + from matplotlib import pyplot as plt + import pyspike as spk + + + st1 = spk.generate_poisson_spikes(1.0, [0, 20]) + st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + + d = spk.spike_directionality(st1, st2) + + print "Spike Directionality of two Poissonian spike trains:", d + + E = spk.spike_train_order_profile(st1, st2) + + plt.figure() + x, y = E.get_plottable_data() + plt.plot(x, y, '-ob') + plt.ylim(-1.1, 1.1) + plt.xlabel("t") + plt.ylabel("E") + plt.title("Spike Train Order Profile") + + plt.show() + +Additionally, PySpike can also compute the optimal ordering of the spike trains, +ie the ordering that most resembles a synfire pattern. The following example +computes the optimal order of a set of 20 Poissonian spike trains: + +.. code:: python + + M = 20 + spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + + F_init = spk.spike_train_order(spike_trains) + print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + + D_init = spk.spike_directionality_matrix(spike_trains) + phi, _ = spk.optimal_spike_train_sorting(spike_trains) + F_opt = spk.spike_train_order(spike_trains, indices=phi) + print "Synfire Indicator of optimized spike train sorting:", F_opt + + D_opt = spk.permutate_matrix(D_init, phi) + + plt.figure() + plt.imshow(D_init) + plt.title("Initial Directionality Matrix") + + plt.figure() + plt.imshow(D_opt) + plt.title("Optimized Directionality Matrix") + + plt.show() diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py new file mode 100644 index 0000000..3a42472 --- /dev/null +++ b/examples/spike_train_order.py @@ -0,0 +1,52 @@ +import numpy as np +from matplotlib import pyplot as plt +import pyspike as spk + + +st1 = spk.generate_poisson_spikes(1.0, [0, 20]) +st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + +d = spk.spike_directionality(st1, st2) + +print "Spike Directionality of two Poissonian spike trains:", d + +E = spk.spike_train_order_profile(st1, st2) + +plt.figure() +x, y = E.get_plottable_data() +plt.plot(x, y, '-ob') +plt.ylim(-1.1, 1.1) +plt.xlabel("t") +plt.ylabel("E") +plt.title("Spike Train Order Profile") + + +###### Optimize spike train order of 20 Random spike trains ####### + +M = 20 + +spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + +F_init = spk.spike_train_order(spike_trains) + +print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + +D_init = spk.spike_directionality_matrix(spike_trains) + +phi, _ = spk.optimal_spike_train_sorting(spike_trains) + +F_opt = spk.spike_train_order(spike_trains, indices=phi) + +print "Synfire Indicator of optimized spike train sorting:", F_opt + +D_opt = spk.permutate_matrix(D_init, phi) + +plt.figure() +plt.imshow(D_init) +plt.title("Initial Directionality Matrix") + +plt.figure() +plt.imshow(D_opt) +plt.title("Optimized Directionality Matrix") + +plt.show() diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py index 2cb2bf1..248862c 100644 --- a/pyspike/spike_directionality.py +++ b/pyspike/spike_directionality.py @@ -501,7 +501,7 @@ def optimal_spike_train_sorting(spike_trains, indices=None, interval=None, D = spike_directionality_matrix(spike_trains, normalize=False, indices=indices, interval=interval, max_tau=max_tau) - return _optimal_spike_train_order_from_matrix(D, full_output) + return _optimal_spike_train_sorting_from_matrix(D, full_output) ############################################################ # permutate_matrix -- cgit v1.2.3 
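The tutorial section and example script added above still use Python 2 syntax (`print` statements, `xrange`), while a later patch in this series drops Python 2 support. For reference, a minimal Python 3 sketch of the same workflow; it assumes nothing beyond the functions the series itself exports (`generate_poisson_spikes`, `spike_directionality`, `optimal_spike_train_sorting`, `spike_train_order`):

.. code:: python

    import pyspike as spk

    # two Poissonian spike trains with rate 1.0 on the interval [0, 20]
    st1 = spk.generate_poisson_spikes(1.0, [0, 20])
    st2 = spk.generate_poisson_spikes(1.0, [0, 20])
    print("Spike Directionality of two Poissonian spike trains:",
          spk.spike_directionality(st1, st2))

    # optimal (most synfire-like) ordering of 20 random spike trains
    spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in range(20)]
    phi, _ = spk.optimal_spike_train_sorting(spike_trains)
    print("Synfire Indicator of optimized sorting:",
          spk.spike_train_order(spike_trains, indices=phi))
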
From 8ab39a445781175d1c8732d24ff30e90f8d479ab Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Thu, 20 Sep 2018 10:58:13 -0700 Subject: Add missing line in tutorial --- doc/tutorial.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/tutorial.rst b/doc/tutorial.rst index 377c0a2..7c3e9b6 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -245,6 +245,7 @@ The following example computes the Spike Order profile and Synfire Indicator of two Poissonian spike trains. .. code:: python + import numpy as np from matplotlib import pyplot as plt import pyspike as spk -- cgit v1.2.3 From fc6f43a23f8bc62bcc10a0e6531ac78d309a3620 Mon Sep 17 00:00:00 2001 From: gspr Date: Wed, 29 Dec 2021 22:53:42 +0100 Subject: Use assert_allclose instead of assert_equal in tests to allow for different floating point behavior on different architectures or optimization levels. (#49) --- test/test_distance.py | 70 +++++++++++++++--------------- test/test_empty.py | 36 +++++++-------- test/test_function.py | 52 +++++++++++----------- test/test_generic_interfaces.py | 18 ++++---- test/test_regression/test_regression_15.py | 20 ++++----- test/test_spikes.py | 10 ++--- test/test_sync_filter.py | 32 +++++++------- 7 files changed, 119 insertions(+), 119 deletions(-) diff --git a/test/test_distance.py b/test/test_distance.py index fe09f34..7ec3a72 100644 --- a/test/test_distance.py +++ b/test/test_distance.py @@ -11,7 +11,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np from copy import copy -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk @@ -41,10 +41,10 @@ def test_isi(): # print("ISI: ", f.y) print("ISI value:", expected_isi_val) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=15) - assert_equal(f.avrg(), expected_isi_val) - assert_equal(spk.isi_distance(t1, t2), expected_isi_val) + assert_allclose(f.avrg(), expected_isi_val) + assert_allclose(spk.isi_distance(t1, t2), expected_isi_val) # check with some equal spike times t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0]) @@ -60,10 +60,10 @@ def test_isi(): f = spk.isi_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y, expected_isi, decimal=15) - assert_equal(f.avrg(), expected_isi_val) - assert_equal(spk.isi_distance(t1, t2), expected_isi_val) + assert_allclose(f.avrg(), expected_isi_val) + assert_allclose(spk.isi_distance(t1, t2), expected_isi_val) def test_spike(): @@ -75,7 +75,7 @@ def test_spike(): f = spk.spike_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) # from SPIKY: y_all = np.array([0.000000000000000000, 0.555555555555555580, @@ -89,7 +89,7 @@ def test_spike(): assert_array_almost_equal(f.y2, y_all[1::2]) assert_almost_equal(f.avrg(), 0.186309523809523814, decimal=15) - assert_equal(spk.spike_distance(t1, t2), f.avrg()) + assert_allclose(spk.spike_distance(t1, t2), f.avrg()) t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0) t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0) @@ -118,7 +118,7 @@ def test_spike(): f = spk.spike_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=15) assert_array_almost_equal(f.y2, expected_y2, decimal=15) assert_almost_equal(f.avrg(), expected_spike_val, decimal=15) @@ -157,7 
+157,7 @@ def test_spike(): f = spk.spike_profile(t1, t2) - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) assert_array_almost_equal(f.y2, expected_y2, decimal=14) assert_almost_equal(f.avrg(), expected_spike_val, decimal=16) @@ -236,8 +236,8 @@ def test_spike_sync(): f.add(f2) i12 = f.integral() - assert_equal(i1[0]+i2[0], i12[0]) - assert_equal(i1[1]+i2[1], i12[1]) + assert_allclose(i1[0]+i2[0], i12[0]) + assert_allclose(i1[1]+i2[1], i12[1]) def check_multi_profile(profile_func, profile_func_multi, dist_func_multi): @@ -258,7 +258,7 @@ def check_multi_profile(profile_func, profile_func_multi, dist_func_multi): f_multi = profile_func_multi(spike_trains, [0, 1]) assert f_multi.almost_equal(f12, decimal=14) d = dist_func_multi(spike_trains, [0, 1]) - assert_equal(f_multi.avrg(), d) + assert_allclose(f_multi.avrg(), d) f_multi1 = profile_func_multi(spike_trains, [1, 2, 3]) f_multi2 = profile_func_multi(spike_trains[1:]) @@ -329,11 +329,11 @@ def test_multi_spike_sync(): f = spk.spike_sync_profile_multi(spike_trains) - assert_equal(spike_times, f.x[1:-1]) - assert_equal(len(f.x), len(f.y)) + assert_allclose(spike_times, f.x[1:-1]) + assert_allclose(len(f.x), len(f.y)) - assert_equal(np.sum(f.y[1:-1]), 39932) - assert_equal(np.sum(f.mp[1:-1]), 85554) + assert_allclose(np.sum(f.y[1:-1]), 39932) + assert_allclose(np.sum(f.mp[1:-1]), 85554) # example with 2 empty spike trains sts = [] @@ -365,16 +365,16 @@ def check_dist_matrix(dist_func, dist_matrix_func): f_matrix = dist_matrix_func(spike_trains) # check zero diagonal for i in range(4): - assert_equal(0.0, f_matrix[i, i]) + assert_allclose(0.0, f_matrix[i, i]) for i in range(4): for j in range(i+1, 4): - assert_equal(f_matrix[i, j], f_matrix[j, i]) - assert_equal(f12, f_matrix[1, 0]) - assert_equal(f13, f_matrix[2, 0]) - assert_equal(f14, f_matrix[3, 0]) - assert_equal(f23, f_matrix[2, 1]) - assert_equal(f24, f_matrix[3, 1]) - assert_equal(f34, f_matrix[3, 2]) + assert_allclose(f_matrix[i, j], f_matrix[j, i]) + assert_allclose(f12, f_matrix[1, 0]) + assert_allclose(f13, f_matrix[2, 0]) + assert_allclose(f14, f_matrix[3, 0]) + assert_allclose(f23, f_matrix[2, 1]) + assert_allclose(f24, f_matrix[3, 1]) + assert_allclose(f34, f_matrix[3, 2]) def test_isi_matrix(): @@ -397,13 +397,13 @@ def test_regression_spiky(): isi_dist = spk.isi_distance(st1, st2) assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15) isi_profile = spk.isi_profile(st1, st2) - assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y)) + assert_allclose(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y)) spike_dist = spk.spike_distance(st1, st2) - assert_equal(spike_dist, 0.211058782487353908) + assert_allclose(spike_dist, 0.211058782487353908) spike_sync = spk.spike_sync(st1, st2) - assert_equal(spike_sync, 8.6956521739130432e-01) + assert_allclose(spike_sync, 8.6956521739130432e-01) # multivariate check @@ -414,7 +414,7 @@ def test_regression_spiky(): assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15) spike_profile = spk.spike_profile_multi(spike_trains) - assert_equal(len(spike_profile.y1)+len(spike_profile.y2), 1252) + assert_allclose(len(spike_profile.y1)+len(spike_profile.y2), 1252) spike_dist = spk.spike_distance_multi(spike_trains) # get the full precision from SPIKY @@ -422,7 +422,7 @@ def test_regression_spiky(): spike_sync = spk.spike_sync_multi(spike_trains) # get the full precision from SPIKY - assert_equal(spike_sync, 0.7183531505298066) + 
assert_allclose(spike_sync, 0.7183531505298066) # Eero's edge correction example st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0) @@ -439,7 +439,7 @@ def test_regression_spiky(): expected_y1 = y_all[::2] expected_y2 = y_all[1::2] - assert_equal(f.x, expected_times) + assert_allclose(f.x, expected_times) assert_array_almost_equal(f.y1, expected_y1, decimal=14) assert_array_almost_equal(f.y2, expected_y2, decimal=14) @@ -452,15 +452,15 @@ def test_multi_variate_subsets(): v1 = spk.isi_distance_multi(spike_trains_sub_set) v2 = spk.isi_distance_multi(spike_trains, sub_set) - assert_equal(v1, v2) + assert_allclose(v1, v2) v1 = spk.spike_distance_multi(spike_trains_sub_set) v2 = spk.spike_distance_multi(spike_trains, sub_set) - assert_equal(v1, v2) + assert_allclose(v1, v2) v1 = spk.spike_sync_multi(spike_trains_sub_set) v2 = spk.spike_sync_multi(spike_trains, sub_set) - assert_equal(v1, v2) + assert_allclose(v1, v2) if __name__ == "__main__": diff --git a/test/test_empty.py b/test/test_empty.py index 4d0a5cf..93fd2c1 100644 --- a/test/test_empty.py +++ b/test/test_empty.py @@ -10,7 +10,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_equal, assert_array_almost_equal import pyspike as spk @@ -33,18 +33,18 @@ def test_isi_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.isi_distance(st1, st2) - assert_equal(d, 0.0) + assert_allclose(d, 0.0) prof = spk.isi_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) assert_array_equal(prof.y, [0.0, ]) st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.isi_distance(st1, st2) - assert_equal(d, 0.6*0.4+0.4*0.6) + assert_allclose(d, 0.6*0.4+0.4*0.6) prof = spk.isi_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) assert_array_equal(prof.y, [0.6, 0.4]) @@ -53,7 +53,7 @@ def test_isi_empty(): d = spk.isi_distance(st1, st2) assert_almost_equal(d, 0.2/0.6*0.4 + 0.0 + 0.2/0.6*0.4, decimal=15) prof = spk.isi_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) assert_array_almost_equal(prof.y, [0.2/0.6, 0.0, 0.2/0.6], decimal=15) @@ -62,9 +62,9 @@ def test_spike_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.spike_distance(st1, st2) - assert_equal(d, 0.0) + assert_allclose(d, 0.0) prof = spk.spike_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) assert_array_equal(prof.y1, [0.0, ]) assert_array_equal(prof.y2, [0.0, ]) @@ -75,7 +75,7 @@ def test_spike_empty(): d_expect = 2*0.4*0.4*1.0/(0.4+1.0)**2 + 2*0.6*0.4*1.0/(0.6+1.0)**2 assert_almost_equal(d, d_expect, decimal=15) prof = spk.spike_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) assert_array_almost_equal(prof.y1, [2*0.4*1.0/(0.4+1.0)**2, 2*0.4*1.0/(0.6+1.0)**2], @@ -100,7 +100,7 @@ def test_spike_empty(): assert_almost_equal(d, expected_spike_val, decimal=15) prof = spk.spike_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], 
decimal=15) assert_array_almost_equal(prof.y1, expected_y1, decimal=15) assert_array_almost_equal(prof.y2, expected_y2, decimal=15) @@ -110,18 +110,18 @@ def test_spike_sync_empty(): st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([], edges=(0.0, 1.0)) d = spk.spike_sync(st1, st2) - assert_equal(d, 1.0) + assert_allclose(d, 1.0) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 1.0]) assert_array_equal(prof.y, [1.0, 1.0]) st1 = SpikeTrain([], edges=(0.0, 1.0)) st2 = SpikeTrain([0.4, ], edges=(0.0, 1.0)) d = spk.spike_sync(st1, st2) - assert_equal(d, 0.0) + assert_allclose(d, 0.0) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_equal(prof.x, [0.0, 0.4, 1.0]) assert_array_equal(prof.y, [0.0, 0.0, 0.0]) @@ -130,7 +130,7 @@ def test_spike_sync_empty(): d = spk.spike_sync(st1, st2) assert_almost_equal(d, 1.0, decimal=15) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.4, 0.6, 1.0], decimal=15) assert_array_almost_equal(prof.y, [1.0, 1.0, 1.0, 1.0], decimal=15) @@ -139,7 +139,7 @@ def test_spike_sync_empty(): d = spk.spike_sync(st1, st2) assert_almost_equal(d, 0.0, decimal=15) prof = spk.spike_sync_profile(st1, st2) - assert_equal(d, prof.avrg()) + assert_allclose(d, prof.avrg()) assert_array_almost_equal(prof.x, [0.0, 0.2, 0.8, 1.0], decimal=15) assert_array_almost_equal(prof.y, [0.0, 0.0, 0.0, 0.0], decimal=15) @@ -148,9 +148,9 @@ def test_spike_sync_empty(): st2 = SpikeTrain([2.1, 7.0], [0, 10.0]) st3 = SpikeTrain([5.1, 6.0], [0, 10.0]) res = spk.spike_sync_profile(st1, st2).avrg(interval=[3.0, 4.0]) - assert_equal(res, 1.0) + assert_allclose(res, 1.0) res = spk.spike_sync(st1, st2, interval=[3.0, 4.0]) - assert_equal(res, 1.0) + assert_allclose(res, 1.0) sync_matrix = spk.spike_sync_matrix([st1, st2, st3], interval=[3.0, 4.0]) assert_array_equal(sync_matrix, np.ones((3, 3)) - np.diag(np.ones(3))) diff --git a/test/test_function.py b/test/test_function.py index 6c04839..ba10ae7 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -11,7 +11,7 @@ from __future__ import print_function import numpy as np from copy import copy from nose.tools import raises -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_equal, assert_array_almost_equal import pyspike as spk @@ -24,14 +24,14 @@ def test_pwc(): f = spk.PieceWiseConstFunc(x, y) # function values - assert_equal(f(0.0), 1.0) - assert_equal(f(0.5), 1.0) - assert_equal(f(1.0), 0.25) - assert_equal(f(2.0), 0.5) - assert_equal(f(2.25), 1.5) - assert_equal(f(2.5), 2.25/2) - assert_equal(f(3.5), 0.75) - assert_equal(f(4.0), 0.75) + assert_allclose(f(0.0), 1.0) + assert_allclose(f(0.5), 1.0) + assert_allclose(f(1.0), 0.25) + assert_allclose(f(2.0), 0.5) + assert_allclose(f(2.25), 1.5) + assert_allclose(f(2.5), 2.25/2) + assert_allclose(f(3.5), 0.75) + assert_allclose(f(4.0), 0.75) assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]), [1.0, 1.0, 0.25, 0.5, 1.5, 2.25/2, 0.75, 0.75]) @@ -131,21 +131,21 @@ def test_pwc_integral(): # test full interval full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75; - assert_equal(f1.integral(), full) - assert_equal(f1.integral((np.min(x),np.max(x))), full) + assert_allclose(f1.integral(), full) + assert_allclose(f1.integral((np.min(x),np.max(x))), 
full) # test part interval, spanning an edge - assert_equal(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5) + assert_allclose(f1.integral((0.5,1.5)), 0.5*1.0 + 0.5*-0.5) # test part interval, just over two edges assert_almost_equal(f1.integral((1.0-1e-16,2+1e-16)), 1.0*-0.5, decimal=14) # test part interval, between two edges - assert_equal(f1.integral((1.0,2.0)), 1.0*-0.5) - assert_equal(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) + assert_allclose(f1.integral((1.0,2.0)), 1.0*-0.5) + assert_allclose(f1.integral((1.2,1.7)), (1.7-1.2)*-0.5) # test part interval, start to before and after edge - assert_equal(f1.integral((0.0,0.7)), 0.7*1.0) - assert_equal(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) + assert_allclose(f1.integral((0.0,0.7)), 0.7*1.0) + assert_allclose(f1.integral((0.0,1.1)), 1.0*1.0+0.1*-0.5) # test part interval, before and after edge till end - assert_equal(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) - assert_equal(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) + assert_allclose(f1.integral((2.6,4.0)), (4.0-2.6)*0.75) + assert_allclose(f1.integral((2.4,4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75) @raises(ValueError) def test_pwc_integral_bad_bounds_inv(): @@ -178,14 +178,14 @@ def test_pwl(): f = spk.PieceWiseLinFunc(x, y1, y2) # function values - assert_equal(f(0.0), 1.0) - assert_equal(f(0.5), 1.25) - assert_equal(f(1.0), 0.5) - assert_equal(f(2.0), 1.1/2) - assert_equal(f(2.25), 1.5) - assert_equal(f(2.5), 2.25/2) - assert_equal(f(3.5), 0.75-0.5*1.0/1.5) - assert_equal(f(4.0), 0.25) + assert_allclose(f(0.0), 1.0) + assert_allclose(f(0.5), 1.25) + assert_allclose(f(1.0), 0.5) + assert_allclose(f(2.0), 1.1/2) + assert_allclose(f(2.25), 1.5) + assert_allclose(f(2.5), 2.25/2) + assert_allclose(f(3.5), 0.75-0.5*1.0/1.5) + assert_allclose(f(4.0), 0.25) assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]), [1.0, 1.25, 0.5, 0.55, 1.5, 2.25/2, 0.75-0.5/1.5, 0.25]) diff --git a/test/test_generic_interfaces.py b/test/test_generic_interfaces.py index 7f08067..553f3f4 100644 --- a/test/test_generic_interfaces.py +++ b/test/test_generic_interfaces.py @@ -9,7 +9,7 @@ Distributed under the BSD License """ from __future__ import print_function -from numpy.testing import assert_equal +from numpy.testing import assert_allclose import pyspike as spk from pyspike import SpikeTrain @@ -43,33 +43,33 @@ def check_func(dist_func): isi12 = dist_func(t1, t2) isi12_ = dist_func([t1, t2]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi12_ = dist_func(spike_trains, indices=[0, 1]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi123 = dist_func(t1, t2, t3) isi123_ = dist_func([t1, t2, t3]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) isi123_ = dist_func(spike_trains, indices=[0, 1, 2]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) # run the same test with an additional interval parameter isi12 = dist_func(t1, t2, interval=[0.0, 0.5]) isi12_ = dist_func([t1, t2], interval=[0.0, 0.5]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi12_ = dist_func(spike_trains, indices=[0, 1], interval=[0.0, 0.5]) - assert_equal(isi12, isi12_) + assert_allclose(isi12, isi12_) isi123 = dist_func(t1, t2, t3, interval=[0.0, 0.5]) isi123_ = dist_func([t1, t2, t3], interval=[0.0, 0.5]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) isi123_ = dist_func(spike_trains, indices=[0, 1, 2], interval=[0.0, 0.5]) - assert_equal(isi123, isi123_) + assert_allclose(isi123, isi123_) def test_isi_profile(): diff --git 
a/test/test_regression/test_regression_15.py b/test/test_regression/test_regression_15.py index 54adf23..81b5bb0 100644 --- a/test/test_regression/test_regression_15.py +++ b/test/test_regression/test_regression_15.py @@ -11,7 +11,7 @@ Distributed under the BSD License from __future__ import division import numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk @@ -28,15 +28,15 @@ def test_regression_15_isi(): N = len(spike_trains) dist_mat = spk.isi_distance_matrix(spike_trains) - assert_equal(dist_mat.shape, (N, N)) + assert_allclose(dist_mat.shape, (N, N)) ind = np.arange(N//2) dist_mat = spk.isi_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) ind = np.arange(N//2, N) dist_mat = spk.isi_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) def test_regression_15_spike(): @@ -46,15 +46,15 @@ def test_regression_15_spike(): N = len(spike_trains) dist_mat = spk.spike_distance_matrix(spike_trains) - assert_equal(dist_mat.shape, (N, N)) + assert_allclose(dist_mat.shape, (N, N)) ind = np.arange(N//2) dist_mat = spk.spike_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) ind = np.arange(N//2, N) dist_mat = spk.spike_distance_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) def test_regression_15_sync(): @@ -64,15 +64,15 @@ def test_regression_15_sync(): N = len(spike_trains) dist_mat = spk.spike_sync_matrix(spike_trains) - assert_equal(dist_mat.shape, (N, N)) + assert_allclose(dist_mat.shape, (N, N)) ind = np.arange(N//2) dist_mat = spk.spike_sync_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) ind = np.arange(N//2, N) dist_mat = spk.spike_sync_matrix(spike_trains, ind) - assert_equal(dist_mat.shape, (N//2, N//2)) + assert_allclose(dist_mat.shape, (N//2, N//2)) if __name__ == "__main__": diff --git a/test/test_spikes.py b/test/test_spikes.py index ee505b5..579f8e1 100644 --- a/test/test_spikes.py +++ b/test/test_spikes.py @@ -9,7 +9,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np -from numpy.testing import assert_equal +from numpy.testing import assert_allclose import pyspike as spk @@ -29,7 +29,7 @@ def test_load_from_txt(): spike_times = [64.886, 305.81, 696, 937.77, 1059.7, 1322.2, 1576.1, 1808.1, 2121.5, 2381.1, 2728.6, 2966.9, 3223.7, 3473.7, 3644.3, 3936.3] - assert_equal(spike_times, spike_trains[0].spikes) + assert_allclose(spike_times, spike_trains[0].spikes) # check auxiliary spikes for spike_train in spike_trains: @@ -47,9 +47,9 @@ def test_load_time_series(): # check spike trains for n in range(len(spike_trains)): - assert_equal(spike_trains[n].spikes, spike_trains_check[n].spikes) - assert_equal(spike_trains[n].t_start, 0) - assert_equal(spike_trains[n].t_end, 4000) + assert_allclose(spike_trains[n].spikes, spike_trains_check[n].spikes) + assert_allclose(spike_trains[n].t_start, 0) + assert_allclose(spike_trains[n].t_end, 4000) def check_merged_spikes(merged_spikes, spike_trains): diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py index e259903..0b915db 100644 --- a/test/test_sync_filter.py +++ b/test/test_sync_filter.py 
@@ -10,7 +10,7 @@ Distributed under the BSD License from __future__ import print_function import numpy as np -from numpy.testing import assert_equal, assert_almost_equal, \ +from numpy.testing import assert_allclose, assert_almost_equal, \ assert_array_almost_equal import pyspike as spk @@ -36,21 +36,21 @@ def test_single_prof(): coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) print(coincidences) for i, t in enumerate(st1): - assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], - "At index %d" % i) + assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t], + err_msg="At index %d" % i) coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0)) for i, t in enumerate(st2): - assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], - "At index %d" % i) + assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t], + err_msg="At index %d" % i) sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0), SpikeTrain(st3, 5.0)) coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0)) for i, t in enumerate(st1): - assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t], - "At index %d" % i) + assert_allclose(coincidences[i], sync_prof.y[sync_prof.x == t], + err_msg="At index %d" % i) st1 = np.array([1.0, 2.0, 3.0, 4.0]) st2 = np.array([1.0, 2.0, 4.0]) @@ -61,8 +61,8 @@ def test_single_prof(): coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0)) for i, t in enumerate(st1): expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t] - assert_equal(coincidences[i], expected, - "At index %d" % i) + assert_allclose(coincidences[i], expected, + err_msg="At index %d" % i) def test_filter(): @@ -72,22 +72,22 @@ def test_filter(): # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5) - # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0]) - # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8]) + # assert_allclose(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0]) + # assert_allclose(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8]) # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5) - # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8]) - # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0]) + # assert_allclose(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8]) + # assert_allclose(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0]) filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75) for st in filtered_spike_trains: print(st.spikes) - assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0]) - assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8]) - assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1]) + assert_allclose(filtered_spike_trains[0].spikes, [1.0, 4.0]) + assert_allclose(filtered_spike_trains[1].spikes, [1.1, 3.8]) + assert_allclose(filtered_spike_trains[2].spikes, [0.9, 4.1]) if __name__ == "main": -- cgit v1.2.3 From ac99d20549a103e8242021d4d388b6ec8557efd0 Mon Sep 17 00:00:00 2001 From: gspr Date: Wed, 29 Dec 2021 22:55:27 +0100 Subject: Python 3.10 compatibility fix (#53) Importing Sequence et al. from collections has been deprecated since Python 3.7. 
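The failure this patch addresses is easy to reproduce in isolation. A small
standalone sketch (not part of the diff below), using only the standard
library:

.. code:: python

    import collections.abc

    interval = (0.0, 1.0)

    # The container ABCs have lived in collections.abc since Python 3.3;
    # the deprecated aliases in the collections module itself are gone in
    # Python 3.10, where collections.Sequence raises an AttributeError.
    assert isinstance(interval, collections.abc.Sequence)
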
--- pyspike/DiscreteFunc.py | 6 +++--- pyspike/PieceWiseConstFunc.py | 8 ++++---- pyspike/PieceWiseLinFunc.py | 8 ++++---- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index caad290..48bc787 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -5,7 +5,7 @@ from __future__ import absolute_import, print_function import numpy as np -import collections +import collections.abc import pyspike @@ -155,11 +155,11 @@ class DiscreteFunc(object): multiplicity = np.sum(self.mp[1:-1]) else: # check if interval is as sequence - assert isinstance(interval, collections.Sequence), \ + assert isinstance(interval, collections.abc.Sequence), \ "Invalid value for `interval`. None, Sequence or Tuple \ expected." # check if interval is a sequence of intervals - if not isinstance(interval[0], collections.Sequence): + if not isinstance(interval[0], collections.abc.Sequence): # find the indices corresponding to the interval start_ind, end_ind = get_indices(interval) value = np.sum(self.y[start_ind:end_ind]) diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 17fdd3f..e33c61d 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -5,7 +5,7 @@ from __future__ import absolute_import, print_function import numpy as np -import collections +import collections.abc import pyspike @@ -39,7 +39,7 @@ class PieceWiseConstFunc(object): ind = np.searchsorted(self.x, t, side='right') - if isinstance(t, collections.Sequence): + if isinstance(t, collections.abc.Sequence): # t is a sequence of values # correct the cases t == x[0], t == x[-1] ind[ind == 0] = 1 @@ -173,10 +173,10 @@ class PieceWiseConstFunc(object): return self.integral() / (self.x[-1]-self.x[0]) # check if interval is as sequence - assert isinstance(interval, collections.Sequence), \ + assert isinstance(interval, collections.abc.Sequence), \ "Invalid value for `interval`. None, Sequence or Tuple expected." # check if interval is a sequence of intervals - if not isinstance(interval[0], collections.Sequence): + if not isinstance(interval[0], collections.abc.Sequence): # just one interval a = self.integral(interval) / (interval[1]-interval[0]) else: diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8faaec4..b3b503b 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -5,7 +5,7 @@ from __future__ import absolute_import, print_function import numpy as np -import collections +import collections.abc import pyspike @@ -46,7 +46,7 @@ class PieceWiseLinFunc: ind = np.searchsorted(self.x, t, side='right') - if isinstance(t, collections.Sequence): + if isinstance(t, collections.abc.Sequence): # t is a sequence of values # correct the cases t == x[0], t == x[-1] ind[ind == 0] = 1 @@ -211,10 +211,10 @@ class PieceWiseLinFunc: return self.integral() / (self.x[-1]-self.x[0]) # check if interval is as sequence - assert isinstance(interval, collections.Sequence), \ + assert isinstance(interval, collections.abc.Sequence), \ "Invalid value for `interval`. None, Sequence or Tuple expected." 
# check if interval is a sequence of intervals - if not isinstance(interval[0], collections.Sequence): + if not isinstance(interval[0], collections.abc.Sequence): # just one interval a = self.integral(interval) / (interval[1]-interval[0]) else: -- cgit v1.2.3 From 27e69cce345795758b84d054b33e492b0915d718 Mon Sep 17 00:00:00 2001 From: Xhuliano Brace <34151358+xhulianoThe1@users.noreply.github.com> Date: Wed, 29 Dec 2021 14:00:17 -0800 Subject: Fix readme links with updated links (#54) --- Readme.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Readme.rst b/Readme.rst index 74b014b..82f341a 100644 --- a/Readme.rst +++ b/Readme.rst @@ -22,9 +22,9 @@ If you use PySpike in your research, please cite our SoftwareX publication on Py Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2] or SPIKE-Synchronization [3], please cite one or more of the following publications: -.. [#] Kreuz T, Haas JS, Morelli A, Abarbanel HDI, Politi A, *Measuring spike train synchrony.* J Neurosci Methods 165, 151 (2007) `[pdf] `_ +.. [#] Kreuz T, Haas JS, Morelli A, Abarbanel HDI, Politi A, *Measuring spike train synchrony.* J Neurosci Methods 165, 151 (2007) `[pdf] `_ -.. [#] Kreuz T, Chicharro D, Houghton C, Andrzejak RG, Mormann F, *Monitoring spike train synchrony.* J Neurophysiol 109, 1457 (2013) `[pdf] `_ +.. [#] Kreuz T, Chicharro D, Houghton C, Andrzejak RG, Mormann F, *Monitoring spike train synchrony.* J Neurophysiol 109, 1457 (2013) `[pdf] `_ .. [#] Kreuz T, Mulansky M and Bozanic N, *SPIKY: A graphical user interface for monitoring spike train synchrony*, J Neurophysiol, JNeurophysiol 113, 3432 (2015) @@ -135,5 +135,5 @@ Curie Initial Training Network* `Neural Engineering Transformative Technologies .. _SPIKE: http://www.scholarpedia.org/article/SPIKE-distance .. _SPIKE-Synchronization: http://www.scholarpedia.org/article/Measures_of_spike_train_synchrony#SPIKE_synchronization .. _cython: http://www.cython.org -.. _SPIKY: http://wwwold.fi.isc.cnr.it/users/thomas.kreuz/Source-Code/SPIKY.html +.. _SPIKY: https://thomas-kreuz.complexworld.net/source-codes/spiky .. 
_BSD_License: http://opensource.org/licenses/BSD-2-Clause -- cgit v1.2.3 From 59b4e6881d07ef8fc089af0918c01fd835f7efe1 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:02:06 -0600 Subject: Create python-package.yml Switch from travis to GitHub actions --- .github/workflows/python-package.yml | 45 ++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 .github/workflows/python-package.yml diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml new file mode 100644 index 0000000..48bb849 --- /dev/null +++ b/.github/workflows/python-package.yml @@ -0,0 +1,45 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Python package + +on: + push: + branches: [ master, develop ] + pull_request: + branches: [ master, develop ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10"] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update + sudo apt-get install libblas-dev + sudo apt-get install liblapack-dev + sudo apt-get install gfortran + python -m pip install --upgrade pip + python -m pip install flake8 pytest nose numpy scipy + python -m pip install -q cython + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + # - name: Lint with flake8 + # run: | + # # stop the build if there are Python syntax errors or undefined names + # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + - name: Test with nosetest + run: | + nosetes -- cgit v1.2.3 From ea268fc804d074450e3c0c15d496d0dfbc12ac15 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:05:36 -0600 Subject: Fix typo in workflow --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 48bb849..605a31e 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -42,4 +42,4 @@ jobs: # flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with nosetest run: | - nosetes + nosetests -- cgit v1.2.3 From 3581e2fb182a4b1c5e0e120cd326b5d44574ea3a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:11:48 -0600 Subject: Add setup.py command to test workflow --- .github/workflows/python-package.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 605a31e..80b7ba3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -34,6 +34,9 @@ jobs: python -m pip install flake8 pytest nose numpy scipy python -m pip install -q cython if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Install package + run: | + python setup.py build_ext --inplace # - name: Lint with flake8 # run: | # # stop the build if there are Python syntax errors or undefined names @@ -43,3 +46,4 @@ jobs: - name: Test with nosetest run: | nosetests + nosetests test/numeric -- cgit v1.2.3 From 17fd0f63712a38c5b5e08456f6e9b192fee41c83 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:17:36 -0600 Subject: Use pytest in CI instead of nosetests --- .github/workflows/python-package.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 80b7ba3..0f9e548 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -45,5 +45,4 @@ jobs: # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with nosetest run: | - nosetests - nosetests test/numeric + pytest -- cgit v1.2.3 From 74445a5c93fb029e140e70c2ef28304c12e0a85e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:27:52 -0600 Subject: Remove --inplace from workflow install --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 0f9e548..81724d8 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -36,7 +36,7 @@ jobs: if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Install package run: | - python setup.py build_ext --inplace + python setup.py build_ext # - name: Lint with flake8 # run: | # # stop the build if there are Python syntax errors or undefined names -- cgit v1.2.3 From a0053c004ed55e7eb5effd43faa3c0d49cf7c373 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:39:10 -0600 Subject: Call python -m pytest in test workflow --- .github/workflows/python-package.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 81724d8..edb25d2 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -43,6 +43,6 @@ jobs: # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide # flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test with nosetest + - name: Test with PyTest run: | - pytest + python3 -m pytest -- cgit v1.2.3 From cf421763434059d5fb1e5cc347b0db781116767d Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 17:53:33 -0600 Subject: Add cython to test matrix --- .github/workflows/python-package.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index edb25d2..6585af4 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -17,6 +17,7 @@ jobs: fail-fast: false matrix: python-version: ["3.7", "3.8", "3.9", "3.10"] + cython: ['python -m pip install -q cython', 'echo "No Cython"'] steps: - uses: actions/checkout@v2 @@ -32,11 +33,13 @@ jobs: sudo apt-get install gfortran python -m pip install --upgrade pip python -m pip install flake8 pytest nose numpy scipy - python -m pip install -q cython if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Install Cython + run: | + ${{ matrix.cython }} - name: Install package run: | - python setup.py build_ext + python setup.py build_ext --inplace # - name: Lint with flake8 # run: | # # stop the build if there are Python syntax errors or undefined names @@ -45,4 +48,4 @@ jobs: # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with PyTest run: | - python3 -m pytest + python -m pytest -- cgit v1.2.3 From 14f9130b19a490536171532999fc32012c963744 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 29 Dec 2021 18:14:55 -0600 Subject: Set language level 3 in cython files --- pyspike/cython/cython_add.pyx | 1 + pyspike/cython/cython_directionality.pyx | 1 + pyspike/cython/cython_distances.pyx | 1 + pyspike/cython/cython_profiles.pyx | 1 + pyspike/cython/cython_simulated_annealing.pyx | 1 + 5 files changed, 5 insertions(+) diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx index 25f1181..f38406a 100644 --- a/pyspike/cython/cython_add.pyx +++ b/pyspike/cython/cython_add.pyx @@ -1,3 +1,4 @@ +#cython: language_level=3 #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx index ac37690..40450cd 100644 --- a/pyspike/cython/cython_directionality.pyx +++ b/pyspike/cython/cython_directionality.pyx @@ -1,3 +1,4 @@ +#cython: language_level=3 #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index d4070ae..f049718 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -1,3 +1,4 @@ +#cython: language_level=3 #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx index aa24db4..a83e4f7 100644 --- a/pyspike/cython/cython_profiles.pyx +++ b/pyspike/cython/cython_profiles.pyx @@ -1,3 +1,4 @@ +#cython: language_level=3 #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx index be9423c..53ecde7 100644 --- a/pyspike/cython/cython_simulated_annealing.pyx +++ b/pyspike/cython/cython_simulated_annealing.pyx @@ -1,3 +1,4 @@ +#cython: language_level=3 
 #cython: boundscheck=False
 #cython: wraparound=False
 #cython: cdivision=True
-- 
cgit v1.2.3

From 8ee450c0d9572ee694bc6af064d5db5a686b822e Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Wed, 29 Dec 2021 18:15:34 -0600
Subject: Bump version to 0.7

---
 Changelog |  6 ++++++
 setup.py  | 12 +++++-------
 2 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/Changelog b/Changelog
index 88e16cc..3641458 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,9 @@
+PySpike v0.7:
+ * Python 3.10 compatible
+ * Dropped support for Python2
+ * Small fixes in tests
+ * Fixed documentation links
+
 PySpike v0.6:
  * Support for computing spike directionality and spike train order
 
diff --git a/setup.py b/setup.py
index b5b01a6..0aea3a0 100644
--- a/setup.py
+++ b/setup.py
@@ -72,7 +72,7 @@ elif use_c:  # c files are there, compile to binaries
 setup(
     name='pyspike',
     packages=find_packages(exclude=['doc']),
-    version='0.6.0',
+    version='0.7.0',
     cmdclass=cmdclass,
     ext_modules=ext_modules,
     include_dirs=[numpy_include()],
@@ -98,13 +98,11 @@ train similarity',
 
         'License :: OSI Approved :: BSD License',
 
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6'
+        'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
+        'Programming Language :: Python :: 3.10',
     ],
     package_data={
         'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c',
-- 
cgit v1.2.3

From 504ded4b3129a1bb7fdcfdc74fb394f838687345 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Wed, 29 Dec 2021 18:30:51 -0600
Subject: Add changelog note to Readme, remove .travis.yml

---
 .travis.yml | 22 ----------------------
 Readme.rst  |  3 +++
 2 files changed, 3 insertions(+), 22 deletions(-)
 delete mode 100644 .travis.yml

diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 2877331..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-language: python
-python:
-  - "2.7"
-  - "3.4"
-  - "3.5"
-  - "3.6"
-env:
-  - CYTHON_INSTALL="pip install -q cython"
-  - CYTHON_INSTALL=""
-before_install:
-  - sudo apt-get update
-  - sudo apt-get install libblas-dev
-  - sudo apt-get install liblapack-dev
-  - sudo apt-get install gfortran
-install:
-  - pip install scipy
-  - $CYTHON_INSTALL
-
-script:
-  - python setup.py build_ext --inplace
-  - nosetests
-  - travis_wait 30 nosetests test/numeric
diff --git a/Readme.rst b/Readme.rst
index 82f341a..bd91347 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -31,6 +31,9 @@ Important Changelog
 -----------------------------
 
+With version 0.7.0, support for Python 2 was dropped; PySpike now officially supports
+Python 3.7, 3.8, 3.9, 3.10.
+
 With version 0.6.0, the spike directionality and spike train order function
 have been added.
 
 With version 0.5.0, the interfaces have been unified and the specific
 functions for multivariate computations have become deprecated.
-- 
cgit v1.2.3
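Applied in order, the series above reorganizes the directionality module, fixes the piecewise integral edge cases, and modernizes packaging and CI. A short smoke-test sketch: the integral values are taken from `test_pwc_integral` above, and the expected version string assumes the bump to 0.7.0 in the version-bump patch above.

.. code:: python

    import pyspike as spk

    print(spk.__version__)  # expected to print 0.7.0 after this series

    # integral over an interval strictly between two support points,
    # the case fixed for PieceWiseConstFunc above
    f = spk.PieceWiseConstFunc([0.0, 1.0, 2.0, 2.5, 4.0], [1.0, -0.5, 1.5, 0.75])
    assert abs(f.integral((1.2, 1.7)) - (1.7 - 1.2) * -0.5) < 1e-12

    # invalid intervals now raise ValueError (here the bounds are inverted)
    try:
        f.integral((3, 2))
    except ValueError:
        print("invalid interval rejected as expected")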