From 01fa64bd8a8931544a1734de104e2fe72018694f Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Wed, 28 Jan 2015 19:22:02 +0100
Subject: added missing py files

---
 pyspike/PieceWiseConstFunc.py | 168 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 168 insertions(+)
 create mode 100644 pyspike/PieceWiseConstFunc.py

diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
new file mode 100644
index 0000000..e639dfc
--- /dev/null
+++ b/pyspike/PieceWiseConstFunc.py
@@ -0,0 +1,168 @@
+"""
+Class representing piece-wise constant functions.
+
+Copyright 2014-2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+from __future__ import print_function
+
+import numpy as np
+import collections
+
+
+##############################################################
+# PieceWiseConstFunc
+##############################################################
+class PieceWiseConstFunc(object):
+    """ A class representing a piece-wise constant function. """
+
+    def __init__(self, x, y):
+        """ Constructs the piece-wise const function.
+
+        :param x: array of length N+1 defining the edges of the intervals of
+                  the pwc function.
+        :param y: array of length N defining the function values on the
+                  intervals.
+        """
+        # convert parameters to arrays, also ensures copying
+        self.x = np.array(x)
+        self.y = np.array(y)
+
+    def copy(self):
+        """ Returns a copy of itself.
+
+        :rtype: :class:`PieceWiseConstFunc`
+        """
+        return PieceWiseConstFunc(self.x, self.y)
+
+    def almost_equal(self, other, decimal=14):
+        """ Checks if the function is equal to another function up to `decimal`
+        precision.
+
+        :param other: another :class:`PieceWiseConstFunc`
+        :returns: True if the two functions are equal up to `decimal` decimals,
+                  False otherwise
+        :rtype: bool
+        """
+        eps = 10.0**(-decimal)
+        return np.allclose(self.x, other.x, atol=eps, rtol=0.0) and \
+            np.allclose(self.y, other.y, atol=eps, rtol=0.0)
+
+    def get_plottable_data(self):
+        """ Returns two arrays containing x- and y-coordinates for immediate
+        plotting of the piece-wise function.
+
+        :returns: (x_plot, y_plot) containing plottable data
+        :rtype: pair of np.array
+
+        Example::
+
+            x, y = f.get_plottable_data()
+            plt.plot(x, y, '-o', label="Piece-wise const function")
+        """
+
+        x_plot = np.empty(2*len(self.x)-2)
+        x_plot[0] = self.x[0]
+        x_plot[1::2] = self.x[1:]
+        x_plot[2::2] = self.x[1:-1]
+        y_plot = np.empty(2*len(self.y))
+        y_plot[::2] = self.y
+        y_plot[1::2] = self.y
+
+        return x_plot, y_plot
+
+    def integral(self, interval=None):
+        """ Returns the integral over the given interval.
+
+        :param interval: integration interval given as a pair of floats, if
+                         None the integral over the whole function is computed.
+        :type interval: Pair of floats or None.
+        :returns: the integral
+        :rtype: float
+        """
+        if interval is None:
+            # no interval given, integrate over the whole function
+            a = np.sum((self.x[1:]-self.x[:-1]) * self.y)
+        else:
+            # find the indices corresponding to the interval
+            start_ind = np.searchsorted(self.x, interval[0], side='right')
+            end_ind = np.searchsorted(self.x, interval[1], side='left')-1
+            assert start_ind > 0 and end_ind < len(self.x), \
+                "Invalid averaging interval"
+            # first the contribution from between the indices
+            a = np.sum((self.x[start_ind+1:end_ind+1] -
+                        self.x[start_ind:end_ind]) *
+                       self.y[start_ind:end_ind])
+            # correction from start to first index
+            a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1]
+            # correction from last index to end
+            a += (interval[1]-self.x[end_ind]) * self.y[end_ind]
+        return a
+
+    def avrg(self, interval=None):
+        """ Computes the average of the piece-wise const function:
+        :math:`a = 1/T \int_0^T f(x) dx` where T is the length of the interval.
+
+        :param interval: averaging interval given as a pair of floats, a
+                         sequence of pairs for averaging multiple intervals,
+                         or None; if None, the average over the whole
+                         function is computed.
+        :type interval: Pair, sequence of pairs, or None.
+        :returns: the average a.
+        :rtype: float
+        """
+        if interval is None:
+            # no interval given, average over the whole function
+            return self.integral() / (self.x[-1]-self.x[0])
+
+        # check if interval is a sequence
+        assert isinstance(interval, collections.Sequence), \
+            "Invalid value for `interval`. None, Sequence or Tuple expected."
+        # check if interval is a sequence of intervals
+        if not isinstance(interval[0], collections.Sequence):
+            # just one interval
+            a = self.integral(interval) / (interval[1]-interval[0])
+        else:
+            # several intervals
+            a = 0.0
+            int_length = 0.0
+            for ival in interval:
+                a += self.integral(ival)
+                int_length += ival[1] - ival[0]
+            a /= int_length
+        return a
+
+    def add(self, f):
+        """ Adds another PieceWiseConst function to this function.
+        Note: only functions defined on the same interval can be summed.
+
+        :param f: :class:`PieceWiseConstFunc` function to be added.
+        :rtype: None
+        """
+        assert self.x[0] == f.x[0], "The functions have different intervals"
+        assert self.x[-1] == f.x[-1], "The functions have different intervals"
+
+        # cython version
+        try:
+            from cython_add import add_piece_wise_const_cython as \
+                add_piece_wise_const_impl
+        except ImportError:
+            print("Warning: add_piece_wise_const_cython not found. Make sure \
+that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \
+\n Falling back to slow python backend.")
+            # use python backend
+            from python_backend import add_piece_wise_const_python as \
+                add_piece_wise_const_impl
+
+        self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y)
+
+    def mul_scalar(self, fac):
+        """ Multiplies the function by a scalar value.
+
+        :param fac: Value to multiply
+        :type fac: double
+        :rtype: None
+        """
+        self.y *= fac
-- cgit v1.2.3


From 6eb6bc486027d3d5304a94cfb417a2257f2b6fd9 Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Tue, 3 Feb 2015 12:19:53 +0100
Subject: moved cython functions to subdirectory

---
 pyspike/DiscreteFunc.py            |   4 +-
 pyspike/PieceWiseConstFunc.py      |   4 +-
 pyspike/PieceWiseLinFunc.py        |   4 +-
 pyspike/cython/cython_add.pyx      | 235 ++++++++++++++++++
 pyspike/cython/cython_distance.pyx | 312 ++++++++++++++++++++++++
 pyspike/cython/python_backend.py   | 485 +++++++++++++++++++++++++++++++++++++
 pyspike/cython_add.pyx             | 235 ------------------
 pyspike/cython_distance.pyx        | 312 ------------------------
 pyspike/isi_distance.py            |   6 +-
 pyspike/python_backend.py          | 485 -------------------------------------
 pyspike/spike_distance.py          |   5 +-
 pyspike/spike_sync.py              |   4 +-
 setup.py                           |  14 +-
 13 files changed, 1054 insertions(+), 1051 deletions(-)
 create mode 100644 pyspike/cython/cython_add.pyx
 create mode 100644 pyspike/cython/cython_distance.pyx
 create mode 100644 pyspike/cython/python_backend.py
 delete mode 100644 pyspike/cython_add.pyx
 delete mode 100644 pyspike/cython_distance.pyx
 delete mode 100644 pyspike/python_backend.py

diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py
index 3e24284..2283e03 100644
--- a/pyspike/DiscreteFunc.py
+++ b/pyspike/DiscreteFunc.py
@@ -198,7 +198,7 @@ class DiscreteFunc(object):
         # cython version
         try:
-            from cython_add import add_discrete_function_cython as \
+            from cython.cython_add import add_discrete_function_cython as \
                 add_discrete_function_impl
         except ImportError:
             print("Warning: add_discrete_function_cython not found. Make \
 sure that PySpike is installed by running\n\
 'python setup.py build_ext --inplace'! \
 \n Falling back to slow python backend.")
             # use python backend
-            from python_backend import add_discrete_function_python as \
+            from cython.python_backend import add_discrete_function_python as \
                 add_discrete_function_impl
         self.x, self.y, self.mp = \
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index e639dfc..dc57ab1 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -146,14 +146,14 @@ class PieceWiseConstFunc(object):
         # cython version
         try:
-            from cython_add import add_piece_wise_const_cython as \
+            from cython.cython_add import add_piece_wise_const_cython as \
                 add_piece_wise_const_impl
         except ImportError:
             print("Warning: add_piece_wise_const_cython not found. Make sure \
 that PySpike is installed by running\n 'python setup.py build_ext --inplace'!
\ \n Falling back to slow python backend.") # use python backend - from python_backend import add_piece_wise_const_python as \ + from cython.python_backend import add_piece_wise_const_python as \ add_piece_wise_const_impl self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y) diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 58a20e5..bc0aa2a 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -173,14 +173,14 @@ class PieceWiseLinFunc: # cython version try: - from cython_add import add_piece_wise_lin_cython as \ + from cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: print("Warning: add_piece_wise_lin_cython not found. Make sure \ that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend - from python_backend import add_piece_wise_lin_python as \ + from cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl self.x, self.y1, self.y2 = add_piece_wise_lin_impl( diff --git a/pyspike/cython/cython_add.pyx b/pyspike/cython/cython_add.pyx new file mode 100644 index 0000000..ac64005 --- /dev/null +++ b/pyspike/cython/cython_add.pyx @@ -0,0 +1,235 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_add.pyx + +cython implementation of the add function for piece-wise const and +piece-wise linear functions + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! + +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_add.pyx + +which gives:: + + cython_add.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# add_piece_wise_const_cython +############################################################ +def add_piece_wise_const_cython(double[:] x1, double[:] y1, + double[:] x2, double[:] y2): + + cdef int N1 = len(x1) + cdef int N2 = len(x2) + cdef double[:] x_new = np.empty(N1+N2) + cdef double[:] y_new = np.empty(N1+N2-1) + cdef int index1 = 0 + cdef int index2 = 0 + cdef int index = 0 + cdef int i + with nogil: # release the interpreter lock to allow multi-threading + x_new[0] = x1[0] + y_new[0] = y1[0] + y2[0] + while (index1+1 < N1-1) and (index2+1 < N2-1): + index += 1 + # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + index1 += 1 + x_new[index] = x1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + x_new[index] = x2[index2] + else: # x1[index1+1] == x2[index2+1]: + index1 += 1 + index2 += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < N1-1: + x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] + for i in xrange(N1-index1-2): + y_new[index+1+i] = y1[index1+1+i] + y2[N2-2] + index += N1-index1-2 + elif index2+1 < N2-1: + x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] + for i in xrange(N2-index2-2): + y_new[index+1+i] = y2[index2+1+i] + y1[N1-2] + index += N2-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[N1-1] + # the last 
value is again the end of the interval + # x_new[index+1] = x1[-1] + # only use the data that was actually filled + x1 = x_new[:index+2] + y1 = y_new[:index+1] + # end nogil + return np.array(x_new[:index+2]), np.array(y_new[:index+1]) + + +############################################################ +# add_piece_wise_lin_cython +############################################################ +def add_piece_wise_lin_cython(double[:] x1, double[:] y11, double[:] y12, + double[:] x2, double[:] y21, double[:] y22): + cdef int N1 = len(x1) + cdef int N2 = len(x2) + cdef double[:] x_new = np.empty(N1+N2) + cdef double[:] y1_new = np.empty(N1+N2-1) + cdef double[:] y2_new = np.empty_like(y1_new) + cdef int index1 = 0 # index for self + cdef int index2 = 0 # index for f + cdef int index = 0 # index for new + cdef int i + cdef double y + with nogil: # release the interpreter lock to allow multi-threading + x_new[0] = x1[0] + y1_new[0] = y11[0] + y21[0] + while (index1+1 < N1-1) and (index2+1 < N2-1): + # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) + y2_new[index] = y12[index1] + y + index1 += 1 + index += 1 + x_new[index] = x1[index1] + # and the starting value for the next interval + y1_new[index] = y11[index1] + y + elif x1[index1+1] > x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y2_new[index] = y22[index2] + y + index2 += 1 + index += 1 + x_new[index] = x2[index2] + # and the starting value for the next interval + y1_new[index] = y21[index2] + y + else: # x1[index1+1] == x2[index2+1]: + y2_new[index] = y12[index1] + y22[index2] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y1_new[index] = y11[index1] + y21[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < N1-1: + x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] + for i in xrange(N1-index1-2): + # compute the linear interpolations value + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1+i]-x2[index2]) / (x2[index2+1]-x2[index2]) + y1_new[index+1+i] = y11[index1+1+i] + y + y2_new[index+i] = y12[index1+i] + y + index += N1-index1-2 + elif index2+1 < N2-1: + x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] + # compute the linear interpolations values + for i in xrange(N2-index2-2): + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1+i]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y1_new[index+1+i] = y21[index2+1+i] + y + y2_new[index+i] = y22[index2+i] + y + index += N2-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[N1-1] + # finally, the end value for the last interval + y2_new[index] = y12[N1-2]+y22[N2-2] + # only use the data that was actually filled + # end nogil + return (np.array(x_new[:index+2]), + np.array(y1_new[:index+1]), + np.array(y2_new[:index+1])) + + +############################################################ +# add_discrete_function_cython +############################################################ +def add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, + double[:] x2, double[:] y2, double[:] mp2): + + cdef 
double[:] x_new = np.empty(len(x1) + len(x2)) + cdef double[:] y_new = np.empty_like(x_new) + cdef double[:] mp_new = np.empty_like(x_new) + cdef int index1 = 0 + cdef int index2 = 0 + cdef int index = 0 + cdef int N1 = len(y1) + cdef int N2 = len(y2) + x_new[0] = x1[0] + while (index1+1 < N1) and (index2+1 < N2): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was actually filled + return (np.array(x_new[:index+1]), + np.array(y_new[:index+1]), + np.array(mp_new[:index+1])) diff --git a/pyspike/cython/cython_distance.pyx b/pyspike/cython/cython_distance.pyx new file mode 100644 index 0000000..489aab9 --- /dev/null +++ b/pyspike/cython/cython_distance.pyx @@ -0,0 +1,312 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_distances.pyx + +cython implementation of the isi- and spike-distance + +Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects +improves the performance of spike_distance by a factor of 10! 
+ +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_distance.pyx + +which gives:: + + cython_distance.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# isi_distance_cython +############################################################ +def isi_distance_cython(double[:] s1, + double[:] s2): + + cdef double[:] spike_events + cdef double[:] isi_values + cdef int index1, index2, index + cdef int N1, N2 + cdef double nu1, nu2 + N1 = len(s1)-1 + N2 = len(s2)-1 + + nu1 = s1[1]-s1[0] + nu2 = s2[1]-s2[0] + spike_events = np.empty(N1+N2) + spike_events[0] = s1[0] + # the values have one entry less - the number of intervals between events + isi_values = np.empty(N1+N2-1) + + with nogil: # release the interpreter to allow multithreading + isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) + index1 = 0 + index2 = 0 + index = 1 + while True: + # check which spike is next - from s1 or s2 + if s1[index1+1] < s2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1 >= N1: + break + spike_events[index] = s1[index1] + nu1 = s1[index1+1]-s1[index1] + elif s1[index1+1] > s2[index2+1]: + index2 += 1 + if index2 >= N2: + break + spike_events[index] = s2[index2] + nu2 = s2[index2+1]-s2[index2] + else: # s1[index1+1] == s2[index2+1] + index1 += 1 + index2 += 1 + if (index1 >= N1) or (index2 >= N2): + break + spike_events[index] = s1[index1] + nu1 = s1[index1+1]-s1[index1] + nu2 = s2[index2+1]-s2[index2] + # compute the corresponding isi-distance + isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) + index += 1 + # the last event is the interval end + spike_events[index] = s1[N1] + # end nogil + + return spike_events[:index+1], isi_values[:index] + + +############################################################ +# get_min_dist_cython +############################################################ +cdef inline double get_min_dist_cython(double spike_time, + double[:] spike_train, + # use memory view to ensure inlining + # np.ndarray[DTYPE_t,ndim=1] spike_train, + int N, + int start_index=0) nogil: + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. 
+ """ + cdef double d, d_temp + d = fabs(spike_time - spike_train[start_index]) + start_index += 1 + while start_index < N: + d_temp = fabs(spike_time - spike_train[start_index]) + if d_temp > d: + break + else: + d = d_temp + start_index += 1 + return d + + +############################################################ +# isi_avrg_cython +############################################################ +cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: + return 0.5*(isi1+isi2)*(isi1+isi2) + # alternative definition to obtain ~ 0.5 for Poisson spikes + # return 0.5*(isi1*isi1+isi2*isi2) + + +############################################################ +# spike_distance_cython +############################################################ +def spike_distance_cython(double[:] t1, + double[:] t2): + + cdef double[:] spike_events + cdef double[:] y_starts + cdef double[:] y_ends + + cdef int N1, N2, index1, index2, index + cdef double dt_p1, dt_p2, dt_f1, dt_f2, isi1, isi2, s1, s2 + + N1 = len(t1) + N2 = len(t2) + + spike_events = np.empty(N1+N2-2) + spike_events[0] = t1[0] + y_starts = np.empty(len(spike_events)-1) + y_ends = np.empty(len(spike_events)-1) + + with nogil: # release the interpreter to allow multithreading + index1 = 0 + index2 = 0 + index = 1 + dt_p1 = 0.0 + dt_f1 = get_min_dist_cython(t1[1], t2, N2, 0) + dt_p2 = 0.0 + dt_f2 = get_min_dist_cython(t2[1], t1, N1, 0) + isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) + isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) + s1 = dt_f1*(t1[1]-t1[0])/isi1 + s2 = dt_f2*(t2[1]-t2[0])/isi2 + y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + while True: + # print(index, index1, index2) + if t1[index1+1] < t2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1+1 >= N1: + break + spike_events[index] = t1[index1] + # first calculate the previous interval end value + dt_p1 = dt_f1 # the previous time now was the following time before + s1 = dt_p1 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + + dt_f2*(t1[index1]-t2[index2])) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # now the next interval start value + dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) + isi1 = t1[index1+1]-t1[index1] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + elif t1[index1+1] > t2[index2+1]: + index2 += 1 + if index2+1 >= N2: + break + spike_events[index] = t2[index2] + # first calculate the previous interval end value + dt_p2 = dt_f2 # the previous time now was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + + dt_f1*(t2[index2]-t1[index1])) / isi1 + s2 = dt_p2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # now the next interval start value + dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) + #s2 = dt_f2 + isi2 = t2[index2+1]-t2[index2] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + else: # t1[index1+1] == t2[index2+1] - generate only one event + index1 += 1 + index2 += 1 + if (index1+1 >= N1) or (index2+1 >= N2): + break + spike_events[index] = t1[index1] + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + dt_p1 = 0.0 + dt_p2 = 0.0 + dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) + dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) + isi1 = t1[index1+1]-t1[index1] + isi2 = t2[index2+1]-t2[index2] + index += 1 + # the last event is the interval 
end + spike_events[index] = t1[N1-1] + # the ending value of the last interval + isi1 = max(t1[N1-1]-t1[N1-2], t1[N1-2]-t1[N1-3]) + isi2 = max(t2[N2-1]-t2[N2-2], t2[N2-2]-t2[N2-3]) + s1 = dt_p1*(t1[N1-1]-t1[N1-2])/isi1 + s2 = dt_p2*(t2[N2-1]-t2[N2-2])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # end nogil + + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index+1], y_starts[:index], y_ends[:index] + + + +############################################################ +# coincidence_python +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, int i, int j): + cdef double m = 1E100 # some huge number + cdef int N1 = len(spikes1)-2 + cdef int N2 = len(spikes2)-2 + if i < N1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 1: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 1: + m = fmin(m, spikes2[j]-spikes2[j-1]) + return 0.5*m + + +############################################################ +# coincidence_cython +############################################################ +def coincidence_cython(double[:] spikes1, double[:] spikes2): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = 0 + cdef int j = 0 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 - 2) # spike times + cdef double[:] c = np.zeros(N1 + N2 - 2) # coincidences + cdef double[:] mp = np.ones(N1 + N2 - 2) # multiplicity + cdef double tau + while n < N1 + N2 - 2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if j > 0 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if i > 0 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 + st[n] = spikes1[i] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + + st[0] = spikes1[0] + st[len(st)-1] = spikes1[len(spikes1)-1] + c[0] = c[1] + c[len(c)-1] = c[len(c)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + + return st, c, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py new file mode 100644 index 0000000..481daf9 --- /dev/null +++ b/pyspike/cython/python_backend.py @@ -0,0 +1,485 @@ +""" python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. + +Copyright 2014, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# isi_distance_python +############################################################ +def isi_distance_python(s1, s2): + """ Plain Python implementation of the isi distance. 
+ """ + # compute the interspike interval + nu1 = s1[1:] - s1[:-1] + nu2 = s2[1:] - s2[:-1] + + # compute the isi-distance + spike_events = np.empty(len(nu1) + len(nu2)) + spike_events[0] = s1[0] + # the values have one entry less - the number of intervals between events + isi_values = np.empty(len(spike_events) - 1) + # add the distance of the first events + # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ + # else 1.0 - nu2[0]/nu1[0] + isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) + index1 = 0 + index2 = 0 + index = 1 + while True: + # check which spike is next - from s1 or s2 + if s1[index1+1] < s2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1 >= len(nu1): + break + spike_events[index] = s1[index1] + elif s1[index1+1] > s2[index2+1]: + index2 += 1 + if index2 >= len(nu2): + break + spike_events[index] = s2[index2] + else: # s1[index1 + 1] == s2[index2 + 1] + index1 += 1 + index2 += 1 + if (index1 >= len(nu1)) or (index2 >= len(nu2)): + break + spike_events[index] = s1[index1] + # compute the corresponding isi-distance + isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ + max(nu1[index1], nu2[index2]) + index += 1 + # the last event is the interval end + spike_events[index] = s1[-1] + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index + 1], isi_values[:index] + + +############################################################ +# get_min_dist +############################################################ +def get_min_dist(spike_time, spike_train, start_index=0): + """ Returns the minimal distance |spike_time - spike_train[i]| + with i>=start_index. + """ + d = abs(spike_time - spike_train[start_index]) + start_index += 1 + while start_index < len(spike_train): + d_temp = abs(spike_time - spike_train[start_index]) + if d_temp > d: + break + else: + d = d_temp + start_index += 1 + return d + + +############################################################ +# spike_distance_python +############################################################ +def spike_distance_python(spikes1, spikes2): + """ Computes the instantaneous spike-distance S_spike (t) of the two given + spike trains. The spike trains are expected to have auxiliary spikes at the + beginning and end of the interval. Use the function add_auxiliary_spikes to + add those spikes to the spike train. + Args: + - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. + Returns: + - PieceWiseLinFunc describing the spike-distance. + """ + # check for auxiliary spikes - first and last spikes should be identical + assert spikes1[0] == spikes2[0], \ + "Given spike trains seems not to have auxiliary spikes!" + assert spikes1[-1] == spikes2[-1], \ + "Given spike trains seems not to have auxiliary spikes!" 
+ # shorter variables + t1 = spikes1 + t2 = spikes2 + + spike_events = np.empty(len(t1) + len(t2) - 2) + spike_events[0] = t1[0] + y_starts = np.empty(len(spike_events) - 1) + y_ends = np.empty(len(spike_events) - 1) + + index1 = 0 + index2 = 0 + index = 1 + dt_p1 = 0.0 + dt_f1 = get_min_dist(t1[1], t2, 0) + dt_p2 = 0.0 + dt_f2 = get_min_dist(t2[1], t1, 0) + isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) + isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) + s1 = dt_f1*(t1[1]-t1[0])/isi1 + s2 = dt_f2*(t2[1]-t2[0])/isi2 + y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + while True: + # print(index, index1, index2) + if t1[index1+1] < t2[index2+1]: + index1 += 1 + # break condition relies on existence of spikes at T_end + if index1+1 >= len(t1): + break + spike_events[index] = t1[index1] + # first calculate the previous interval end value + dt_p1 = dt_f1 # the previous time was the following time before + s1 = dt_p1 + s2 = (dt_p2*(t2[index2+1]-t1[index1]) + + dt_f2*(t1[index1]-t2[index2])) / isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + isi1 = t1[index1+1]-t1[index1] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + elif t1[index1+1] > t2[index2+1]: + index2 += 1 + if index2+1 >= len(t2): + break + spike_events[index] = t2[index2] + # first calculate the previous interval end value + dt_p2 = dt_f2 # the previous time was the following time before + s1 = (dt_p1*(t1[index1+1]-t2[index2]) + + dt_f1*(t2[index2]-t1[index1])) / isi1 + s2 = dt_p2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # now the next interval start value + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + #s2 = dt_f2 + isi2 = t2[index2+1]-t2[index2] + # s2 is the same as above, thus we can compute y2 immediately + y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + else: # t1[index1+1] == t2[index2+1] - generate only one event + index1 += 1 + index2 += 1 + if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): + break + assert dt_f2 == 0.0 + assert dt_f1 == 0.0 + spike_events[index] = t1[index1] + y_ends[index-1] = 0.0 + y_starts[index] = 0.0 + dt_p1 = 0.0 + dt_p2 = 0.0 + dt_f1 = get_min_dist(t1[index1+1], t2, index2) + dt_f2 = get_min_dist(t2[index2+1], t1, index1) + isi1 = t1[index1+1]-t1[index1] + isi2 = t2[index2+1]-t2[index2] + index += 1 + # the last event is the interval end + spike_events[index] = t1[-1] + # the ending value of the last interval + isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) + isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) + s1 = dt_p1*(t1[-1]-t1[-2])/isi1 + s2 = dt_p2*(t2[-1]-t2[-2])/isi2 + y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) + # use only the data added above + # could be less than original length due to equal spike times + return spike_events[:index+1], y_starts[:index], y_ends[:index] + + +############################################################ +# cumulative_sync_python +############################################################ +def cumulative_sync_python(spikes1, spikes2): + + def get_tau(spikes1, spikes2, i, j): + return 0.5*min([spikes1[i]-spikes1[i-1], spikes1[i+1]-spikes1[i], + spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) + c = np.zeros(N1 + N2 - 3) + c[0] = 0 + st[0] = 0 + while n < N1 + N2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + 
st[n] = spikes1[i] + if spikes1[i]-spikes2[j] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if spikes2[j]-spikes1[i] > tau: + c[n] = c[n-1] + else: + c[n] = c[n-1]+1 + else: # spikes1[i+1] = spikes2[j+1] + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1] + n += 1 + st[n] = spikes1[i] + c[n] = c[n-1]+1 + c[0] = 0 + st[0] = spikes1[0] + st[-1] = spikes1[-1] + + return st, c + + +############################################################ +# coincidence_python +############################################################ +def coincidence_python(spikes1, spikes2): + + def get_tau(spikes1, spikes2, i, j): + m = 1E100 # some huge number + if i < len(spikes1)-2: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-2: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 1: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 1: + m = min(m, spikes2[j]-spikes2[j-1]) + return 0.5*m + N1 = len(spikes1) + N2 = len(spikes2) + i = 0 + j = 0 + n = 0 + st = np.zeros(N1 + N2 - 2) # spike times + c = np.zeros(N1 + N2 - 2) # coincidences + mp = np.ones(N1 + N2 - 2) # multiplicity + while n < N1 + N2 - 2: + if spikes1[i+1] < spikes2[j+1]: + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes1[i] + if j > 0 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + elif spikes1[i+1] > spikes2[j+1]: + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j) + st[n] = spikes2[j] + if i > 0 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + c[n] = 1 + c[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + if i == N1-1 or j == N2-1: + break + n += 1 + # add only one event, but with coincidence 2 and multiplicity 2 + st[n] = spikes1[i] + c[n] = 2 + mp[n] = 2 + + st = st[:n+2] + c = c[:n+2] + mp = mp[:n+2] + + st[0] = spikes1[0] + st[-1] = spikes1[-1] + c[0] = c[1] + c[-1] = c[-2] + mp[0] = mp[1] + mp[-1] = mp[-2] + + return st, c, mp + + +############################################################ +# add_piece_wise_const_python +############################################################ +def add_piece_wise_const_python(x1, y1, x2, y2): + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty(len(x_new)-1) + x_new[0] = x1[0] + y_new[0] = y1[0] + y2[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + index += 1 + # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + index1 += 1 + x_new[index] = x1[index1] + elif x1[index1+1] > x2[index2+1]: + index2 += 1 + x_new[index] = x2[index2] + else: # x1[index1+1] == x2[index2+1]: + index1 += 1 + index2 += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1] + index += len(x1)-index1-2 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1] + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + 
x_new[index+1] = x1[-1] + # the last value is again the end of the interval + # x_new[index+1] = x1[-1] + # only use the data that was actually filled + + return x_new[:index+2], y_new[:index+1] + + +############################################################ +# add_piece_lin_const_python +############################################################ +def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): + x_new = np.empty(len(x1) + len(x2)) + y1_new = np.empty(len(x_new)-1) + y2_new = np.empty_like(y1_new) + x_new[0] = x1[0] + y1_new[0] = y11[0] + y21[0] + index1 = 0 # index for self + index2 = 0 # index for f + index = 0 # index for new + while (index1+1 < len(y11)) and (index2+1 < len(y21)): + # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) + if x1[index1+1] < x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) + y2_new[index] = y12[index1] + y + index1 += 1 + index += 1 + x_new[index] = x1[index1] + # and the starting value for the next interval + y1_new[index] = y11[index1] + y + elif x1[index1+1] > x2[index2+1]: + # first compute the end value of the previous interval + # linear interpolation of the interval + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + y2_new[index] = y22[index2] + y + index2 += 1 + index += 1 + x_new[index] = x2[index2] + # and the starting value for the next interval + y1_new[index] = y21[index2] + y + else: # x1[index1+1] == x2[index2+1]: + y2_new[index] = y12[index1] + y22[index2] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y1_new[index] = y11[index1] + y21[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y11): + # compute the linear interpolations values + y = y21[index2] + (y22[index2]-y21[index2]) * \ + (x1[index1+1:-1]-x2[index2]) / (x2[index2+1]-x2[index2]) + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y1_new[index+1:index+1+len(y11)-index1-1] = y11[index1+1:]+y + y2_new[index:index+len(y12)-index1-1] = y12[index1:-1] + y + index += len(x1)-index1-2 + elif index2+1 < len(y21): + # compute the linear interpolations values + y = y11[index1] + (y12[index1]-y11[index1]) * \ + (x2[index2+1:-1]-x1[index1]) / \ + (x1[index1+1]-x1[index1]) + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y + y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y + index += len(x2)-index2-2 + else: # both arrays reached the end simultaneously + # only the last x-value missing + x_new[index+1] = x1[-1] + # finally, the end value for the last interval + y2_new[index] = y12[-1]+y22[-1] + # only use the data that was actually filled + return x_new[:index+2], y1_new[:index+1], y2_new[:index+1] + + +############################################################ +# add_discrete_function_python +############################################################ +def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): + + x_new = np.empty(len(x1) + len(x2)) + y_new = np.empty_like(x_new) + mp_new = np.empty_like(x_new) + x_new[0] = x1[0] + index1 = 0 + index2 = 0 + index = 0 + while (index1+1 < len(y1)) and (index2+1 < len(y2)): + if x1[index1+1] < x2[index2+1]: + index1 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + mp_new[index] = mp1[index1] + elif 
x1[index1+1] > x2[index2+1]: + index2 += 1 + index += 1 + x_new[index] = x2[index2] + y_new[index] = y2[index2] + mp_new[index] = mp2[index2] + else: # x1[index1+1] == x2[index2+1] + index1 += 1 + index2 += 1 + index += 1 + x_new[index] = x1[index1] + y_new[index] = y1[index1] + y2[index2] + mp_new[index] = mp1[index1] + mp2[index2] + # one array reached the end -> copy the contents of the other to the end + if index1+1 < len(y1): + x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] + y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] + mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] + index += len(x1)-index1-1 + elif index2+1 < len(y2): + x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] + y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] + mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] + index += len(x2)-index2-1 + # else: # both arrays reached the end simultaneously + # x_new[index+1] = x1[-1] + # y_new[index+1] = y1[-1] + y2[-1] + # mp_new[index+1] = mp1[-1] + mp2[-1] + + y_new[0] = y_new[1] + mp_new[0] = mp_new[1] + + # the last value is again the end of the interval + # only use the data that was actually filled + return x_new[:index+1], y_new[:index+1], mp_new[:index+1] + diff --git a/pyspike/cython_add.pyx b/pyspike/cython_add.pyx deleted file mode 100644 index ac64005..0000000 --- a/pyspike/cython_add.pyx +++ /dev/null @@ -1,235 +0,0 @@ -#cython: boundscheck=False -#cython: wraparound=False -#cython: cdivision=True - -""" -cython_add.pyx - -cython implementation of the add function for piece-wise const and -piece-wise linear functions - -Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects -improves the performance of spike_distance by a factor of 10! - -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -""" -To test whether things can be optimized: remove all yellow stuff -in the html output:: - - cython -a cython_add.pyx - -which gives:: - - cython_add.html - -""" - -import numpy as np -cimport numpy as np - -from libc.math cimport fabs - -DTYPE = np.float -ctypedef np.float_t DTYPE_t - - -############################################################ -# add_piece_wise_const_cython -############################################################ -def add_piece_wise_const_cython(double[:] x1, double[:] y1, - double[:] x2, double[:] y2): - - cdef int N1 = len(x1) - cdef int N2 = len(x2) - cdef double[:] x_new = np.empty(N1+N2) - cdef double[:] y_new = np.empty(N1+N2-1) - cdef int index1 = 0 - cdef int index2 = 0 - cdef int index = 0 - cdef int i - with nogil: # release the interpreter lock to allow multi-threading - x_new[0] = x1[0] - y_new[0] = y1[0] + y2[0] - while (index1+1 < N1-1) and (index2+1 < N2-1): - index += 1 - # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - index1 += 1 - x_new[index] = x1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - x_new[index] = x2[index2] - else: # x1[index1+1] == x2[index2+1]: - index1 += 1 - index2 += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < N1-1: - x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] - for i in xrange(N1-index1-2): - y_new[index+1+i] = y1[index1+1+i] + y2[N2-2] - index += N1-index1-2 - elif index2+1 < N2-1: - x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] - for i in xrange(N2-index2-2): - y_new[index+1+i] = y2[index2+1+i] + y1[N1-2] - index += N2-index2-2 - 
else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[N1-1] - # the last value is again the end of the interval - # x_new[index+1] = x1[-1] - # only use the data that was actually filled - x1 = x_new[:index+2] - y1 = y_new[:index+1] - # end nogil - return np.array(x_new[:index+2]), np.array(y_new[:index+1]) - - -############################################################ -# add_piece_wise_lin_cython -############################################################ -def add_piece_wise_lin_cython(double[:] x1, double[:] y11, double[:] y12, - double[:] x2, double[:] y21, double[:] y22): - cdef int N1 = len(x1) - cdef int N2 = len(x2) - cdef double[:] x_new = np.empty(N1+N2) - cdef double[:] y1_new = np.empty(N1+N2-1) - cdef double[:] y2_new = np.empty_like(y1_new) - cdef int index1 = 0 # index for self - cdef int index2 = 0 # index for f - cdef int index = 0 # index for new - cdef int i - cdef double y - with nogil: # release the interpreter lock to allow multi-threading - x_new[0] = x1[0] - y1_new[0] = y11[0] + y21[0] - while (index1+1 < N1-1) and (index2+1 < N2-1): - # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) - y2_new[index] = y12[index1] + y - index1 += 1 - index += 1 - x_new[index] = x1[index1] - # and the starting value for the next interval - y1_new[index] = y11[index1] + y - elif x1[index1+1] > x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - y2_new[index] = y22[index2] + y - index2 += 1 - index += 1 - x_new[index] = x2[index2] - # and the starting value for the next interval - y1_new[index] = y21[index2] + y - else: # x1[index1+1] == x2[index2+1]: - y2_new[index] = y12[index1] + y22[index2] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y1_new[index] = y11[index1] + y21[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < N1-1: - x_new[index+1:index+1+N1-index1-1] = x1[index1+1:] - for i in xrange(N1-index1-2): - # compute the linear interpolations value - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1+i]-x2[index2]) / (x2[index2+1]-x2[index2]) - y1_new[index+1+i] = y11[index1+1+i] + y - y2_new[index+i] = y12[index1+i] + y - index += N1-index1-2 - elif index2+1 < N2-1: - x_new[index+1:index+1+N2-index2-1] = x2[index2+1:] - # compute the linear interpolations values - for i in xrange(N2-index2-2): - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1+i]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - y1_new[index+1+i] = y21[index2+1+i] + y - y2_new[index+i] = y22[index2+i] + y - index += N2-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[N1-1] - # finally, the end value for the last interval - y2_new[index] = y12[N1-2]+y22[N2-2] - # only use the data that was actually filled - # end nogil - return (np.array(x_new[:index+2]), - np.array(y1_new[:index+1]), - np.array(y2_new[:index+1])) - - -############################################################ -# add_discrete_function_cython -############################################################ -def 
add_discrete_function_cython(double[:] x1, double[:] y1, double[:] mp1, - double[:] x2, double[:] y2, double[:] mp2): - - cdef double[:] x_new = np.empty(len(x1) + len(x2)) - cdef double[:] y_new = np.empty_like(x_new) - cdef double[:] mp_new = np.empty_like(x_new) - cdef int index1 = 0 - cdef int index2 = 0 - cdef int index = 0 - cdef int N1 = len(y1) - cdef int N2 = len(y2) - x_new[0] = x1[0] - while (index1+1 < N1) and (index2+1 < N2): - if x1[index1+1] < x2[index2+1]: - index1 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] - mp_new[index] = mp1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - index += 1 - x_new[index] = x2[index2] - y_new[index] = y2[index2] - mp_new[index] = mp2[index2] - else: # x1[index1+1] == x2[index2+1] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - mp_new[index] = mp1[index1] + mp2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] - - y_new[0] = y_new[1] - mp_new[0] = mp_new[1] - - # the last value is again the end of the interval - # only use the data that was actually filled - return (np.array(x_new[:index+1]), - np.array(y_new[:index+1]), - np.array(mp_new[:index+1])) diff --git a/pyspike/cython_distance.pyx b/pyspike/cython_distance.pyx deleted file mode 100644 index 489aab9..0000000 --- a/pyspike/cython_distance.pyx +++ /dev/null @@ -1,312 +0,0 @@ -#cython: boundscheck=False -#cython: wraparound=False -#cython: cdivision=True - -""" -cython_distances.pyx - -cython implementation of the isi- and spike-distance - -Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects -improves the performance of spike_distance by a factor of 10! 
- -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -""" -To test whether things can be optimized: remove all yellow stuff -in the html output:: - - cython -a cython_distance.pyx - -which gives:: - - cython_distance.html - -""" - -import numpy as np -cimport numpy as np - -from libc.math cimport fabs -from libc.math cimport fmax -from libc.math cimport fmin - -DTYPE = np.float -ctypedef np.float_t DTYPE_t - - -############################################################ -# isi_distance_cython -############################################################ -def isi_distance_cython(double[:] s1, - double[:] s2): - - cdef double[:] spike_events - cdef double[:] isi_values - cdef int index1, index2, index - cdef int N1, N2 - cdef double nu1, nu2 - N1 = len(s1)-1 - N2 = len(s2)-1 - - nu1 = s1[1]-s1[0] - nu2 = s2[1]-s2[0] - spike_events = np.empty(N1+N2) - spike_events[0] = s1[0] - # the values have one entry less - the number of intervals between events - isi_values = np.empty(N1+N2-1) - - with nogil: # release the interpreter to allow multithreading - isi_values[0] = fabs(nu1-nu2)/fmax(nu1, nu2) - index1 = 0 - index2 = 0 - index = 1 - while True: - # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= N1: - break - spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - elif s1[index1+1] > s2[index2+1]: - index2 += 1 - if index2 >= N2: - break - spike_events[index] = s2[index2] - nu2 = s2[index2+1]-s2[index2] - else: # s1[index1+1] == s2[index2+1] - index1 += 1 - index2 += 1 - if (index1 >= N1) or (index2 >= N2): - break - spike_events[index] = s1[index1] - nu1 = s1[index1+1]-s1[index1] - nu2 = s2[index2+1]-s2[index2] - # compute the corresponding isi-distance - isi_values[index] = fabs(nu1 - nu2) / fmax(nu1, nu2) - index += 1 - # the last event is the interval end - spike_events[index] = s1[N1] - # end nogil - - return spike_events[:index+1], isi_values[:index] - - -############################################################ -# get_min_dist_cython -############################################################ -cdef inline double get_min_dist_cython(double spike_time, - double[:] spike_train, - # use memory view to ensure inlining - # np.ndarray[DTYPE_t,ndim=1] spike_train, - int N, - int start_index=0) nogil: - """ Returns the minimal distance |spike_time - spike_train[i]| - with i>=start_index. 
- """ - cdef double d, d_temp - d = fabs(spike_time - spike_train[start_index]) - start_index += 1 - while start_index < N: - d_temp = fabs(spike_time - spike_train[start_index]) - if d_temp > d: - break - else: - d = d_temp - start_index += 1 - return d - - -############################################################ -# isi_avrg_cython -############################################################ -cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: - return 0.5*(isi1+isi2)*(isi1+isi2) - # alternative definition to obtain ~ 0.5 for Poisson spikes - # return 0.5*(isi1*isi1+isi2*isi2) - - -############################################################ -# spike_distance_cython -############################################################ -def spike_distance_cython(double[:] t1, - double[:] t2): - - cdef double[:] spike_events - cdef double[:] y_starts - cdef double[:] y_ends - - cdef int N1, N2, index1, index2, index - cdef double dt_p1, dt_p2, dt_f1, dt_f2, isi1, isi2, s1, s2 - - N1 = len(t1) - N2 = len(t2) - - spike_events = np.empty(N1+N2-2) - spike_events[0] = t1[0] - y_starts = np.empty(len(spike_events)-1) - y_ends = np.empty(len(spike_events)-1) - - with nogil: # release the interpreter to allow multithreading - index1 = 0 - index2 = 0 - index = 1 - dt_p1 = 0.0 - dt_f1 = get_min_dist_cython(t1[1], t2, N2, 0) - dt_p2 = 0.0 - dt_f2 = get_min_dist_cython(t2[1], t1, N1, 0) - isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) - isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) - s1 = dt_f1*(t1[1]-t1[0])/isi1 - s2 = dt_f2*(t2[1]-t2[0])/isi2 - y_starts[0] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - while True: - # print(index, index1, index2) - if t1[index1+1] < t2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1+1 >= N1: - break - spike_events[index] = t1[index1] - # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time now was the following time before - s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + - dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - # now the next interval start value - dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) - isi1 = t1[index1+1]-t1[index1] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - elif t1[index1+1] > t2[index2+1]: - index2 += 1 - if index2+1 >= N2: - break - spike_events[index] = t2[index2] - # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time now was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + - dt_f1*(t2[index2]-t1[index1])) / isi1 - s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - # now the next interval start value - dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) - #s2 = dt_f2 - isi2 = t2[index2+1]-t2[index2] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) - else: # t1[index1+1] == t2[index2+1] - generate only one event - index1 += 1 - index2 += 1 - if (index1+1 >= N1) or (index2+1 >= N2): - break - spike_events[index] = t1[index1] - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 - dt_p1 = 0.0 - dt_p2 = 0.0 - dt_f1 = get_min_dist_cython(t1[index1+1], t2, N2, index2) - dt_f2 = get_min_dist_cython(t2[index2+1], t1, N1, index1) - isi1 = t1[index1+1]-t1[index1] - isi2 = t2[index2+1]-t2[index2] - index += 1 - # the last event is the interval 
end - spike_events[index] = t1[N1-1] - # the ending value of the last interval - isi1 = max(t1[N1-1]-t1[N1-2], t1[N1-2]-t1[N1-3]) - isi2 = max(t2[N2-1]-t2[N2-2], t2[N2-2]-t2[N2-3]) - s1 = dt_p1*(t1[N1-1]-t1[N1-2])/isi1 - s2 = dt_p2*(t2[N2-1]-t2[N2-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) - # end nogil - - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index+1], y_starts[:index], y_ends[:index] - - - -############################################################ -# coincidence_python -############################################################ -cdef inline double get_tau(double[:] spikes1, double[:] spikes2, int i, int j): - cdef double m = 1E100 # some huge number - cdef int N1 = len(spikes1)-2 - cdef int N2 = len(spikes2)-2 - if i < N1: - m = fmin(m, spikes1[i+1]-spikes1[i]) - if j < N2: - m = fmin(m, spikes2[j+1]-spikes2[j]) - if i > 1: - m = fmin(m, spikes1[i]-spikes1[i-1]) - if j > 1: - m = fmin(m, spikes2[j]-spikes2[j-1]) - return 0.5*m - - -############################################################ -# coincidence_cython -############################################################ -def coincidence_cython(double[:] spikes1, double[:] spikes2): - - cdef int N1 = len(spikes1) - cdef int N2 = len(spikes2) - cdef int i = 0 - cdef int j = 0 - cdef int n = 0 - cdef double[:] st = np.zeros(N1 + N2 - 2) # spike times - cdef double[:] c = np.zeros(N1 + N2 - 2) # coincidences - cdef double[:] mp = np.ones(N1 + N2 - 2) # multiplicity - cdef double tau - while n < N1 + N2 - 2: - if spikes1[i+1] < spikes2[j+1]: - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes1[i] - if j > 0 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - elif spikes1[i+1] > spikes2[j+1]: - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes2[j] - if i > 0 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - if i == N1-1 or j == N2-1: - break - n += 1 - # add only one event, but with coincidence 2 and multiplicity 2 - st[n] = spikes1[i] - c[n] = 2 - mp[n] = 2 - - st = st[:n+2] - c = c[:n+2] - mp = mp[:n+2] - - st[0] = spikes1[0] - st[len(st)-1] = spikes1[len(spikes1)-1] - c[0] = c[1] - c[len(c)-1] = c[len(c)-2] - mp[0] = mp[1] - mp[len(mp)-1] = mp[len(mp)-2] - - return st, c, mp diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 745d280..c2ef8e8 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -35,13 +35,15 @@ def isi_profile(spikes1, spikes2): # load cython implementation try: - from cython_distance import isi_distance_cython as isi_distance_impl + from cython.cython_distance import isi_distance_cython \ + as isi_distance_impl except ImportError: print("Warning: isi_distance_cython not found. 
Make sure that PySpike \ is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from python_backend import isi_distance_python as isi_distance_impl + from cython.python_backend import isi_distance_python \ + as isi_distance_impl times, values = isi_distance_impl(spikes1, spikes2) return PieceWiseConstFunc(times, values) diff --git a/pyspike/python_backend.py b/pyspike/python_backend.py deleted file mode 100644 index 481daf9..0000000 --- a/pyspike/python_backend.py +++ /dev/null @@ -1,485 +0,0 @@ -""" python_backend.py - -Collection of python functions that can be used instead of the cython -implementation. - -Copyright 2014, Mario Mulansky - -Distributed under the BSD License - -""" - -import numpy as np - - -############################################################ -# isi_distance_python -############################################################ -def isi_distance_python(s1, s2): - """ Plain Python implementation of the isi distance. - """ - # compute the interspike interval - nu1 = s1[1:] - s1[:-1] - nu2 = s2[1:] - s2[:-1] - - # compute the isi-distance - spike_events = np.empty(len(nu1) + len(nu2)) - spike_events[0] = s1[0] - # the values have one entry less - the number of intervals between events - isi_values = np.empty(len(spike_events) - 1) - # add the distance of the first events - # isi_values[0] = nu1[0]/nu2[0] - 1.0 if nu1[0] <= nu2[0] \ - # else 1.0 - nu2[0]/nu1[0] - isi_values[0] = abs(nu1[0] - nu2[0]) / max(nu1[0], nu2[0]) - index1 = 0 - index2 = 0 - index = 1 - while True: - # check which spike is next - from s1 or s2 - if s1[index1+1] < s2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1 >= len(nu1): - break - spike_events[index] = s1[index1] - elif s1[index1+1] > s2[index2+1]: - index2 += 1 - if index2 >= len(nu2): - break - spike_events[index] = s2[index2] - else: # s1[index1 + 1] == s2[index2 + 1] - index1 += 1 - index2 += 1 - if (index1 >= len(nu1)) or (index2 >= len(nu2)): - break - spike_events[index] = s1[index1] - # compute the corresponding isi-distance - isi_values[index] = abs(nu1[index1] - nu2[index2]) / \ - max(nu1[index1], nu2[index2]) - index += 1 - # the last event is the interval end - spike_events[index] = s1[-1] - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index + 1], isi_values[:index] - - -############################################################ -# get_min_dist -############################################################ -def get_min_dist(spike_time, spike_train, start_index=0): - """ Returns the minimal distance |spike_time - spike_train[i]| - with i>=start_index. - """ - d = abs(spike_time - spike_train[start_index]) - start_index += 1 - while start_index < len(spike_train): - d_temp = abs(spike_time - spike_train[start_index]) - if d_temp > d: - break - else: - d = d_temp - start_index += 1 - return d - - -############################################################ -# spike_distance_python -############################################################ -def spike_distance_python(spikes1, spikes2): - """ Computes the instantaneous spike-distance S_spike (t) of the two given - spike trains. The spike trains are expected to have auxiliary spikes at the - beginning and end of the interval. Use the function add_auxiliary_spikes to - add those spikes to the spike train. - Args: - - spikes1, spikes2: ordered arrays of spike times with auxiliary spikes. 
- Returns: - - PieceWiseLinFunc describing the spike-distance. - """ - # check for auxiliary spikes - first and last spikes should be identical - assert spikes1[0] == spikes2[0], \ - "Given spike trains seems not to have auxiliary spikes!" - assert spikes1[-1] == spikes2[-1], \ - "Given spike trains seems not to have auxiliary spikes!" - # shorter variables - t1 = spikes1 - t2 = spikes2 - - spike_events = np.empty(len(t1) + len(t2) - 2) - spike_events[0] = t1[0] - y_starts = np.empty(len(spike_events) - 1) - y_ends = np.empty(len(spike_events) - 1) - - index1 = 0 - index2 = 0 - index = 1 - dt_p1 = 0.0 - dt_f1 = get_min_dist(t1[1], t2, 0) - dt_p2 = 0.0 - dt_f2 = get_min_dist(t2[1], t1, 0) - isi1 = max(t1[1]-t1[0], t1[2]-t1[1]) - isi2 = max(t2[1]-t2[0], t2[2]-t2[1]) - s1 = dt_f1*(t1[1]-t1[0])/isi1 - s2 = dt_f2*(t2[1]-t2[0])/isi2 - y_starts[0] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - while True: - # print(index, index1, index2) - if t1[index1+1] < t2[index2+1]: - index1 += 1 - # break condition relies on existence of spikes at T_end - if index1+1 >= len(t1): - break - spike_events[index] = t1[index1] - # first calculate the previous interval end value - dt_p1 = dt_f1 # the previous time was the following time before - s1 = dt_p1 - s2 = (dt_p2*(t2[index2+1]-t1[index1]) + - dt_f2*(t1[index1]-t2[index2])) / isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # now the next interval start value - dt_f1 = get_min_dist(t1[index1+1], t2, index2) - isi1 = t1[index1+1]-t1[index1] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - elif t1[index1+1] > t2[index2+1]: - index2 += 1 - if index2+1 >= len(t2): - break - spike_events[index] = t2[index2] - # first calculate the previous interval end value - dt_p2 = dt_f2 # the previous time was the following time before - s1 = (dt_p1*(t1[index1+1]-t2[index2]) + - dt_f1*(t2[index2]-t1[index1])) / isi1 - s2 = dt_p2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # now the next interval start value - dt_f2 = get_min_dist(t2[index2+1], t1, index1) - #s2 = dt_f2 - isi2 = t2[index2+1]-t2[index2] - # s2 is the same as above, thus we can compute y2 immediately - y_starts[index] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - else: # t1[index1+1] == t2[index2+1] - generate only one event - index1 += 1 - index2 += 1 - if (index1+1 >= len(t1)) or (index2+1 >= len(t2)): - break - assert dt_f2 == 0.0 - assert dt_f1 == 0.0 - spike_events[index] = t1[index1] - y_ends[index-1] = 0.0 - y_starts[index] = 0.0 - dt_p1 = 0.0 - dt_p2 = 0.0 - dt_f1 = get_min_dist(t1[index1+1], t2, index2) - dt_f2 = get_min_dist(t2[index2+1], t1, index1) - isi1 = t1[index1+1]-t1[index1] - isi2 = t2[index2+1]-t2[index2] - index += 1 - # the last event is the interval end - spike_events[index] = t1[-1] - # the ending value of the last interval - isi1 = max(t1[-1]-t1[-2], t1[-2]-t1[-3]) - isi2 = max(t2[-1]-t2[-2], t2[-2]-t2[-3]) - s1 = dt_p1*(t1[-1]-t1[-2])/isi1 - s2 = dt_p2*(t2[-1]-t2[-2])/isi2 - y_ends[index-1] = (s1*isi2 + s2*isi1) / ((isi1+isi2)**2/2) - # use only the data added above - # could be less than original length due to equal spike times - return spike_events[:index+1], y_starts[:index], y_ends[:index] - - -############################################################ -# cumulative_sync_python -############################################################ -def cumulative_sync_python(spikes1, spikes2): - - def get_tau(spikes1, spikes2, i, j): - return 0.5*min([spikes1[i]-spikes1[i-1], 
spikes1[i+1]-spikes1[i], - spikes2[j]-spikes2[j-1], spikes2[j+1]-spikes2[j]]) - N1 = len(spikes1) - N2 = len(spikes2) - i = 0 - j = 0 - n = 0 - st = np.zeros(N1 + N2 - 2) - c = np.zeros(N1 + N2 - 3) - c[0] = 0 - st[0] = 0 - while n < N1 + N2: - if spikes1[i+1] < spikes2[j+1]: - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes1[i] - if spikes1[i]-spikes2[j] > tau: - c[n] = c[n-1] - else: - c[n] = c[n-1]+1 - elif spikes1[i+1] > spikes2[j+1]: - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes2[j] - if spikes2[j]-spikes1[i] > tau: - c[n] = c[n-1] - else: - c[n] = c[n-1]+1 - else: # spikes1[i+1] = spikes2[j+1] - j += 1 - i += 1 - if i == N1-1 or j == N2-1: - break - n += 1 - st[n] = spikes1[i] - c[n] = c[n-1] - n += 1 - st[n] = spikes1[i] - c[n] = c[n-1]+1 - c[0] = 0 - st[0] = spikes1[0] - st[-1] = spikes1[-1] - - return st, c - - -############################################################ -# coincidence_python -############################################################ -def coincidence_python(spikes1, spikes2): - - def get_tau(spikes1, spikes2, i, j): - m = 1E100 # some huge number - if i < len(spikes1)-2: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-2: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 1: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 1: - m = min(m, spikes2[j]-spikes2[j-1]) - return 0.5*m - N1 = len(spikes1) - N2 = len(spikes2) - i = 0 - j = 0 - n = 0 - st = np.zeros(N1 + N2 - 2) # spike times - c = np.zeros(N1 + N2 - 2) # coincidences - mp = np.ones(N1 + N2 - 2) # multiplicity - while n < N1 + N2 - 2: - if spikes1[i+1] < spikes2[j+1]: - i += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes1[i] - if j > 0 and spikes1[i]-spikes2[j] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - elif spikes1[i+1] > spikes2[j+1]: - j += 1 - n += 1 - tau = get_tau(spikes1, spikes2, i, j) - st[n] = spikes2[j] - if i > 0 and spikes2[j]-spikes1[i] < tau: - # coincidence between the current spike and the previous spike - # both get marked with 1 - c[n] = 1 - c[n-1] = 1 - else: # spikes1[i+1] = spikes2[j+1] - # advance in both spike trains - j += 1 - i += 1 - if i == N1-1 or j == N2-1: - break - n += 1 - # add only one event, but with coincidence 2 and multiplicity 2 - st[n] = spikes1[i] - c[n] = 2 - mp[n] = 2 - - st = st[:n+2] - c = c[:n+2] - mp = mp[:n+2] - - st[0] = spikes1[0] - st[-1] = spikes1[-1] - c[0] = c[1] - c[-1] = c[-2] - mp[0] = mp[1] - mp[-1] = mp[-2] - - return st, c, mp - - -############################################################ -# add_piece_wise_const_python -############################################################ -def add_piece_wise_const_python(x1, y1, x2, y2): - x_new = np.empty(len(x1) + len(x2)) - y_new = np.empty(len(x_new)-1) - x_new[0] = x1[0] - y_new[0] = y1[0] + y2[0] - index1 = 0 - index2 = 0 - index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): - index += 1 - # print(index1+1, x1[index1+1], y1[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - index1 += 1 - x_new[index] = x1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - x_new[index] = x2[index2] - else: # x1[index1+1] == x2[index2+1]: - index1 += 1 - index2 += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - 
y_new[index+1:index+1+len(y1)-index1-1] = y1[index1+1:] + y2[-1] - index += len(x1)-index1-2 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(y2)-index2-1] = y2[index2+1:] + y1[-1] - index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[-1] - # the last value is again the end of the interval - # x_new[index+1] = x1[-1] - # only use the data that was actually filled - - return x_new[:index+2], y_new[:index+1] - - -############################################################ -# add_piece_lin_const_python -############################################################ -def add_piece_wise_lin_python(x1, y11, y12, x2, y21, y22): - x_new = np.empty(len(x1) + len(x2)) - y1_new = np.empty(len(x_new)-1) - y2_new = np.empty_like(y1_new) - x_new[0] = x1[0] - y1_new[0] = y11[0] + y21[0] - index1 = 0 # index for self - index2 = 0 # index for f - index = 0 # index for new - while (index1+1 < len(y11)) and (index2+1 < len(y21)): - # print(index1+1, x1[index1+1], self.y[index1+1], x_new[index]) - if x1[index1+1] < x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1]-x2[index2]) / (x2[index2+1]-x2[index2]) - y2_new[index] = y12[index1] + y - index1 += 1 - index += 1 - x_new[index] = x1[index1] - # and the starting value for the next interval - y1_new[index] = y11[index1] + y - elif x1[index1+1] > x2[index2+1]: - # first compute the end value of the previous interval - # linear interpolation of the interval - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - y2_new[index] = y22[index2] + y - index2 += 1 - index += 1 - x_new[index] = x2[index2] - # and the starting value for the next interval - y1_new[index] = y21[index2] + y - else: # x1[index1+1] == x2[index2+1]: - y2_new[index] = y12[index1] + y22[index2] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y1_new[index] = y11[index1] + y21[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y11): - # compute the linear interpolations values - y = y21[index2] + (y22[index2]-y21[index2]) * \ - (x1[index1+1:-1]-x2[index2]) / (x2[index2+1]-x2[index2]) - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y1_new[index+1:index+1+len(y11)-index1-1] = y11[index1+1:]+y - y2_new[index:index+len(y12)-index1-1] = y12[index1:-1] + y - index += len(x1)-index1-2 - elif index2+1 < len(y21): - # compute the linear interpolations values - y = y11[index1] + (y12[index1]-y11[index1]) * \ - (x2[index2+1:-1]-x1[index1]) / \ - (x1[index1+1]-x1[index1]) - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y1_new[index+1:index+1+len(y21)-index2-1] = y21[index2+1:] + y - y2_new[index:index+len(y22)-index2-1] = y22[index2:-1] + y - index += len(x2)-index2-2 - else: # both arrays reached the end simultaneously - # only the last x-value missing - x_new[index+1] = x1[-1] - # finally, the end value for the last interval - y2_new[index] = y12[-1]+y22[-1] - # only use the data that was actually filled - return x_new[:index+2], y1_new[:index+1], y2_new[:index+1] - - -############################################################ -# add_discrete_function_python -############################################################ -def add_discrete_function_python(x1, y1, mp1, x2, y2, mp2): - - 
x_new = np.empty(len(x1) + len(x2)) - y_new = np.empty_like(x_new) - mp_new = np.empty_like(x_new) - x_new[0] = x1[0] - index1 = 0 - index2 = 0 - index = 0 - while (index1+1 < len(y1)) and (index2+1 < len(y2)): - if x1[index1+1] < x2[index2+1]: - index1 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] - mp_new[index] = mp1[index1] - elif x1[index1+1] > x2[index2+1]: - index2 += 1 - index += 1 - x_new[index] = x2[index2] - y_new[index] = y2[index2] - mp_new[index] = mp2[index2] - else: # x1[index1+1] == x2[index2+1] - index1 += 1 - index2 += 1 - index += 1 - x_new[index] = x1[index1] - y_new[index] = y1[index1] + y2[index2] - mp_new[index] = mp1[index1] + mp2[index2] - # one array reached the end -> copy the contents of the other to the end - if index1+1 < len(y1): - x_new[index+1:index+1+len(x1)-index1-1] = x1[index1+1:] - y_new[index+1:index+1+len(x1)-index1-1] = y1[index1+1:] - mp_new[index+1:index+1+len(x1)-index1-1] = mp1[index1+1:] - index += len(x1)-index1-1 - elif index2+1 < len(y2): - x_new[index+1:index+1+len(x2)-index2-1] = x2[index2+1:] - y_new[index+1:index+1+len(x2)-index2-1] = y2[index2+1:] - mp_new[index+1:index+1+len(x2)-index2-1] = mp2[index2+1:] - index += len(x2)-index2-1 - # else: # both arrays reached the end simultaneously - # x_new[index+1] = x1[-1] - # y_new[index+1] = y1[-1] + y2[-1] - # mp_new[index+1] = mp1[-1] + mp2[-1] - - y_new[0] = y_new[1] - mp_new[0] = mp_new[1] - - # the last value is again the end of the interval - # only use the data that was actually filled - return x_new[:index+1], y_new[:index+1], mp_new[:index+1] - diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index 2c989a4..f721c86 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -35,14 +35,15 @@ def spike_profile(spikes1, spikes2): # cython implementation try: - from cython_distance import spike_distance_cython \ + from cython.cython_distance import spike_distance_cython \ as spike_distance_impl except ImportError: print("Warning: spike_distance_cython not found. Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from python_backend import spike_distance_python as spike_distance_impl + from cython.python_backend import spike_distance_python \ + as spike_distance_impl times, y_starts, y_ends = spike_distance_impl(spikes1, spikes2) return PieceWiseLinFunc(times, y_starts, y_ends) diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index bded8da..342bf69 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -33,14 +33,14 @@ def spike_sync_profile(spikes1, spikes2): # cython implementation try: - from cython_distance import coincidence_cython \ + from cython.cython_distance import coincidence_cython \ as coincidence_impl except ImportError: print("Warning: spike_distance_cython not found. 
Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from python_backend import coincidence_python \ + from cython.python_backend import coincidence_python \ as coincidence_impl times, coincidences, multiplicity = coincidence_impl(spikes1, spikes2) diff --git a/setup.py b/setup.py index 9cab5de..289d521 100644 --- a/setup.py +++ b/setup.py @@ -21,8 +21,8 @@ except ImportError: else: use_cython = True -if os.path.isfile("pyspike/cython_add.c") and \ - os.path.isfile("pyspike/cython_distance.c"): +if os.path.isfile("pyspike/cython/cython_add.c") and \ + os.path.isfile("pyspike/cython/cython_distance.c"): use_c = True else: use_c = False @@ -32,14 +32,14 @@ ext_modules = [] if use_cython: # Cython is available, compile .pyx -> .c ext_modules += [ - Extension("pyspike.cython_add", ["pyspike/cython_add.pyx"]), - Extension("pyspike.cython_distance", ["pyspike/cython_distance.pyx"]), + Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.pyx"]), + Extension("pyspike.cython.cython_distance", ["pyspike/cython/cython_distance.pyx"]), ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries ext_modules += [ - Extension("pyspike.cython_add", ["pyspike/cython_add.c"]), - Extension("pyspike.cython_distance", ["pyspike/cython_distance.c"]), + Extension("pyspike.cython.cython_add", ["pyspike/cython/cython_add.c"]), + Extension("pyspike.cython.cython_distance", ["pyspike/cython/cython_distance.c"]), ] # neither cython nor c files available -> automatic fall-back to python backend @@ -78,7 +78,7 @@ train similarity', 'Programming Language :: Python :: 2.7', ], package_data={ - 'pyspike': ['cython_add.c', 'cython_distance.c'], + 'pyspike': ['cython/cython_add.c', 'cython/cython_distance.c'], 'test': ['Spike_testdata.txt'] } ) -- cgit v1.2.3 From cc8ae1974454307de4c69d9bb2a860538f0adfef Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 27 Apr 2015 17:27:24 +0200 Subject: updated docs --- Readme.rst | 80 +++++++++++++++++++++---------------------- doc/pyspike.rst | 54 +++++++++++++++-------------- pyspike/DiscreteFunc.py | 12 +++---- pyspike/PieceWiseConstFunc.py | 12 +++---- pyspike/PieceWiseLinFunc.py | 12 +++---- pyspike/SpikeTrain.py | 41 ++++++++++++++++------ pyspike/isi_distance.py | 59 ++++++++++++++++--------------- pyspike/spike_distance.py | 58 +++++++++++++++---------------- pyspike/spike_sync.py | 30 ++++++++-------- pyspike/spikes.py | 32 +++++++---------- 10 files changed, 196 insertions(+), 194 deletions(-) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/Readme.rst b/Readme.rst index 03441fc..e80c0f7 100644 --- a/Readme.rst +++ b/Readme.rst @@ -19,6 +19,14 @@ All source codes are available on `Github `_. -To quickly obtain spike trains from such files, PySpike provides the function :code:`load_spike_trains_from_txt`. +To quickly obtain spike trains from such files, PySpike provides the function :func:`.load_spike_trains_from_txt`. .. code:: python @@ -88,22 +98,13 @@ To quickly obtain spike trains from such files, PySpike provides the function :c import pyspike as spk spike_trains = spk.load_spike_trains_from_txt("SPIKY_testdata.txt", - time_interval=(0, 4000)) + edges=(0, 4000)) This function expects the name of the data file as first parameter. -Additionally, the time interval of the spike train measurement can be provided as a pair of start- and end-time values. 
-If the time interval is provided (:code:`time_interval is not None`), auxiliary spikes at the start- and end-time of the interval are added to the spike trains. +Furthermore, the time interval of the spike train measurement (edges of the spike trains) should be provided as a pair of start- and end-time values. Furthermore, the spike trains are sorted via :code:`np.sort` (disable this feature by providing :code:`is_sorted=True` as a parameter to the load function). -As result, :code:`load_spike_trains_from_txt` returns a *list of arrays* containing the spike trains in the text file. - -If you load spike trains yourself, i.e. from data files with different structure, you can use the helper function :code:`add_auxiliary_spikes` to add the auxiliary spikes at the beginning and end of the observation interval. -Both the ISI and the SPIKE distance computation require the presence of auxiliary spikes, so make sure you have those in your spike trains: +As result, :func:`.load_spike_trains_from_txt` returns a *list of arrays* containing the spike trains in the text file. -.. code:: python - - spike_train = spk.add_auxiliary_spikes(spike_train, (T_start, T_end)) - # if you provide only a single value, it is interpreted as T_end, while T_start=0 - spike_train = spk.add_auxiliary_spikes(spike_train, T_end) Computing bivariate distances profiles --------------------------------------- @@ -114,19 +115,18 @@ Computing bivariate distances profiles Spike trains are expected to be *sorted*! For performance reasons, the PySpike distance functions do not check if the spike trains provided are indeed sorted. - Make sure that all your spike trains are sorted, which is ensured if you use the `load_spike_trains_from_txt` function with the parameter `is_sorted=False`. - If in doubt, use :code:`spike_train = np.sort(spike_train)` to obtain a correctly sorted spike train. - - Furthermore, the spike trains should have auxiliary spikes at the beginning and end of the observation interval. - You can ensure this by providing the :code:`time_interval` in the :code:`load_spike_trains_from_txt` function, or calling :code:`add_auxiliary_spikes` for your spike trains. - The spike trains must have *the same* observation interval! + Make sure that all your spike trains are sorted, which is ensured if you use the :func:`.load_spike_trains_from_txt` function with the parameter `is_sorted=False` (default). + If in doubt, use :meth:`.SpikeTrain.sort()` to ensure a correctly sorted spike train. ----------------------- + If you need to copy a spike train, use the :meth:`.SpikeTrain.copy()` method. + Simple assignment `t2 = t1` does not create a copy of the spike train data, but a reference as `numpy.array` is used for storing the data. + +------------------------------ ISI-distance ............ -The following code loads some exemplary spike trains, computes the dissimilarity profile of the ISI-distance of the first two spike trains, and plots it with matplotlib: +The following code loads some exemplary spike trains, computes the dissimilarity profile of the ISI-distance of the first two :class:`.SpikeTrain` s, and plots it with matplotlib: .. 
code:: python

    import matplotlib.pyplot as plt
    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
-                                                  time_interval=(0, 4000))
+                                                  edges=(0, 4000))
    isi_profile = spk.isi_profile(spike_trains[0], spike_trains[1])
    x, y = isi_profile.get_plottable_data()
    plt.plot(x, y, '--k')
    print("ISI distance: %.8f" % isi_profile.avrg())
    plt.show()

-The ISI-profile is a piece-wise constant function, and hence the function :code:`isi_profile` returns an instance of the :code:`PieceWiseConstFunc` class.
+The ISI-profile is a piece-wise constant function, and hence the function :func:`.isi_profile` returns an instance of the :class:`.PieceWiseConstFunc` class.
 As shown above, this class allows you to obtain arrays that can be used to plot the function with :code:`plt.plot`, but also to compute the time average, which amounts to the final scalar ISI-distance.
-By default, the time average is computed for the whole :code:`PieceWiseConstFunc` function.
+By default, the time average is computed for the whole :class:`.PieceWiseConstFunc` function.
 However, it is also possible to obtain the average of a specific interval by providing a pair of floats defining the start and end of the interval.
-In the above example, the following code computes the ISI-distances obtained from averaging the ISI-profile over four different intervals:
+For the above example, the following code computes the ISI-distances obtained from averaging the ISI-profile over four different intervals:

 .. code:: python

@@ -168,7 +168,7 @@ where :code:`interval` is optional, as above, and if omitted the ISI-distance is

 SPIKE-distance
 ..............

-To compute for the spike distance profile you use the function :code:`spike_profile` instead of :code:`isi_profile` above.
+To compute the spike distance profile, you use the function :func:`.spike_profile` instead of :code:`isi_profile` above.
 But the general approach is very similar:

 .. code:: python

    import matplotlib.pyplot as plt
    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
-                                                  time_interval=(0, 4000))
+                                                  edges=(0, 4000))
    spike_profile = spk.spike_profile(spike_trains[0], spike_trains[1])
    x, y = spike_profile.get_plottable_data()
    plt.plot(x, y, '--k')
@@ -185,9 +185,9 @@ But the general approach is very similar:
    plt.show()

 This short example computes and plots the SPIKE-profile of the first two spike trains in the file :code:`PySpike_testdata.txt`.
-In contrast to the ISI-profile, a SPIKE-profile is a piece-wise *linear* function and is therefore represented by a :code:`PieceWiseLinFunc` object.
-Just like the :code:`PieceWiseConstFunc` for the ISI-profile, the :code:`PieceWiseLinFunc` provides a :code:`get_plottable_data` member function that returns arrays that can be used directly to plot the function.
-Furthermore, the :code:`avrg` member function returns the average of the profile defined as the overall SPIKE distance.
+In contrast to the ISI-profile, a SPIKE-profile is a piece-wise *linear* function and is therefore represented by a :class:`.PieceWiseLinFunc` object.
+Just like the :class:`.PieceWiseConstFunc` for the ISI-profile, the :class:`.PieceWiseLinFunc` provides a :meth:`.PieceWiseLinFunc.get_plottable_data` member function that returns arrays that can be used directly to plot the function.
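+For instance, a minimal plotting sketch (reusing the ``spike_profile`` object and the ``plt`` import from the code above; the line style and label are illustrative):
+
+.. code:: python
+
+    x, y = spike_profile.get_plottable_data()
+    plt.plot(x, y, '-b', label="SPIKE-profile")
+    plt.legend()
+    plt.show()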
+Furthermore, the :meth:`.PieceWiseLinFunc.avrg` member function returns the average of the profile defined as the overall SPIKE distance.
 As above, you can provide an interval as a pair of floats as well as a sequence of such pairs to :code:`avrg` to specify the averaging interval if required.

 Again, you can use

@@ -217,9 +217,9 @@ SPIKE synchronization

 SPIKE synchronization is another approach to measure spike synchrony.
 In contrast to the SPIKE- and ISI-distance, it measures similarity instead of dissimilarity, i.e. higher values represent larger synchrony.
 Another difference is that the SPIKE synchronization profile is only defined exactly at the spike times, not for the whole interval of the spike trains.
-Therefore, it is represented by a :code:`DiscreteFunction`.
+Therefore, it is represented by a :class:`.DiscreteFunction`.

-To compute for the spike synchronization profile, PySpike provides the function :code:`spike_sync_profile`.
+To compute the spike synchronization profile, PySpike provides the function :func:`.spike_sync_profile`.
 The general handling of the profile, however, is similar to the other profiles above:

 .. code:: python

    import matplotlib.pyplot as plt
    import pyspike as spk

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
-                                                  time_interval=(0, 4000))
+                                                  edges=(0, 4000))
    spike_profile = spk.spike_sync_profile(spike_trains[0], spike_trains[1])
    x, y = spike_profile.get_plottable_data()

-For the direct computation of the overall spike synchronization value within some interval, the :code:`spike_sync` function can be used:
+For the direct computation of the overall spike synchronization value within some interval, the :func:`.spike_sync` function can be used:

 .. code:: python

@@ -243,23 +243,23 @@ Computing multivariate profiles and distances
 ----------------------------------------------

 To compute the multivariate ISI-profile, SPIKE-profile or SPIKE-Synchronization profile of a set of spike trains, PySpike provides multi-variate versions of the profile functions.
-The following example computes the multivariate ISI-, SPIKE- and SPIKE-Sync-profile for a list of spike trains:
+The following example computes the multivariate ISI-, SPIKE- and SPIKE-Sync-profile for a list of spike trains using the :func:`.isi_profile_multi`, :func:`.spike_profile_multi`, :func:`.spike_sync_profile_multi` functions:

 .. code:: python

    spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt",
-                                                  time_interval=(0, 4000))
+                                                  edges=(0, 4000))
    avrg_isi_profile = spk.isi_profile_multi(spike_trains)
    avrg_spike_profile = spk.spike_profile_multi(spike_trains)
    avrg_spike_sync_profile = spk.spike_sync_profile_multi(spike_trains)

 All functions take an optional parameter :code:`indices`, a list of indices that allows you to define the spike trains that should be used for the multivariate profile.
-As before, if you are only interested in the distance values, and not in the profile, PySpike offers the functions: :code:`isi_distance_multi`, :code:`spike_distance_multi` and :code:`spike_sync_multi`, that return the scalar overall multivariate ISI- and SPIKE-distance as well as the SPIKE-Synchronization value.
+As before, if you are only interested in the distance values, and not in the profile, PySpike offers the functions: :func:`.isi_distance_multi`, :func:`.spike_distance_multi` and :func:`.spike_sync_multi`, that return the scalar overall multivariate ISI- and SPIKE-distance as well as the SPIKE-Synchronization value. Those functions also accept an :code:`interval` parameter that can be used to specify the begin and end of the averaging interval as a pair of floats, if neglected the complete interval is used. Another option to characterize large sets of spike trains are distance matrices. Each entry in the distance matrix represents a bivariate distance (similarity for SPIKE-Synchronization) of two spike trains. -The distance matrix is symmetric and has zero values (ones) at the diagonal. +The distance matrix is symmetric and has zero values (ones) at the diagonal and is computed with the functions :func:`.isi_distance_matrix`, :func:`.spike_distance_matrix` and :func:`.spike_sync_matrix`. The following example computes and plots the ISI- and SPIKE-distance matrix as well as the SPIKE-Synchronization-matrix, with different intervals. .. code:: python diff --git a/doc/pyspike.rst b/doc/pyspike.rst index 6aa36e7..a6dc1a0 100644 --- a/doc/pyspike.rst +++ b/doc/pyspike.rst @@ -1,60 +1,64 @@ pyspike package =============== -Submodules ----------- -pyspike.isi_distance module +Classes ---------------------------------------- -.. automodule:: pyspike.isi_distance +SpikeTrain +........................................ +.. automodule:: pyspike.SpikeTrain :members: :undoc-members: :show-inheritance: -pyspike.spike_distance module ----------------------------------------- - -.. automodule:: pyspike.spike_distance +PieceWiseConstFunc +........................................ +.. automodule:: pyspike.PieceWiseConstFunc :members: :undoc-members: :show-inheritance: -pyspike.spike_sync module ----------------------------------------- - -.. automodule:: pyspike.spike_sync +PieceWiseLinFunc +........................................ +.. automodule:: pyspike.PieceWiseLinFunc :members: :undoc-members: :show-inheritance: -pyspike.PieceWiseConstFunc module ----------------------------------------- - -.. automodule:: pyspike.PieceWiseConstFunc +DiscreteFunc +........................................ +.. automodule:: pyspike.DiscreteFunc :members: :undoc-members: :show-inheritance: -pyspike.PieceWiseLinFunc module ----------------------------------------- +Functions +---------- -.. automodule:: pyspike.PieceWiseLinFunc +ISI-distance +........................................ +.. automodule:: pyspike.isi_distance :members: :undoc-members: :show-inheritance: -pyspike.DiscreteFunc module ----------------------------------------- - -.. automodule:: pyspike.DiscreteFunc +SPIKE-distance +........................................ +.. automodule:: pyspike.spike_distance :members: :undoc-members: :show-inheritance: -pyspike.spikes module ----------------------------------------- +SPIKE-synchronization +........................................ +.. automodule:: pyspike.spike_sync + :members: + :undoc-members: + :show-inheritance: +Helper functions +........................................ .. automodule:: pyspike.spikes :members: :undoc-members: diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index bd13e1f..33b7a81 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -1,11 +1,7 @@ -""" -Class representing discrete functions. +# Class representing discrete functions. 
+# Copyright 2014-2015, Mario Mulansky
+# Distributed under the BSD License

-Copyright 2014-2015, Mario Mulansky
-
-Distributed under the BSD License
-
-"""
 from __future__ import print_function

 import numpy as np

@@ -174,7 +170,7 @@ class DiscreteFunc(object):

     def avrg(self, interval=None):
         """ Computes the average of the interval sequence:
-        :math:`a = 1/N sum f_n` where N is the number of intervals.
+        :math:`a = 1/N \\sum f_n` where N is the number of intervals.

         :param interval: averaging interval given as a pair of floats, a
                          sequence of pairs for averaging multiple intervals, or
diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py
index dc57ab1..41998ef 100644
--- a/pyspike/PieceWiseConstFunc.py
+++ b/pyspike/PieceWiseConstFunc.py
@@ -1,11 +1,7 @@
-"""
-Class representing piece-wise constant functions.
+# Class representing piece-wise constant functions.
+# Copyright 2014-2015, Mario Mulansky
+# Distributed under the BSD License

-Copyright 2014-2015, Mario Mulansky
-
-Distributed under the BSD License
-
-"""
 from __future__ import print_function

 import numpy as np

@@ -103,7 +99,7 @@ class PieceWiseConstFunc(object):

     def avrg(self, interval=None):
         """ Computes the average of the piece-wise const function:
-        :math:`a = 1/T int_0^T f(x) dx` where T is the length of the interval.
+        :math:`a = 1/T \int_0^T f(x) dx` where T is the length of the interval.

         :param interval: averaging interval given as a pair of floats, a
                          sequence of pairs for averaging multiple intervals, or
diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py
index bc0aa2a..f2442be 100644
--- a/pyspike/PieceWiseLinFunc.py
+++ b/pyspike/PieceWiseLinFunc.py
@@ -1,11 +1,7 @@
-"""
-Class representing piece-wise linear functions.
+# Class representing piece-wise linear functions.
+# Copyright 2014-2015, Mario Mulansky
+# Distributed under the BSD License

-Copyright 2014-2015, Mario Mulansky
-
-Distributed under the BSD License
-
-"""
 from __future__ import print_function

 import numpy as np

@@ -123,7 +119,7 @@ class PieceWiseLinFunc:

     def avrg(self, interval=None):
         """ Computes the average of the piece-wise linear function:
-        :math:`a = 1/T int_0^T f(x) dx` where T is the length of the interval.
+        :math:`a = 1/T \int_0^T f(x) dx` where T is the interval length.

         :param interval: averaging interval given as a pair of floats, a
                          sequence of pairs for averaging multiple intervals, or
diff --git a/pyspike/SpikeTrain.py b/pyspike/SpikeTrain.py
index d586fe0..a02b7ab 100644
--- a/pyspike/SpikeTrain.py
+++ b/pyspike/SpikeTrain.py
@@ -1,9 +1,6 @@
-""" Module containing the class representing spike trains for PySpike.
-
-Copyright 2015, Mario Mulansky
-
-Distributed under the BSD License
-"""
+# Module containing the class representing spike trains for PySpike.
+# Copyright 2015, Mario Mulansky
+# Distributed under the BSD License

 import numpy as np

@@ -11,15 +8,22 @@ import numpy as np
 class SpikeTrain(object):
     """ Class representing spike trains for the PySpike Module."""

-    def __init__(self, spike_times, edges):
-        """ Constructs the SpikeTrain
+    def __init__(self, spike_times, edges, is_sorted=True):
+        """ Constructs the SpikeTrain.
+
         :param spike_times: ordered array of spike times.
         :param edges: The edges of the spike train. Given as a pair of floats
-                      (T0, T1) or a single float T1, where then T0=0 is assumed.
+                      (T0, T1) or a single float T1, where then T0=0 is
+                      assumed.
+        :param is_sorted: If `False`, the spike times will be sorted by `np.sort`.
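+
+        A short usage sketch with illustrative values::
+
+            # three spikes observed on the interval [0, 4]
+            st = SpikeTrain([1.0, 2.0, 3.0], edges=(0.0, 4.0))
+            # unsorted input; is_sorted=False triggers np.sort on construction
+            st2 = SpikeTrain([3.0, 1.0, 2.0], (0.0, 4.0), is_sorted=False)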
+ """ # TODO: sanity checks - self.spikes = np.array(spike_times, dtype=float) + if is_sorted: + self.spikes = np.array(spike_times, dtype=float) + else: + self.spikes = np.sort(np.array(spike_times, dtype=float)) try: self.t_start = float(edges[0]) @@ -27,3 +31,20 @@ class SpikeTrain(object): except: self.t_start = 0.0 self.t_end = float(edges) + + def sort(self): + """ Sorts the spike times of this spike train using `np.sort` + """ + self.spikes = np.sort(self.spikes) + + def copy(self): + """ Returns a copy of this spike train. + Use this function if you want to create a real (deep) copy of this + spike train. Simple assignment `t2 = t1` does not create a copy of the + spike train data, but a reference as `numpy.array` is used for storing + the data. + + :return: :class:`.SpikeTrain` copy of this spike train. + + """ + return SpikeTrain(self.spikes.copy(), [self.t_start, self.t_end]) diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index cb8ef54..aeab0df 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -1,11 +1,6 @@ -""" - -Module containing several functions to compute the ISI profiles and distances - -Copyright 2014-2015, Mario Mulansky - -Distributed under the BSD License -""" +# Module containing several functions to compute the ISI profiles and distances +# Copyright 2014-2015, Mario Mulansky +# Distributed under the BSD License from pyspike import PieceWiseConstFunc from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -15,16 +10,16 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix # isi_profile ############################################################ def isi_profile(spike_train1, spike_train2): - """ Computes the isi-distance profile :math:`S_{isi}(t)` of the two given - spike trains. Retruns the profile as a PieceWiseConstFunc object. The S_isi - values are defined positive S_isi(t)>=0. + """ Computes the isi-distance profile :math:`I(t)` of the two given + spike trains. Retruns the profile as a PieceWiseConstFunc object. The + ISI-values are defined positive :math:`I(t)>=0`. :param spike_train1: First spike train. - :type spike_train1: :class:`pyspike.SpikeTrain` + :type spike_train1: :class:`.SpikeTrain` :param spike_train2: Second spike train. - :type spike_train2: :class:`pyspike.SpikeTrain` - :returns: The isi-distance profile :math:`S_{isi}(t)` - :rtype: :class:`pyspike.function.PieceWiseConstFunc` + :type spike_train2: :class:`.SpikeTrain` + :returns: The isi-distance profile :math:`I(t)` + :rtype: :class:`.PieceWiseConstFunc` """ # check whether the spike trains are defined for the same interval @@ -54,20 +49,20 @@ Falling back to slow python backend.") # isi_distance ############################################################ def isi_distance(spike_train1, spike_train2, interval=None): - """ Computes the isi-distance I of the given spike trains. The + """ Computes the ISI-distance :math:`D_I` of the given spike trains. The isi-distance is the integral over the isi distance profile - :math:`S_{isi}(t)`: + :math:`I(t)`: - .. math:: I = \int_{T_0}^{T_1} S_{isi}(t) dt. + .. math:: D_I = \\int_{T_0}^{T_1} I(t) dt. :param spike_train1: First spike train. - :type spike_train1: :class:`pyspike.SpikeTrain` + :type spike_train1: :class:`.SpikeTrain` :param spike_train2: Second spike train. 
- :type spike_train2: :class:`pyspike.SpikeTrain` + :type spike_train2: :class:`.SpikeTrain` :param interval: averaging interval given as a pair of floats (T0, T1), if None the average over the whole function is computed. :type interval: Pair of floats or None. - :returns: The isi-distance I. + :returns: The isi-distance :math:`D_I`. :rtype: double """ return isi_profile(spike_train1, spike_train2).avrg(interval) @@ -79,15 +74,17 @@ def isi_distance(spike_train1, spike_train2, interval=None): def isi_profile_multi(spike_trains, indices=None): """ computes the multi-variate isi distance profile for a set of spike trains. That is the average isi-distance of all pairs of spike-trains: - S_isi(t) = 2/((N(N-1)) sum_{} S_{isi}^{i,j}, + + .. math:: = \\frac{2}{N(N-1)} \\sum_{} I^{i,j}, + where the sum goes over all pairs - :param spike_trains: list of :class:`pyspike.SpikeTrain` + :param spike_trains: list of :class:`.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type state: list or None - :returns: The averaged isi profile :math:`(t)` - :rtype: :class:`pyspike.function.PieceWiseConstFunc` + :returns: The averaged isi profile :math:`` + :rtype: :class:`.PieceWiseConstFunc` """ average_dist, M = _generic_profile_multi(spike_trains, isi_profile, indices) @@ -101,16 +98,18 @@ def isi_profile_multi(spike_trains, indices=None): def isi_distance_multi(spike_trains, indices=None, interval=None): """ computes the multi-variate isi-distance for a set of spike-trains. That is the time average of the multi-variate spike profile: - I = \int_0^T 2/((N(N-1)) sum_{} S_{isi}^{i,j}, + + .. math:: D_I = \\int_0^T \\frac{2}{N(N-1)} \\sum_{} I^{i,j}, + where the sum goes over all pairs - :param spike_trains: list of spike trains + :param spike_trains: list of :class:`.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :param interval: averaging interval given as a pair of floats, if None the average over the whole function is computed. :type interval: Pair of floats or None. - :returns: The time-averaged isi distance :math:`I` + :returns: The time-averaged multivariate ISI distance :math:`D_I` :rtype: double """ return isi_profile_multi(spike_trains, indices).avrg(interval) @@ -122,7 +121,7 @@ def isi_distance_multi(spike_trains, indices=None, interval=None): def isi_distance_matrix(spike_trains, indices=None, interval=None): """ Computes the time averaged isi-distance of all pairs of spike-trains. - :param spike_trains: list of :class:`pyspike.SpikeTrain` + :param spike_trains: list of :class:`.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None @@ -130,7 +129,7 @@ def isi_distance_matrix(spike_trains, indices=None, interval=None): the average over the whole function is computed. :type interval: Pair of floats or None. 
:returns: 2D array with the pair wise time average isi distances - :math:`I_{ij}` + :math:`D_{I}^{ij}` :rtype: np.array """ return _generic_distance_matrix(spike_trains, isi_distance, diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index 8d03d70..cc620d4 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -1,11 +1,6 @@ -""" - -Module containing several functions to compute SPIKE profiles and distances - -Copyright 2014-2015, Mario Mulansky - -Distributed under the BSD License -""" +# Module containing several functions to compute SPIKE profiles and distances +# Copyright 2014-2015, Mario Mulansky +# Distributed under the BSD License from pyspike import PieceWiseLinFunc from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -15,16 +10,16 @@ from pyspike.generic import _generic_profile_multi, _generic_distance_matrix # spike_profile ############################################################ def spike_profile(spike_train1, spike_train2): - """ Computes the spike-distance profile S_spike(t) of the two given spike - trains. Returns the profile as a PieceWiseLinFunc object. The S_spike - values are defined positive S_spike(t)>=0. + """ Computes the spike-distance profile :math:`S(t)` of the two given spike + trains. Returns the profile as a PieceWiseLinFunc object. The SPIKE-values + are defined positive :math:`S(t)>=0`. :param spike_train1: First spike train. - :type spike_train1: :class:`pyspike.SpikeTrain` + :type spike_train1: :class:`.SpikeTrain` :param spike_train2: Second spike train. - :type spike_train2: :class:`pyspike.SpikeTrain` - :returns: The spike-distance profile :math:`S_{spike}(t)`. - :rtype: :class:`pyspike.function.PieceWiseLinFunc` + :type spike_train2: :class:`.SpikeTrain` + :returns: The spike-distance profile :math:`S(t)`. + :rtype: :class:`.PieceWiseLinFunc` """ # check whether the spike trains are defined for the same interval @@ -56,15 +51,15 @@ Falling back to slow python backend.") # spike_distance ############################################################ def spike_distance(spike_train1, spike_train2, interval=None): - """ Computes the spike-distance S of the given spike trains. The - spike-distance is the integral over the isi distance profile S_spike(t): + """ Computes the spike-distance :math:`D_S` of the given spike trains. The + spike-distance is the integral over the isi distance profile :math:`S(t)`: - .. math:: S = \int_{T_0}^{T_1} S_{spike}(t) dt. + .. math:: D_S = \int_{T_0}^{T_1} S(t) dt. :param spike_train1: First spike train. - :type spike_train1: :class:`pyspike.SpikeTrain` + :type spike_train1: :class:`.SpikeTrain` :param spike_train2: Second spike train. - :type spike_train2: :class:`pyspike.SpikeTrain` + :type spike_train2: :class:`.SpikeTrain` :param interval: averaging interval given as a pair of floats (T0, T1), if None the average over the whole function is computed. :type interval: Pair of floats or None. @@ -81,15 +76,17 @@ def spike_distance(spike_train1, spike_train2, interval=None): def spike_profile_multi(spike_trains, indices=None): """ Computes the multi-variate spike distance profile for a set of spike trains. That is the average spike-distance of all pairs of spike-trains: - :math:`S_spike(t) = 2/((N(N-1)) sum_{} S_{spike}^{i, j}`, + + .. 
math:: = \\frac{2}{N(N-1)} \\sum_{} S^{i, j}`, + where the sum goes over all pairs - :param spike_trains: list of spike trains + :param spike_trains: list of :class:`.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None - :returns: The averaged spike profile :math:`(t)` - :rtype: :class:`pyspike.function.PieceWiseLinFunc` + :returns: The averaged spike profile :math:`(t)` + :rtype: :class:`.PieceWiseLinFunc` """ average_dist, M = _generic_profile_multi(spike_trains, spike_profile, @@ -104,17 +101,20 @@ def spike_profile_multi(spike_trains, indices=None): def spike_distance_multi(spike_trains, indices=None, interval=None): """ Computes the multi-variate spike distance for a set of spike trains. That is the time average of the multi-variate spike profile: - S_{spike} = \int_0^T 2/((N(N-1)) sum_{} S_{spike}^{i, j} dt + + .. math:: D_S = \\int_0^T \\frac{2}{N(N-1)} \\sum_{} + S^{i, j} dt + where the sum goes over all pairs - :param spike_trains: list of :class:`pyspike.SpikeTrain` + :param spike_trains: list of :class:`.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None :param interval: averaging interval given as a pair of floats, if None the average over the whole function is computed. :type interval: Pair of floats or None. - :returns: The averaged spike distance S. + :returns: The averaged multi-variate spike distance :math:`D_S`. :rtype: double """ return spike_profile_multi(spike_trains, indices).avrg(interval) @@ -126,7 +126,7 @@ def spike_distance_multi(spike_trains, indices=None, interval=None): def spike_distance_matrix(spike_trains, indices=None, interval=None): """ Computes the time averaged spike-distance of all pairs of spike-trains. - :param spike_trains: list of :class:`pyspike.SpikeTrain` + :param spike_trains: list of :class:`.SpikeTrain` :param indices: list of indices defining which spike trains to use, if None all given spike trains are used (default=None) :type indices: list or None @@ -134,7 +134,7 @@ def spike_distance_matrix(spike_trains, indices=None, interval=None): the average over the whole function is computed. :type interval: Pair of floats or None. :returns: 2D array with the pair wise time average spike distances - :math:`S_{ij}` + :math:`D_S^{ij}` :rtype: np.array """ return _generic_distance_matrix(spike_trains, spike_distance, diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 8ddd32c..9d2e363 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -1,12 +1,7 @@ -""" - -Module containing several functions to compute SPIKE-Synchronization profiles -and distances - -Copyright 2014-2015, Mario Mulansky - -Distributed under the BSD License -""" +# Module containing several functions to compute SPIKE-Synchronization profiles +# and distances +# Copyright 2014-2015, Mario Mulansky +# Distributed under the BSD License from functools import partial from pyspike import DiscreteFunc @@ -27,7 +22,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): :param spike_train2: Second spike train. :type spike_train2: :class:`pyspike.SpikeTrain` :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. + coincidence window has no upper bound. :returns: The spike-distance profile :math:`S_{sync}(t)`. 
:rtype: :class:`pyspike.function.DiscreteFunction` @@ -77,12 +72,13 @@ def spike_sync(spike_train1, spike_train2, interval=None, max_tau=None): :param spike_train2: Second spike train. :type spike_train2: :class:`pyspike.SpikeTrain` :param interval: averaging interval given as a pair of floats (T0, T1), - if None the average over the whole function is computed. + if `None` the average over the whole function is computed. :type interval: Pair of floats or None. :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. + coincidence window has no upper bound. :returns: The spike synchronization value. - :rtype: double + :rtype: `double` + """ return spike_sync_profile(spike_train1, spike_train2, max_tau).avrg(interval) @@ -103,7 +99,7 @@ def spike_sync_profile_multi(spike_trains, indices=None, max_tau=None): if None all given spike trains are used (default=None) :type indices: list or None :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. + coincidence window has no upper bound. :returns: The multi-variate spike sync profile :math:`(t)` :rtype: :class:`pyspike.function.DiscreteFunction` @@ -130,9 +126,10 @@ def spike_sync_multi(spike_trains, indices=None, interval=None, max_tau=None): the average over the whole function is computed. :type interval: Pair of floats or None. :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. + coincidence window has no upper bound. :returns: The multi-variate spike synchronization value SYNC. :rtype: double + """ return spike_sync_profile_multi(spike_trains, indices, max_tau).avrg(interval) @@ -153,10 +150,11 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None): the average over the whole function is computed. :type interval: Pair of floats or None. :param max_tau: Maximum coincidence window size. If 0 or `None`, the - coincidence window has no upper bound. + coincidence window has no upper bound. :returns: 2D array with the pair wise time spike synchronization values :math:`SYNC_{ij}` :rtype: np.array + """ dist_func = partial(spike_sync, max_tau=max_tau) return _generic_distance_matrix(spike_trains, dist_func, diff --git a/pyspike/spikes.py b/pyspike/spikes.py index 9401b6e..35d8533 100644 --- a/pyspike/spikes.py +++ b/pyspike/spikes.py @@ -1,11 +1,6 @@ -""" spikes.py - -Module containing several function to load and transform spike trains - -Copyright 2014, Mario Mulansky - -Distributed under the BSD License -""" +# Module containing several function to load and transform spike trains +# Copyright 2014, Mario Mulansky +# Distributed under the BSD License import numpy as np from pyspike import SpikeTrain @@ -15,21 +10,18 @@ from pyspike import SpikeTrain # spike_train_from_string ############################################################ def spike_train_from_string(s, edges, sep=' ', is_sorted=False): - """ Converts a string of times into a :class:`pyspike.SpikeTrain`. + """ Converts a string of times into a :class:`.SpikeTrain`. :param s: the string with (ordered) spike times. :param edges: interval defining the edges of the spike train. - Given as a pair of floats (T0, T1) or a single float T1, where T0=0 is - assumed. + Given as a pair of floats (T0, T1) or a single float T1, + where T0=0 is assumed. :param sep: The separator between the time numbers, default=' '. 
:param is_sorted: if True, the spike times are not sorted after loading, if False, spike times are sorted with `np.sort` - :returns: :class:`pyspike.SpikeTrain` + :returns: :class:`.SpikeTrain` """ - if not(is_sorted): - return SpikeTrain(np.sort(np.fromstring(s, sep=sep)), edges) - else: - return SpikeTrain(np.fromstring(s, sep=sep), edges) + return SpikeTrain(np.fromstring(s, sep=sep), edges, is_sorted) ############################################################ @@ -40,7 +32,7 @@ def load_spike_trains_from_txt(file_name, edges, """ Loads a number of spike trains from a text file. Each line of the text file should contain one spike train as a sequence of spike times separated by `separator`. Empty lines as well as lines starting with `comment` are - neglected. The `interval` represents the start and the end of the + neglected. The `edges` represents the start and the end of the spike trains. :param file_name: The name of the text file. @@ -51,7 +43,7 @@ def load_spike_trains_from_txt(file_name, edges, :param separator: The character used to seprate the values in the text file :param comment: Lines starting with this character are ignored. :param sort: If true, the spike times are order via `np.sort`, default=True - :returns: list of spike trains + :returns: list of :class:`.SpikeTrain` """ spike_trains = [] spike_file = open(file_name, 'r') @@ -70,7 +62,7 @@ def load_spike_trains_from_txt(file_name, edges, def merge_spike_trains(spike_trains): """ Merges a number of spike trains into a single spike train. - :param spike_trains: list of arrays of spike times + :param spike_trains: list of :class:`.SpikeTrain` :returns: spike train with the merged spike times """ # get the lengths of the spike trains @@ -110,7 +102,7 @@ def generate_poisson_spikes(rate, interval): to the spike train at the beginning and end of this interval, if they are not yet present. :type interval: pair of doubles or double - :returns: Poisson spike train as a :class:`pyspike.SpikeTrain` + :returns: Poisson spike train as a :class:`.SpikeTrain` """ try: T_start = interval[0] -- cgit v1.2.3 From 6f418a5a837939b132967bcdb3ff0ede6d899bd2 Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 12 May 2015 18:45:03 +0200 Subject: +functions to obtain values of the pwc/pwl profile Added __call__ operators to PieceWiseConst and PieceWiseLin class for obtaining function values at certain points in time. --- pyspike/PieceWiseConstFunc.py | 22 ++++++++++++++++++++++ pyspike/PieceWiseLinFunc.py | 30 ++++++++++++++++++++++++++++++ test/test_function.py | 32 +++++++++++++++++++++++++++++++- 3 files changed, 83 insertions(+), 1 deletion(-) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 41998ef..cf64e58 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -26,6 +26,28 @@ class PieceWiseConstFunc(object): self.x = np.array(x) self.y = np.array(y) + def __call__(self, t): + """ Returns the function value for the given time t. If t is a list of + times, the corresponding list of values is returned. + + :param: time t, or list of times + :returns: function value(s) at that time(s). 
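+
+        A short usage sketch; the values are illustrative and follow from
+        the definition below::
+
+            f = PieceWiseConstFunc([0.0, 1.0, 2.0], [5.0, 3.0])
+            f(0.5)         # 5.0, the value of the first interval
+            f([0.5, 1.5])  # array([ 5.,  3.])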
+ """ + assert np.all(t >= self.x[0]) and np.all(t <= self.x[-1]), \ + "Invalid time: " + str(t) + + ind = np.searchsorted(self.x, t, side='right') + # correct the cases t == x[0], t == x[-1] + try: + ind[ind == 0] = 1 + ind[ind == len(self.x)] = len(self.x)-1 + except TypeError: + if ind == 0: + ind = 1 + if ind == len(self.x): + ind = len(self.x)-1 + return self.y[ind-1] + def copy(self): """ Returns a copy of itself diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index f2442be..b9787eb 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -29,6 +29,36 @@ class PieceWiseLinFunc: self.y1 = np.array(y1) self.y2 = np.array(y2) + def __call__(self, t): + """ Returns the function value for the given time t. If t is a list of + times, the corresponding list of values is returned. + + :param: time t, or list of times + :returns: function value(s) at that time(s). + """ + def intermediate_value(x0, x1, y0, y1, x): + """ computes the intermediate value of a linear function """ + return y0 + (y1-y0)*(x-x0)/(x1-x0) + + assert np.all(t >= self.x[0]) and np.all(t <= self.x[-1]), \ + "Invalid time: " + str(t) + + ind = np.searchsorted(self.x, t, side='right') + # correct the cases t == x[0], t == x[-1] + try: + ind[ind == 0] = 1 + ind[ind == len(self.x)] = len(self.x)-1 + except TypeError: + if ind == 0: + ind = 1 + if ind == len(self.x): + ind = len(self.x)-1 + return intermediate_value(self.x[ind-1], + self.x[ind], + self.y1[ind-1], + self.y2[ind-1], + t) + def copy(self): """ Returns a copy of itself diff --git a/test/test_function.py b/test/test_function.py index d81b03a..c56a295 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -10,7 +10,8 @@ Distributed under the BSD License from __future__ import print_function import numpy as np from copy import copy -from numpy.testing import assert_almost_equal, assert_array_almost_equal +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal, assert_array_almost_equal import pyspike as spk @@ -20,6 +21,17 @@ def test_pwc(): x = [0.0, 1.0, 2.0, 2.5, 4.0] y = [1.0, -0.5, 1.5, 0.75] f = spk.PieceWiseConstFunc(x, y) + + # function values + assert_equal(f(0.0), 1.0) + assert_equal(f(0.5), 1.0) + assert_equal(f(2.25), 1.5) + assert_equal(f(3.5), 0.75) + assert_equal(f(4.0), 0.75) + + assert_array_equal(f([0.0, 0.5, 2.25, 3.5, 4.0]), + [1.0, 1.0, 1.5, 0.75, 0.75]) + xp, yp = f.get_plottable_data() xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0] @@ -38,11 +50,17 @@ def test_pwc(): assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16) a = f.avrg([1.0, 4.0]) assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.5*0.75)/3.0, decimal=16) + a = f.avrg([0.0, 2.2]) + assert_almost_equal(a, (1.0*1.0-0.5*1.0+0.2*1.5)/2.2, decimal=15) # averaging over multiple intervals a = f.avrg([(0.5, 1.5), (1.5, 3.5)]) assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16) + # averaging over multiple intervals + a = f.avrg([(0.5, 1.5), (2.2, 3.5)]) + assert_almost_equal(a, (0.5*1.0-0.5*0.5+0.3*1.5+1.0*0.75)/2.3, decimal=15) + def test_pwc_add(): # some random data @@ -105,6 +123,18 @@ def test_pwl(): y1 = [1.0, -0.5, 1.5, 0.75] y2 = [1.5, -0.4, 1.5, 0.25] f = spk.PieceWiseLinFunc(x, y1, y2) + + # function values + assert_equal(f(0.0), 1.0) + assert_equal(f(0.5), 1.25) + assert_equal(f(2.25), 1.5) + assert_equal(f(2.5), 0.75) + assert_equal(f(3.5), 0.75-0.5*1.0/1.5) + assert_equal(f(4.0), 0.25) + + assert_array_equal(f([0.0, 0.5, 2.25, 2.5, 3.5, 4.0]), + [1.0, 1.25, 1.5, 0.75, 
0.75-0.5*1.0/1.5, 0.25]) + xp, yp = f.get_plottable_data() xp_expected = [0.0, 1.0, 1.0, 2.0, 2.0, 2.5, 2.5, 4.0] -- cgit v1.2.3 From 8841138b74242ed9eb77c972c76e9a617778a79a Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Wed, 13 May 2015 18:19:02 +0200 Subject: pwc function now returns intermediate value at exact spike times --- pyspike/PieceWiseConstFunc.py | 35 +++++++++++++++++++++++++++-------- test/test_function.py | 6 ++++-- 2 files changed, 31 insertions(+), 10 deletions(-) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index cf64e58..dea1a56 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -37,16 +37,35 @@ class PieceWiseConstFunc(object): "Invalid time: " + str(t) ind = np.searchsorted(self.x, t, side='right') - # correct the cases t == x[0], t == x[-1] - try: + if isinstance(t, collections.Sequence): + # t is a sequence of values + # correct the cases t == x[0], t == x[-1] ind[ind == 0] = 1 ind[ind == len(self.x)] = len(self.x)-1 - except TypeError: - if ind == 0: - ind = 1 - if ind == len(self.x): - ind = len(self.x)-1 - return self.y[ind-1] + value = self.y[ind-1] + # correct the values at exact spike times: there the value should + # be the at half of the step + # obtain the 'left' side indices for t + ind_l = np.searchsorted(self.x, t, side='left') + # if left and right side indices differ, the time t has to appear + # in self.x + ind_at_spike = ind[np.logical_and(np.logical_and(ind != ind_l, + ind > 1), + ind < len(self.x))] + value[ind_at_spike] = 0.5 * (self.y[ind_at_spike-1] + + self.y[ind_at_spike-2]) + return value + else: + # specific check for interval edges + if t == self.x[0]: + return self.y[0] + if t == self.x[-1]: + return self.y[-1] + # check if we are on any other exact spike time + if sum(self.x == t) > 0: + # use the middle of the left and right ISI value + return 0.5 * (self.y[ind-1] + self.y[ind-2]) + return self.y[ind-1] def copy(self): """ Returns a copy of itself diff --git a/test/test_function.py b/test/test_function.py index c56a295..8ad4b17 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -25,12 +25,14 @@ def test_pwc(): # function values assert_equal(f(0.0), 1.0) assert_equal(f(0.5), 1.0) + assert_equal(f(1.0), 0.25) assert_equal(f(2.25), 1.5) + assert_equal(f(2.5), 2.25/2) assert_equal(f(3.5), 0.75) assert_equal(f(4.0), 0.75) - assert_array_equal(f([0.0, 0.5, 2.25, 3.5, 4.0]), - [1.0, 1.0, 1.5, 0.75, 0.75]) + assert_array_equal(f([0.0, 0.5, 1.0, 2.25, 2.5, 3.5, 4.0]), + [1.0, 1.0, 0.25, 1.5, 2.25/2, 0.75, 0.75]) xp, yp = f.get_plottable_data() -- cgit v1.2.3 From a61a14295e28e6e95fa510693a11ae8c78a552ab Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Sun, 17 May 2015 17:50:39 +0200 Subject: return correct values at exact spike times pwc and pwl function object return the average of the left and right limit as function value at the exact spike times. 
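Taken together, the two commits above give the profile classes a call operator
whose value at an exact spike time is the average of the left and right limit.
A minimal usage sketch, with support points and expected values taken from
test/test_function.py (assuming a PySpike build that includes these commits)::

    import pyspike as spk

    x = [0.0, 1.0, 2.0, 2.5, 4.0]   # edges of the intervals
    y = [1.0, -0.5, 1.5, 0.75]      # constant value on each interval
    f = spk.PieceWiseConstFunc(x, y)

    f(0.5)              # 1.0, the value of the surrounding interval
    f(1.0)              # 0.25 = 0.5*(1.0 + (-0.5)), average of left/right limit
    f([0.5, 2.5, 4.0])  # array([1.0, 1.125, 0.75]), lists of times also work
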
--- pyspike/PieceWiseConstFunc.py | 15 ++++++++----- pyspike/PieceWiseLinFunc.py | 51 +++++++++++++++++++++++++++++++++---------- test/test_function.py | 13 ++++++----- 3 files changed, 56 insertions(+), 23 deletions(-) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index dea1a56..6d7a845 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -49,13 +49,16 @@ class PieceWiseConstFunc(object): ind_l = np.searchsorted(self.x, t, side='left') # if left and right side indices differ, the time t has to appear # in self.x - ind_at_spike = ind[np.logical_and(np.logical_and(ind != ind_l, - ind > 1), - ind < len(self.x))] - value[ind_at_spike] = 0.5 * (self.y[ind_at_spike-1] + - self.y[ind_at_spike-2]) + ind_at_spike = np.logical_and(np.logical_and(ind != ind_l, + ind > 1), + ind < len(self.x)) + # get the corresponding indices for the resulting value array + val_ind = np.arange(len(ind))[ind_at_spike] + # and for the arrays self.x, y1, y2 + xy_ind = ind[ind_at_spike] + value[val_ind] = 0.5 * (self.y[xy_ind-1] + self.y[xy_ind-2]) return value - else: + else: # t is a single value # specific check for interval edges if t == self.x[0]: return self.y[0] diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index b9787eb..03c2da2 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -44,20 +44,47 @@ class PieceWiseLinFunc: "Invalid time: " + str(t) ind = np.searchsorted(self.x, t, side='right') - # correct the cases t == x[0], t == x[-1] - try: + if isinstance(t, collections.Sequence): + # t is a sequence of values + # correct the cases t == x[0], t == x[-1] ind[ind == 0] = 1 ind[ind == len(self.x)] = len(self.x)-1 - except TypeError: - if ind == 0: - ind = 1 - if ind == len(self.x): - ind = len(self.x)-1 - return intermediate_value(self.x[ind-1], - self.x[ind], - self.y1[ind-1], - self.y2[ind-1], - t) + value = intermediate_value(self.x[ind-1], + self.x[ind], + self.y1[ind-1], + self.y2[ind-1], + t) + # correct the values at exact spike times: there the value should + # be the at half of the step + # obtain the 'left' side indices for t + ind_l = np.searchsorted(self.x, t, side='left') + # if left and right side indices differ, the time t has to appear + # in self.x + ind_at_spike = np.logical_and(np.logical_and(ind != ind_l, + ind > 1), + ind < len(self.x)) + # get the corresponding indices for the resulting value array + val_ind = np.arange(len(ind))[ind_at_spike] + # and for the values in self.x, y1, y2 + xy_ind = ind[ind_at_spike] + # the values are defined as the average of the left and right limit + value[val_ind] = 0.5 * (self.y1[xy_ind-1] + self.y2[xy_ind-2]) + return value + else: # t is a single value + # specific check for interval edges + if t == self.x[0]: + return self.y1[0] + if t == self.x[-1]: + return self.y2[-1] + # check if we are on any other exact spike time + if sum(self.x == t) > 0: + # use the middle of the left and right Spike value + return 0.5 * (self.y1[ind-1] + self.y2[ind-2]) + return intermediate_value(self.x[ind-1], + self.x[ind], + self.y1[ind-1], + self.y2[ind-1], + t) def copy(self): """ Returns a copy of itself diff --git a/test/test_function.py b/test/test_function.py index 8ad4b17..92d378d 100644 --- a/test/test_function.py +++ b/test/test_function.py @@ -26,13 +26,14 @@ def test_pwc(): assert_equal(f(0.0), 1.0) assert_equal(f(0.5), 1.0) assert_equal(f(1.0), 0.25) + assert_equal(f(2.0), 0.5) assert_equal(f(2.25), 1.5) 
assert_equal(f(2.5), 2.25/2) assert_equal(f(3.5), 0.75) assert_equal(f(4.0), 0.75) - assert_array_equal(f([0.0, 0.5, 1.0, 2.25, 2.5, 3.5, 4.0]), - [1.0, 1.0, 0.25, 1.5, 2.25/2, 0.75, 0.75]) + assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]), + [1.0, 1.0, 0.25, 0.5, 1.5, 2.25/2, 0.75, 0.75]) xp, yp = f.get_plottable_data() @@ -129,13 +130,15 @@ def test_pwl(): # function values assert_equal(f(0.0), 1.0) assert_equal(f(0.5), 1.25) + assert_equal(f(1.0), 0.5) + assert_equal(f(2.0), 1.1/2) assert_equal(f(2.25), 1.5) - assert_equal(f(2.5), 0.75) + assert_equal(f(2.5), 2.25/2) assert_equal(f(3.5), 0.75-0.5*1.0/1.5) assert_equal(f(4.0), 0.25) - assert_array_equal(f([0.0, 0.5, 2.25, 2.5, 3.5, 4.0]), - [1.0, 1.25, 1.5, 0.75, 0.75-0.5*1.0/1.5, 0.25]) + assert_array_equal(f([0.0, 0.5, 1.0, 2.0, 2.25, 2.5, 3.5, 4.0]), + [1.0, 1.25, 0.5, 0.55, 1.5, 2.25/2, 0.75-0.5/1.5, 0.25]) xp, yp = f.get_plottable_data() -- cgit v1.2.3 From c8f524db6e1add464aa3596ea46534c21096589e Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Mon, 18 May 2015 15:08:24 +0200 Subject: cosmetics --- pyspike/PieceWiseConstFunc.py | 2 ++ pyspike/PieceWiseLinFunc.py | 1 + 2 files changed, 3 insertions(+) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 6d7a845..2705443 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -37,6 +37,7 @@ class PieceWiseConstFunc(object): "Invalid time: " + str(t) ind = np.searchsorted(self.x, t, side='right') + if isinstance(t, collections.Sequence): # t is a sequence of values # correct the cases t == x[0], t == x[-1] @@ -56,6 +57,7 @@ class PieceWiseConstFunc(object): val_ind = np.arange(len(ind))[ind_at_spike] # and for the arrays self.x, y1, y2 xy_ind = ind[ind_at_spike] + # use the middle of the left and right ISI value value[val_ind] = 0.5 * (self.y[xy_ind-1] + self.y[xy_ind-2]) return value else: # t is a single value diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 03c2da2..c0dd475 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -44,6 +44,7 @@ class PieceWiseLinFunc: "Invalid time: " + str(t) ind = np.searchsorted(self.x, t, side='right') + if isinstance(t, collections.Sequence): # t is a sequence of values # correct the cases t == x[0], t == x[-1] -- cgit v1.2.3 From 5119d47d0f00c3f7203cf94460730b59a7e473ec Mon Sep 17 00:00:00 2001 From: Mario Mulansky Date: Tue, 7 Jul 2015 18:55:32 +0200 Subject: add disable_backend_warning property Users can now disable the warning messages produced when the cython backend is not available by writing spk.disable_backend_warning = True in the beginning --- examples/performance.py | 3 +++ examples/plot.py | 1 + pyspike/DiscreteFunc.py | 4 +++- pyspike/PieceWiseConstFunc.py | 7 +++++-- pyspike/PieceWiseLinFunc.py | 9 ++++++--- pyspike/__init__.py | 2 ++ pyspike/isi_distance.py | 6 ++++-- pyspike/spike_distance.py | 4 +++- pyspike/spike_sync.py | 4 +++- 9 files changed, 30 insertions(+), 10 deletions(-) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/examples/performance.py b/examples/performance.py index 1c31e8f..d0c3b91 100644 --- a/examples/performance.py +++ b/examples/performance.py @@ -14,6 +14,9 @@ from datetime import datetime import cProfile import pstats +# in case you dont have the cython backends, disable the warnings as follows: +# spk.disable_backend_warning = True + M = 100 # number of spike trains r = 1.0 # rate of Poisson spike times T = 1E3 # length of 
spike trains diff --git a/examples/plot.py b/examples/plot.py index c44afd1..1922939 100644 --- a/examples/plot.py +++ b/examples/plot.py @@ -16,6 +16,7 @@ import matplotlib.pyplot as plt import pyspike as spk + spike_trains = spk.load_spike_trains_from_txt("PySpike_testdata.txt", edges=(0, 4000)) diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 17153ee..a8c054e 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -202,7 +203,8 @@ class DiscreteFunc(object): from cython.cython_add import add_discrete_function_cython as \ add_discrete_function_impl except ImportError: - print("Warning: add_discrete_function_cython not found. Make \ + if not(pyspike.disable_backend_warning): + print("Warning: add_discrete_function_cython not found. Make \ sure that PySpike is installed by running\n\ 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 2705443..23ff536 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -191,8 +192,10 @@ class PieceWiseConstFunc(object): from cython.cython_add import add_piece_wise_const_cython as \ add_piece_wise_const_impl except ImportError: - print("Warning: add_piece_wise_const_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ + if not(pyspike.disable_backend_warning): + print("Warning: add_piece_wise_const_cython not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend from cython.python_backend import add_piece_wise_const_python as \ diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index c0dd475..0d51c76 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -6,6 +6,7 @@ from __future__ import print_function import numpy as np import collections +import pyspike ############################################################## @@ -230,9 +231,11 @@ class PieceWiseLinFunc: from cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: - print("Warning: add_piece_wise_lin_cython not found. Make sure \ -that PySpike is installed by running\n 'python setup.py build_ext --inplace'! \ -\n Falling back to slow python backend.") + if not(pyspike.disable_backend_warning): + print("Warning: add_piece_wise_lin_cython not found. Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'! 
\n \ +Falling back to slow python backend.") # use python backend from cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 3e836bd..2060f73 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -42,3 +42,5 @@ except DistributionNotFound: __version__ = 'Please install this project with setup.py' else: __version__ = _dist.version + +disable_backend_warning = False diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index 5ea555d..e50f203 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -2,6 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +import pyspike from pyspike import PieceWiseConstFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ _generic_distance_matrix @@ -34,8 +35,9 @@ def isi_profile(spike_train1, spike_train2): from cython.cython_profiles import isi_profile_cython \ as isi_profile_impl except ImportError: - print("Warning: isi_distance_cython not found. Make sure that PySpike \ -is installed by running\n 'python setup.py build_ext --inplace'!\n \ + if not(pyspike.disable_backend_warning): + print("Warning: isi_profile_cython not found. Make sure that \ +PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend from cython.python_backend import isi_distance_python \ diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index dd6d4f8..feea0c1 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -2,6 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +import pyspike from pyspike import PieceWiseLinFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ _generic_distance_matrix @@ -34,7 +35,8 @@ def spike_profile(spike_train1, spike_train2): from cython.cython_profiles import spike_profile_cython \ as spike_profile_impl except ImportError: - print("Warning: spike_profile_cython not found. Make sure that \ + if not(pyspike.disable_backend_warning): + print("Warning: spike_profile_cython not found. Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 40d98d2..10ebdc7 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -5,6 +5,7 @@ import numpy as np from functools import partial +import pyspike from pyspike import DiscreteFunc from pyspike.generic import _generic_profile_multi, _generic_distance_matrix @@ -39,7 +40,8 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): from cython.cython_profiles import coincidence_profile_cython \ as coincidence_profile_impl except ImportError: - print("Warning: spike_distance_cython not found. Make sure that \ + if not(pyspike.disable_backend_warning): + print("Warning: spike_distance_cython not found. 
Make sure that \ PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend -- cgit v1.2.3 From eeb4918ec2181f136e85bce976ec46a35a74b8f1 Mon Sep 17 00:00:00 2001 From: Igor Gnatenko Date: Sun, 13 Dec 2015 10:55:30 +0100 Subject: py3: absolute_import Signed-off-by: Igor Gnatenko --- pyspike/DiscreteFunc.py | 6 +++--- pyspike/PieceWiseConstFunc.py | 6 +++--- pyspike/PieceWiseLinFunc.py | 8 ++++---- pyspike/__init__.py | 22 ++++++++++++---------- pyspike/directionality/__init__.py | 4 +++- pyspike/directionality/spike_train_order.py | 12 +++++++----- pyspike/isi_distance.py | 8 +++++--- pyspike/spike_distance.py | 8 +++++--- pyspike/spike_sync.py | 8 +++++--- 9 files changed, 47 insertions(+), 35 deletions(-) (limited to 'pyspike/PieceWiseConstFunc.py') diff --git a/pyspike/DiscreteFunc.py b/pyspike/DiscreteFunc.py index 9cc7bd5..55c0bc8 100644 --- a/pyspike/DiscreteFunc.py +++ b/pyspike/DiscreteFunc.py @@ -2,7 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License -from __future__ import print_function +from __future__ import absolute_import, print_function import numpy as np import collections @@ -206,7 +206,7 @@ expected." # cython version try: - from cython.cython_add import add_discrete_function_cython as \ + from .cython.cython_add import add_discrete_function_cython as \ add_discrete_function_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -215,7 +215,7 @@ sure that PySpike is installed by running\n\ 'python setup.py build_ext --inplace'! \ \n Falling back to slow python backend.") # use python backend - from cython.python_backend import add_discrete_function_python as \ + from .cython.python_backend import add_discrete_function_python as \ add_discrete_function_impl self.x, self.y, self.mp = \ diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 23ff536..5ce5f27 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -2,7 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License -from __future__ import print_function +from __future__ import absolute_import, print_function import numpy as np import collections @@ -189,7 +189,7 @@ class PieceWiseConstFunc(object): # cython version try: - from cython.cython_add import add_piece_wise_const_cython as \ + from .cython.cython_add import add_piece_wise_const_cython as \ add_piece_wise_const_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -198,7 +198,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'! 
\ \n Falling back to slow python backend.") # use python backend - from cython.python_backend import add_piece_wise_const_python as \ + from .cython.python_backend import add_piece_wise_const_python as \ add_piece_wise_const_impl self.x, self.y = add_piece_wise_const_impl(self.x, self.y, f.x, f.y) diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 0d51c76..8145e63 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -2,7 +2,7 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License -from __future__ import print_function +from __future__ import absolute_import, print_function import numpy as np import collections @@ -222,13 +222,13 @@ class PieceWiseLinFunc: assert self.x[-1] == f.x[-1], "The functions have different intervals" # python implementation - # from python_backend import add_piece_wise_lin_python + # from .python_backend import add_piece_wise_lin_python # self.x, self.y1, self.y2 = add_piece_wise_lin_python( # self.x, self.y1, self.y2, f.x, f.y1, f.y2) # cython version try: - from cython.cython_add import add_piece_wise_lin_cython as \ + from .cython.cython_add import add_piece_wise_lin_cython as \ add_piece_wise_lin_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -237,7 +237,7 @@ sure that PySpike is installed by running\n \ 'python setup.py build_ext --inplace'! \n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import add_piece_wise_lin_python as \ + from .cython.python_backend import add_piece_wise_lin_python as \ add_piece_wise_lin_impl self.x, self.y1, self.y2 = add_piece_wise_lin_impl( diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 8d92ea4..335b1d3 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -4,27 +4,29 @@ Copyright 2014-2015, Mario Mulansky Distributed under the BSD License """ +from __future__ import absolute_import + __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc", "directionality"] -from PieceWiseConstFunc import PieceWiseConstFunc -from PieceWiseLinFunc import PieceWiseLinFunc -from DiscreteFunc import DiscreteFunc -from SpikeTrain import SpikeTrain +from .PieceWiseConstFunc import PieceWiseConstFunc +from .PieceWiseLinFunc import PieceWiseLinFunc +from .DiscreteFunc import DiscreteFunc +from .SpikeTrain import SpikeTrain -from isi_distance import isi_profile, isi_distance, isi_profile_multi,\ +from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ isi_distance_multi, isi_distance_matrix -from spike_distance import spike_profile, spike_distance, spike_profile_multi,\ +from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix -from spike_sync import spike_sync_profile, spike_sync,\ +from .spike_sync import spike_sync_profile, spike_sync,\ spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix -from psth import psth +from .psth import psth -from spikes import load_spike_trains_from_txt, spike_train_from_string, \ +from .spikes import load_spike_trains_from_txt, spike_train_from_string, \ merge_spike_trains, generate_poisson_spikes -import directionality as drct +from . 
import directionality as drct # define the __version__ following # http://stackoverflow.com/questions/17583443 diff --git a/pyspike/directionality/__init__.py b/pyspike/directionality/__init__.py index 6f74c50..6ea38b2 100644 --- a/pyspike/directionality/__init__.py +++ b/pyspike/directionality/__init__.py @@ -4,9 +4,11 @@ Copyright 2015, Mario Mulansky Distributed under the BSD License """ +from __future__ import absolute_import + __all__ = ["spike_train_order"] -from spike_train_order import spike_train_order_profile, \ +from .spike_train_order import spike_train_order_profile, \ spike_train_order, spike_train_order_profile_multi, \ spike_train_order_matrix, spike_order_values, \ optimal_spike_train_order, optimal_spike_train_order_from_matrix, \ diff --git a/pyspike/directionality/spike_train_order.py b/pyspike/directionality/spike_train_order.py index 892ffd0..44d931d 100644 --- a/pyspike/directionality/spike_train_order.py +++ b/pyspike/directionality/spike_train_order.py @@ -2,6 +2,8 @@ # Copyright 2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import numpy as np from math import exp from functools import partial @@ -35,7 +37,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_profile_cython as \ spike_train_order_profile_impl except ImportError: @@ -45,7 +47,7 @@ def spike_train_order_profile(spike_train1, spike_train2, max_tau=None): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.directionality_python_backend import \ + from .cython.directionality_python_backend import \ spike_train_order_python as spike_train_order_profile_impl if max_tau is None: @@ -72,7 +74,7 @@ def spike_train_order(spike_train1, spike_train2, normalize=True, # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_train_order_cython as spike_train_order_impl if max_tau is None: max_tau = 0.0 @@ -170,7 +172,7 @@ def spike_order_values(spike_trains, indices=None, # cython implementation try: - from cython.cython_directionality import \ + from .cython.cython_directionality import \ spike_order_values_cython as spike_order_values_impl except ImportError: raise NotImplementedError() @@ -179,7 +181,7 @@ def spike_order_values(spike_trains, indices=None, # PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ # Falling back to slow python backend.") # # use python backend -# from cython.python_backend import coincidence_python \ +# from .cython.python_backend import coincidence_python \ # as coincidence_profile_impl if max_tau is None: diff --git a/pyspike/isi_distance.py b/pyspike/isi_distance.py index e50f203..0ae7393 100644 --- a/pyspike/isi_distance.py +++ b/pyspike/isi_distance.py @@ -2,6 +2,8 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import pyspike from pyspike import PieceWiseConstFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ @@ -32,7 +34,7 @@ def isi_profile(spike_train1, spike_train2): # load cython implementation try: - from cython.cython_profiles import isi_profile_cython \ + from .cython.cython_profiles import 
isi_profile_cython \ as isi_profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -40,7 +42,7 @@ def isi_profile(spike_train1, spike_train2): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import isi_distance_python \ + from .cython.python_backend import isi_distance_python \ as isi_profile_impl times, values = isi_profile_impl(spike_train1.get_spikes_non_empty(), @@ -74,7 +76,7 @@ def isi_distance(spike_train1, spike_train2, interval=None): # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_distances import isi_distance_cython \ + from .cython.cython_distances import isi_distance_cython \ as isi_distance_impl return isi_distance_impl(spike_train1.get_spikes_non_empty(), diff --git a/pyspike/spike_distance.py b/pyspike/spike_distance.py index feea0c1..e418283 100644 --- a/pyspike/spike_distance.py +++ b/pyspike/spike_distance.py @@ -2,6 +2,8 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import pyspike from pyspike import PieceWiseLinFunc from pyspike.generic import _generic_profile_multi, _generic_distance_multi, \ @@ -32,7 +34,7 @@ def spike_profile(spike_train1, spike_train2): # cython implementation try: - from cython.cython_profiles import spike_profile_cython \ + from .cython.cython_profiles import spike_profile_cython \ as spike_profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -40,7 +42,7 @@ def spike_profile(spike_train1, spike_train2): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import spike_distance_python \ + from .cython.python_backend import spike_distance_python \ as spike_profile_impl times, y_starts, y_ends = spike_profile_impl( @@ -76,7 +78,7 @@ def spike_distance(spike_train1, spike_train2, interval=None): # distance over the whole interval is requested: use specific function # for optimal performance try: - from cython.cython_distances import spike_distance_cython \ + from .cython.cython_distances import spike_distance_cython \ as spike_distance_impl return spike_distance_impl(spike_train1.get_spikes_non_empty(), spike_train2.get_spikes_non_empty(), diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py index 10ebdc7..3dc29ff 100644 --- a/pyspike/spike_sync.py +++ b/pyspike/spike_sync.py @@ -3,6 +3,8 @@ # Copyright 2014-2015, Mario Mulansky # Distributed under the BSD License +from __future__ import absolute_import + import numpy as np from functools import partial import pyspike @@ -37,7 +39,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): # cython implementation try: - from cython.cython_profiles import coincidence_profile_cython \ + from .cython.cython_profiles import coincidence_profile_cython \ as coincidence_profile_impl except ImportError: if not(pyspike.disable_backend_warning): @@ -45,7 +47,7 @@ def spike_sync_profile(spike_train1, spike_train2, max_tau=None): PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \ Falling back to slow python backend.") # use python backend - from cython.python_backend import coincidence_python \ + from .cython.python_backend import coincidence_python \ as coincidence_profile_impl if max_tau is None: @@ -73,7 +75,7 @@ def _spike_sync_values(spike_train1, 
spike_train2, interval, max_tau):
     # distance over the whole interval is requested: use specific function
     # for optimal performance
     try:
-        from cython.cython_distances import coincidence_value_cython \
+        from .cython.cython_distances import coincidence_value_cython \
             as coincidence_value_impl
         if max_tau is None:
             max_tau = 0.0
-- cgit v1.2.3


From 34bd30415dd93a2425ce566627e24ee9483ada3e Mon Sep 17 00:00:00 2001
From: Mario Mulansky
Date: Thu, 20 Sep 2018 10:49:42 -0700
Subject: Spike Order support (#39)

* reorganized directionality module
* further refactoring of directionality
* completed python directionality backend
* added SPIKE-Sync based filtering
  new function filter_by_spike_sync removes spikes that have a multi-variate
  Spike Sync value below some threshold; not yet fully tested, python backend
  missing.
* spike sync filtering, cython sim ann
  Added function for filtering out events based on a threshold for the spike
  sync values. Useful for focusing on synchronous events during directionality
  analysis. Also added cython version of simulated annealing for performance.
* added coincidence single profile to python backend
  missing function in python backend added; identified and fixed a bug in the
  implementation as well
* updated test case to new spike sync behavior
* python3 fixes
* another python3 fix
* Fix absolute imports in directionality measures
* remove commented code
* Add directionality to docs, bump version
* Clean up directionality module, add doxy.
* Remove debug print from tests
* Fix bug in calling Python backend
* Fix incorrect integrals in PieceWiseConstFunc (#36)
* Add (some currently failing) tests for PieceWiseConstFunc.integral
* Fix implementation of PieceWiseConstFunc.integral
  Just by adding a special condition for when we are only taking an integral
  "between" two edges of a PieceWiseConstFunc. All tests now pass. Fixes #33.
* Add PieceWiseConstFunc.integral tests for ValueError
* Add testing bounds of integral
* Raise ValueError in function implementation
* Fix incorrect integrals in PieceWiseLinFunc (#38)
  Integrals of piece-wise linear functions were incorrect if the requested
  interval lies completely between two support points. This has been fixed,
  and a unit test exercising this behavior was added. Fixes #38
* Add Spike Order example and Tutorial section
  Adds an example computing spike order profile and the optimal spike train
  order. Also adds a section on spike train order to the tutorial.
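The integral fixes listed above (#36 and #38) concern the special case where
the requested interval lies completely between two support points. A minimal
standalone sketch of that case for the piece-wise constant function; this is
an illustration of the same logic as the fix in the diff below, not the
library code itself, and the function name is chosen for illustration::

    import numpy as np

    def pwc_integral_between_edges(x, y, a, b):
        """Integral of a piece-wise constant function over [a, b], assuming
        both bounds fall between the same two support points."""
        start_ind = np.searchsorted(x, a, side='right')
        end_ind = np.searchsorted(x, b, side='left') - 1
        # start_ind > end_ind signals that [a, b] lies within one interval:
        # the integral is just the constant value times the length of [a, b]
        assert start_ind > end_ind, "bounds span more than one interval"
        return (b - a) * y[end_ind]

    x = [0.0, 1.0, 2.0, 2.5, 4.0]
    y = [1.0, -0.5, 1.5, 0.75]
    pwc_integral_between_edges(x, y, 1.1, 1.5)  # 0.4 * (-0.5) = -0.2
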
---
 Changelog                                       |   3 +
 Readme.rst                                      |   9 +-
 doc/pyspike.rst                                 |   6 +
 doc/tutorial.rst                                |  66 +++
 examples/spike_train_order.py                   |  52 +++
 pyspike/PieceWiseConstFunc.py                   |  32 +-
 pyspike/PieceWiseLinFunc.py                     |  42 +-
 pyspike/__init__.py                             |  16 +-
 pyspike/cython/cython_directionality.pyx        | 262 ++++++++++++
 pyspike/cython/cython_distances.pyx             | 200 +++++++++
 pyspike/cython/cython_profiles.pyx              |  33 ++
 pyspike/cython/cython_simulated_annealing.pyx   |  82 ++++
 pyspike/cython/directionality_python_backend.py | 144 +++++++
 pyspike/cython/python_backend.py                |  67 ++-
 pyspike/spike_directionality.py                 | 522 ++++++++++++++++++++++++
 pyspike/spike_sync.py                           |  55 ++-
 setup.py                                        |  28 +-
 test/test_directionality.py                     |  97 +++++
 test/test_function.py                           |  62 +++
 test/test_sync_filter.py                        |  95 +++++
 20 files changed, 1812 insertions(+), 61 deletions(-)
 create mode 100644 examples/spike_train_order.py
 create mode 100644 pyspike/cython/cython_directionality.pyx
 create mode 100644 pyspike/cython/cython_simulated_annealing.pyx
 create mode 100644 pyspike/cython/directionality_python_backend.py
 create mode 100644 pyspike/spike_directionality.py
 create mode 100644 test/test_directionality.py
 create mode 100644 test/test_sync_filter.py
(limited to 'pyspike/PieceWiseConstFunc.py')

diff --git a/Changelog b/Changelog
index 21b7cb0..88e16cc 100644
--- a/Changelog
+++ b/Changelog
@@ -1,3 +1,6 @@
+PySpike v0.6:
+ * Support for computing spike directionality and spike train order
+
 PySpike v0.5:
  * First beta release
  * Python 2.6 support removed

diff --git a/Readme.rst b/Readme.rst
index 0422dad..74b014b 100644
--- a/Readme.rst
+++ b/Readme.rst
@@ -31,19 +31,14 @@ Additionally, depending on the used methods: ISI-distance [1], SPIKE-distance [2
 Important Changelog
 -----------------------------

+With version 0.6.0, the spike directionality and spike train order functions have been added.
+
 With version 0.5.0, the interfaces have been unified and the specific
 functions for multivariate computations have become deprecated.

 With version 0.2.0, the :code:`SpikeTrain` class has been introduced to represent
 spike trains. This is a breaking change in the function interfaces.
 Hence, programs written for older versions of PySpike (0.1.x) will not run with
 newer versions.
-
-Upcoming Functionality
--------------------------
-
-In an upcoming release, new functionality for analyzing Synfire patterns based on
-the new measures SPIKE-Order and Spike-Train-Order method will become part of the
-PySpike library.
-The new measures and algorithms are described in `this preprint `_.
-
 Requirements and Installation
 -----------------------------

diff --git a/doc/pyspike.rst b/doc/pyspike.rst
index 74ab439..3b10d2a 100644
--- a/doc/pyspike.rst
+++ b/doc/pyspike.rst
@@ -64,6 +64,12 @@ PSTH
    :undoc-members:
    :show-inheritance:

+Directionality
+........................................
+.. automodule:: pyspike.spike_directionality
+    :members:
+    :undoc-members:
+    :show-inheritance:

 Helper functions
 ........................................

diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index aff03a8..377c0a2 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -231,3 +231,69 @@ The following example computes and plots the ISI- and SPIKE-distance matrix as w
     plt.title("SPIKE-Sync")

     plt.show()
+
+
+Quantifying Leaders and Followers: Spike Train Order
+----------------------------------------------------
+
+PySpike provides functionality to quantify how much a set of spike trains
+resembles a synfire pattern (i.e. a perfect leader-follower pattern).
For details +on the algorithms please see +`our article in NJP `_. + +The following example computes the Spike Order profile and Synfire Indicator +of two Poissonian spike trains. + +.. code:: python + import numpy as np + from matplotlib import pyplot as plt + import pyspike as spk + + + st1 = spk.generate_poisson_spikes(1.0, [0, 20]) + st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + + d = spk.spike_directionality(st1, st2) + + print "Spike Directionality of two Poissonian spike trains:", d + + E = spk.spike_train_order_profile(st1, st2) + + plt.figure() + x, y = E.get_plottable_data() + plt.plot(x, y, '-ob') + plt.ylim(-1.1, 1.1) + plt.xlabel("t") + plt.ylabel("E") + plt.title("Spike Train Order Profile") + + plt.show() + +Additionally, PySpike can also compute the optimal ordering of the spike trains, +ie the ordering that most resembles a synfire pattern. The following example +computes the optimal order of a set of 20 Poissonian spike trains: + +.. code:: python + + M = 20 + spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + + F_init = spk.spike_train_order(spike_trains) + print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + + D_init = spk.spike_directionality_matrix(spike_trains) + phi, _ = spk.optimal_spike_train_sorting(spike_trains) + F_opt = spk.spike_train_order(spike_trains, indices=phi) + print "Synfire Indicator of optimized spike train sorting:", F_opt + + D_opt = spk.permutate_matrix(D_init, phi) + + plt.figure() + plt.imshow(D_init) + plt.title("Initial Directionality Matrix") + + plt.figure() + plt.imshow(D_opt) + plt.title("Optimized Directionality Matrix") + + plt.show() diff --git a/examples/spike_train_order.py b/examples/spike_train_order.py new file mode 100644 index 0000000..3a42472 --- /dev/null +++ b/examples/spike_train_order.py @@ -0,0 +1,52 @@ +import numpy as np +from matplotlib import pyplot as plt +import pyspike as spk + + +st1 = spk.generate_poisson_spikes(1.0, [0, 20]) +st2 = spk.generate_poisson_spikes(1.0, [0, 20]) + +d = spk.spike_directionality(st1, st2) + +print "Spike Directionality of two Poissonian spike trains:", d + +E = spk.spike_train_order_profile(st1, st2) + +plt.figure() +x, y = E.get_plottable_data() +plt.plot(x, y, '-ob') +plt.ylim(-1.1, 1.1) +plt.xlabel("t") +plt.ylabel("E") +plt.title("Spike Train Order Profile") + + +###### Optimize spike train order of 20 Random spike trains ####### + +M = 20 + +spike_trains = [spk.generate_poisson_spikes(1.0, [0, 100]) for m in xrange(M)] + +F_init = spk.spike_train_order(spike_trains) + +print "Initial Synfire Indicator for 20 Poissonian spike trains:", F_init + +D_init = spk.spike_directionality_matrix(spike_trains) + +phi, _ = spk.optimal_spike_train_sorting(spike_trains) + +F_opt = spk.spike_train_order(spike_trains, indices=phi) + +print "Synfire Indicator of optimized spike train sorting:", F_opt + +D_opt = spk.permutate_matrix(D_init, phi) + +plt.figure() +plt.imshow(D_init) +plt.title("Initial Directionality Matrix") + +plt.figure() +plt.imshow(D_opt) +plt.title("Optimized Directionality Matrix") + +plt.show() diff --git a/pyspike/PieceWiseConstFunc.py b/pyspike/PieceWiseConstFunc.py index 5ce5f27..17fdd3f 100644 --- a/pyspike/PieceWiseConstFunc.py +++ b/pyspike/PieceWiseConstFunc.py @@ -129,19 +129,31 @@ class PieceWiseConstFunc(object): # no interval given, integrate over the whole spike train a = np.sum((self.x[1:]-self.x[:-1]) * self.y) else: + if interval[0]>interval[1]: + raise ValueError("Invalid averaging interval: 
interval[0]>=interval[1]") + if interval[0]self.x[-1]: + raise ValueError("Invalid averaging interval: interval[0] 0 and end_ind < len(self.x), \ - "Invalid averaging interval" - # first the contribution from between the indices - a = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - self.y[start_ind:end_ind]) - # correction from start to first index - a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] - # correction from last index to end - a += (interval[1]-self.x[end_ind]) * self.y[end_ind] + if start_ind > end_ind: + # contribution from between two closest edges + a = (self.x[start_ind]-self.x[end_ind]) * self.y[end_ind] + # minus the part that is not within the interval + a -= ((interval[0]-self.x[end_ind])+(self.x[start_ind]-interval[1])) * self.y[end_ind] + else: + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + # first the contribution from between the indices + a = np.sum((self.x[start_ind+1:end_ind+1] - + self.x[start_ind:end_ind]) * + self.y[start_ind:end_ind]) + # correction from start to first index + a += (self.x[start_ind]-interval[0]) * self.y[start_ind-1] + # correction from last index to end + a += (interval[1]-self.x[end_ind]) * self.y[end_ind] return a def avrg(self, interval=None): diff --git a/pyspike/PieceWiseLinFunc.py b/pyspike/PieceWiseLinFunc.py index 8145e63..8faaec4 100644 --- a/pyspike/PieceWiseLinFunc.py +++ b/pyspike/PieceWiseLinFunc.py @@ -146,31 +146,47 @@ class PieceWiseLinFunc: if interval is None: # no interval given, integrate over the whole spike train - integral = np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + return np.sum((self.x[1:]-self.x[:-1]) * 0.5*(self.y1+self.y2)) + + # find the indices corresponding to the interval + start_ind = np.searchsorted(self.x, interval[0], side='right') + end_ind = np.searchsorted(self.x, interval[1], side='left')-1 + assert start_ind > 0 and end_ind < len(self.x), \ + "Invalid averaging interval" + if start_ind > end_ind: + print(start_ind, end_ind, self.x[start_ind]) + # contribution from between two closest edges + y_x0 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[0]) + y_x1 = intermediate_value(self.x[start_ind-1], + self.x[start_ind], + self.y1[start_ind-1], + self.y2[start_ind-1], + interval[1]) + print(y_x0, y_x1, interval[1] - interval[0]) + integral = (y_x0 + y_x1) * 0.5 * (interval[1] - interval[0]) + print(integral) else: - # find the indices corresponding to the interval - start_ind = np.searchsorted(self.x, interval[0], side='right') - end_ind = np.searchsorted(self.x, interval[1], side='left')-1 - assert start_ind > 0 and end_ind < len(self.x), \ - "Invalid averaging interval" # first the contribution from between the indices integral = np.sum((self.x[start_ind+1:end_ind+1] - - self.x[start_ind:end_ind]) * - 0.5*(self.y1[start_ind:end_ind] + - self.y2[start_ind:end_ind])) + self.x[start_ind:end_ind]) * + 0.5*(self.y1[start_ind:end_ind] + + self.y2[start_ind:end_ind])) # correction from start to first index integral += (self.x[start_ind]-interval[0]) * 0.5 * \ (self.y2[start_ind-1] + - intermediate_value(self.x[start_ind-1], + intermediate_value(self.x[start_ind-1], self.x[start_ind], self.y1[start_ind-1], self.y2[start_ind-1], - interval[0] - )) + interval[0])) # correction from last index to end integral += (interval[1]-self.x[end_ind]) * 0.5 * \ (self.y1[end_ind] + - intermediate_value(self.x[end_ind], self.x[end_ind+1], + 
intermediate_value(self.x[end_ind], self.x[end_ind+1], self.y1[end_ind], self.y2[end_ind], interval[1] )) diff --git a/pyspike/__init__.py b/pyspike/__init__.py index 08253fb..3897d18 100644 --- a/pyspike/__init__.py +++ b/pyspike/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014-2015, Mario Mulansky +Copyright 2014-2018, Mario Mulansky Distributed under the BSD License """ @@ -7,8 +7,8 @@ Distributed under the BSD License from __future__ import absolute_import __all__ = ["isi_distance", "spike_distance", "spike_sync", "psth", - "spikes", "SpikeTrain", "PieceWiseConstFunc", "PieceWiseLinFunc", - "DiscreteFunc", "directionality"] + "spikes", "spike_directionality", "SpikeTrain", + "PieceWiseConstFunc", "PieceWiseLinFunc", "DiscreteFunc"] from .PieceWiseConstFunc import PieceWiseConstFunc from .PieceWiseLinFunc import PieceWiseLinFunc @@ -20,13 +20,21 @@ from .isi_distance import isi_profile, isi_distance, isi_profile_multi,\ from .spike_distance import spike_profile, spike_distance, spike_profile_multi,\ spike_distance_multi, spike_distance_matrix from .spike_sync import spike_sync_profile, spike_sync,\ - spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix + spike_sync_profile_multi, spike_sync_multi, spike_sync_matrix,\ + filter_by_spike_sync from .psth import psth from .spikes import load_spike_trains_from_txt, save_spike_trains_to_txt, \ spike_train_from_string, import_spike_trains_from_time_series, \ merge_spike_trains, generate_poisson_spikes +from .spike_directionality import spike_directionality, \ + spike_directionality_values, spike_directionality_matrix, \ + spike_train_order_profile, spike_train_order_profile_bi, \ + spike_train_order_profile_multi, spike_train_order, \ + spike_train_order_bi, spike_train_order_multi, \ + optimal_spike_train_sorting, permutate_matrix + # define the __version__ following # http://stackoverflow.com/questions/17583443 from pkg_resources import get_distribution, DistributionNotFound diff --git a/pyspike/cython/cython_directionality.pyx b/pyspike/cython/cython_directionality.pyx new file mode 100644 index 0000000..ac37690 --- /dev/null +++ b/pyspike/cython/cython_directionality.pyx @@ -0,0 +1,262 @@ +#cython: boundscheck=False +#cython: wraparound=False +#cython: cdivision=True + +""" +cython_directionality.pyx + +cython implementation of the spike delay asymmetry measures + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_directionality.pyx + +which gives:: + + cython_directionality.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport fabs +from libc.math cimport fmax +from libc.math cimport fmin + +# from pyspike.cython.cython_distances cimport get_tau + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +############################################################ +# get_tau +############################################################ +cdef inline double get_tau(double[:] spikes1, double[:] spikes2, + int i, int j, double interval, double max_tau): + cdef double m = interval # use interval length as initial tau + cdef int N1 = spikes1.shape[0]-1 # len(spikes1)-1 + cdef int N2 = spikes2.shape[0]-1 # len(spikes2)-1 + if i < N1 and i > -1: + m = fmin(m, spikes1[i+1]-spikes1[i]) + if j < N2 and j > -1: + m = fmin(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = fmin(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = fmin(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m 
= fmin(m, max_tau) + return m + + +############################################################ +# spike_train_order_profile_cython +############################################################ +def spike_train_order_profile_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int n = 0 + cdef double[:] st = np.zeros(N1 + N2 + 2) # spike times + cdef double[:] a = np.zeros(N1 + N2 + 2) # asymmetry values + cdef double[:] mp = np.ones(N1 + N2 + 2) # multiplicity + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # both get marked with -1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp + + +############################################################ +# spike_train_order_cython +############################################################ +def spike_train_order_cython(double[:] spikes1, double[:] spikes2, + double t_start, double t_end, double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 + cdef int mp = 0 + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 2 appeared before spike in spike train 1 + # mark with -1 + d -= 2 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + mp += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in spike train 1 appeared before spike in spike train 2 + # mark with +1 + d += 2 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # add only one event with multiplicity 2, but no asymmetry counting + mp += 2 + + if d == 0 and mp == 0: + # empty spike trains -> spike sync = 1 by definition + d = 1 + mp = 1 + + return d, mp + + +############################################################ +# spike_directionality_profiles_cython +############################################################ +def 
spike_directionality_profiles_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef double[:] d1 = np.zeros(N1) # directionality values + cdef double[:] d2 = np.zeros(N2) # directionality values + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + # equal spike times: zero asymmetry value + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_directionality_cython +############################################################ +def spike_directionality_cython(double[:] spikes1, + double[:] spikes2, + double t_start, double t_end, + double max_tau): + + cdef int N1 = len(spikes1) + cdef int N2 = len(spikes2) + cdef int i = -1 + cdef int j = -1 + cdef int d = 0 # directionality value + cdef double interval = t_end - t_start + cdef double tau + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 after spike train 2 + # leading spike gets +1, following spike -1 + d -= 1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, interval, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike from spike train 1 before spike train 2 + # leading spike gets +1, following spike -1 + d += 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + + return d diff --git a/pyspike/cython/cython_distances.pyx b/pyspike/cython/cython_distances.pyx index ac5f226..d4070ae 100644 --- a/pyspike/cython/cython_distances.pyx +++ b/pyspike/cython/cython_distances.pyx @@ -178,6 +178,8 @@ cdef inline double isi_avrg_cython(double isi1, double isi2) nogil: return 0.5*(isi1+isi2)*(isi1+isi2) # alternative definition to obtain ~ 0.5 for Poisson spikes # return 0.5*(isi1*isi1+isi2*isi2) + # another alternative definition without second normalization + # return 0.5*(isi1+isi2) ############################################################ @@ -248,6 +250,8 @@ def spike_distance_cython(double[:] t1, double[:] t2, index2 = 0 y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2) index = 1 while index1+index2 < N1+N2-2: @@ -267,6 +271,8 @@ def 
spike_distance_cython(double[:] t1, double[:] t2,
             t_curr = t_p1
             s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2
             y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
 
             spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
 
@@ -286,6 +292,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
                 s1 = dt_p1
             # s2 is the same as above, thus we can compute y2 immediately
             y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
         elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1):
             index2 += 1
             # first calculate the previous interval end value
@@ -301,6 +309,8 @@ def spike_distance_cython(double[:] t1, double[:] t2,
             t_curr = t_p2
             s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1
             y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
 
             spike_value += 0.5*(y_start + y_end) * (t_curr - t_last)
 
@@ -320,6 +330,9 @@ def spike_distance_cython(double[:] t1, double[:] t2,
                 s2 = dt_p2
             # s1 is the same as above, thus we can compute y2 immediately
             y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2)
+            # alternative definition without second normalization
+            # y_start = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
         else: # t_f1 == t_f2 - generate only one event
             index1 += 1
             index2 += 1
@@ -358,6 +371,193 @@ def spike_distance_cython(double[:] t1, double[:] t2,
         s1 = dt_f1 # *(t_end-t1[N1-1])/isi1
         s2 = dt_f2 # *(t_end-t2[N2-1])/isi2
         y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+        # alternative definition without second normalization
+        # y_end = (s1 + s2) / isi_avrg_cython(isi1, isi2)
+
+    spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+    # end nogil
+
+    # use only the data added above
+    # could be less than original length due to equal spike times
+    return spike_value / (t_end-t_start)
+
+
+############################################################
+# isi_avrg_rf_cython
+############################################################
+cdef inline double isi_avrg_rf_cython(double isi1, double isi2) nogil:
+    # rate free version
+    return (isi1+isi2)
+
+
+############################################################
+# spike_distance_rf_cython
+############################################################
+def spike_distance_rf_cython(double[:] t1, double[:] t2,
+                             double t_start, double t_end):
+
+    cdef int N1, N2, index1, index2, index
+    cdef double t_p1, t_f1, t_p2, t_f2, dt_p1, dt_p2, dt_f1, dt_f2
+    cdef double isi1, isi2, s1, s2
+    cdef double y_start, y_end, t_last, t_curr, spike_value
+
+    spike_value = 0.0
+
+    N1 = len(t1)
+    N2 = len(t2)
+
+    with nogil: # release the interpreter to allow multithreading
+        t_last = t_start
+        t_p1 = t_start
+        t_p2 = t_start
+        if t1[0] > t_start:
+            # dt_p1 = t2[0]-t_start
+            t_f1 = t1[0]
+            dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+            isi1 = fmax(t_f1-t_start, t1[1]-t1[0])
+            dt_p1 = dt_f1
+            s1 = dt_p1*(t_f1-t_start)/isi1
+            index1 = -1
+        else:
+            t_f1 = t1[1]
+            dt_f1 = get_min_dist_cython(t_f1, t2, N2, 0, t_start, t_end)
+            dt_p1 = 0.0
+            isi1 = t1[1]-t1[0]
+            s1 = dt_p1
+            index1 = 0
+        if t2[0] > t_start:
+            # dt_p2 = t1[0]-t_start
+            t_f2 = t2[0]
+            dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end)
+            dt_p2 = dt_f2
+            isi2 = fmax(t_f2-t_start, t2[1]-t2[0])
+            s2 = dt_p2*(t_f2-t_start)/isi2
+            index2 = -1
+        else:
+            
t_f2 = t2[1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, 0, t_start, t_end) + dt_p2 = 0.0 + isi2 = t2[1]-t2[0] + s2 = dt_p2 + index2 = 0 + + # y_start = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + index = 1 + + while index1+index2 < N1+N2-2: + # print(index, index1, index2) + if (index1 < N1-1) and (t_f1 < t_f2 or index2 == N2-1): + index1 += 1 + # first calculate the previous interval end value + s1 = dt_f1*(t_f1-t_p1) / isi1 + # the previous time now was the following time before: + dt_p1 = dt_f1 + t_p1 = t_f1 # t_p1 contains the current time point + # get the next time + if index1 < N1-1: + t_f1 = t1[index1+1] + else: + t_f1 = t_end + t_curr = t_p1 + s2 = (dt_p2*(t_f2-t_p1) + dt_f2*(t_p1-t_p2)) / isi2 + # y_end = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index1 < N1-1: + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1-t_p1 + s1 = dt_p1 + else: + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + # s1 needs adjustment due to change of isi1 + s1 = dt_p1*(t_end-t1[N1-1])/isi1 + # s2 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + elif (index2 < N2-1) and (t_f1 > t_f2 or index1 == N1-1): + index2 += 1 + # first calculate the previous interval end value + s2 = dt_f2*(t_f2-t_p2) / isi2 + # the previous time now was the following time before: + dt_p2 = dt_f2 + t_p2 = t_f2 # t_p2 contains the current time point + # get the next time + if index2 < N2-1: + t_f2 = t2[index2+1] + else: + t_f2 = t_end + t_curr = t_p2 + s1 = (dt_p1*(t_f1-t_p2) + dt_f1*(t_p2-t_p1)) / isi1 + # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + + # now the next interval start value + if index2 < N2-1: + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2-t_p2 + s2 = dt_p2 + else: + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + # s2 needs adjustment due to change of isi2 + s2 = dt_p2*(t_end-t2[N2-1])/isi2 + # s1 is the same as above, thus we can compute y2 immediately + # y_start = (s1*isi2 + s2*isi1)/isi_avrg_cython(isi1, isi2) + # alternative definition without second normalization + y_start = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2) + + else: # t_f1 == t_f2 - generate only one event + index1 += 1 + index2 += 1 + t_p1 = t_f1 + t_p2 = t_f2 + dt_p1 = 0.0 + dt_p2 = 0.0 + t_curr = t_f1 + y_end = 0.0 + spike_value += 0.5*(y_start + y_end) * (t_curr - t_last) + y_start = 0.0 + if index1 < N1-1: + t_f1 = t1[index1+1] + dt_f1 = get_min_dist_cython(t_f1, t2, N2, index2, + t_start, t_end) + isi1 = t_f1 - t_p1 + else: + t_f1 = t_end + dt_f1 = dt_p1 + isi1 = fmax(t_end-t1[N1-1], t1[N1-1]-t1[N1-2]) + if index2 < N2-1: + t_f2 = t2[index2+1] + dt_f2 = get_min_dist_cython(t_f2, t1, N1, index1, + t_start, t_end) + isi2 = t_f2 - t_p2 + else: + t_f2 = t_end + dt_f2 = dt_p2 + isi2 = fmax(t_end-t2[N2-1], t2[N2-1]-t2[N2-2]) + index += 1 + t_last = t_curr 
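+        # after the loop: close the very last interval at t_end; s1 and s2
+        # are scaled by the remaining fraction of the boundary intervals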
+        # isi1 = max(t_end-t1[N1-1], t1[N1-1]-t1[N1-2])
+        # isi2 = max(t_end-t2[N2-1], t2[N2-1]-t2[N2-2])
+        s1 = dt_f1*(t_end-t1[N1-1])/isi1
+        s2 = dt_f2*(t_end-t2[N2-1])/isi2
+        # y_end = (s1*isi2 + s2*isi1) / isi_avrg_cython(isi1, isi2)
+        # alternative definition without second normalization
+        y_end = (s1 + s2) / isi_avrg_rf_cython(isi1, isi2)
+
+    spike_value += 0.5*(y_start + y_end) * (t_end - t_last)
+    # end nogil
diff --git a/pyspike/cython/cython_profiles.pyx b/pyspike/cython/cython_profiles.pyx
index 4a42cdb..aa24db4 100644
--- a/pyspike/cython/cython_profiles.pyx
+++ b/pyspike/cython/cython_profiles.pyx
@@ -450,3 +450,36 @@ def coincidence_profile_cython(double[:] spikes1, double[:] spikes2,
             c[1] = 1
 
     return st, c, mp
+
+
+############################################################
+# coincidence_single_profile_cython
+############################################################
+def coincidence_single_profile_cython(double[:] spikes1, double[:] spikes2,
+                                      double t_start, double t_end, double max_tau):
+
+    cdef int N1 = len(spikes1)
+    cdef int N2 = len(spikes2)
+    cdef int j = -1
+    cdef double[:] c = np.zeros(N1) # coincidences
+    cdef double interval = t_end - t_start
+    cdef double tau
+    for i in xrange(N1):
+        while j < N2-1 and spikes2[j+1] < spikes1[i]:
+            # move forward until spikes2[j] is the last spike before spikes1[i]
+            # note that if spikes2[j] is after spikes1[i] we don't do anything
+            j += 1
+        tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+        if j > -1 and fabs(spikes1[i]-spikes2[j]) < tau:
+            # current spike in st1 is coincident
+            c[i] = 1
+        if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]):
+            # in case spikes2[j] is before spikes1[i] it has to be the one
+            # right before (see above), hence we move one forward and also
+            # check the next spike
+            j += 1
+            tau = get_tau(spikes1, spikes2, i, j, interval, max_tau)
+            if fabs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
diff --git a/pyspike/cython/cython_simulated_annealing.pyx b/pyspike/cython/cython_simulated_annealing.pyx
new file mode 100644
index 0000000..be9423c
--- /dev/null
+++ b/pyspike/cython/cython_simulated_annealing.pyx
@@ -0,0 +1,82 @@
+#cython: boundscheck=False
+#cython: wraparound=False
+#cython: cdivision=True
+
+"""
+cython_simulated_annealing.pyx
+
+cython implementation of a simulated annealing algorithm to find the optimal
+spike train order
+
+Note: using cython memoryviews (e.g. double[:]) instead of ndarray objects
+improves the performance by a factor of 10!
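+
+A minimal usage sketch (hypothetical values; `D` is assumed to be a square
+NumPy array of pairwise spike directionality values)::
+
+    T_start = 2*np.max(D)
+    p, A, total_iter = sim_ann_cython(D, T_start, 1e-5*T_start, 0.9)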
+ +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +""" +To test whether things can be optimized: remove all yellow stuff +in the html output:: + + cython -a cython_simulated_annealing.pyx + +which gives: + + cython_simulated_annealing.html + +""" + +import numpy as np +cimport numpy as np + +from libc.math cimport exp +from libc.math cimport fmod +from libc.stdlib cimport rand +from libc.stdlib cimport RAND_MAX + +DTYPE = np.float +ctypedef np.float_t DTYPE_t + + +def sim_ann_cython(double[:, :] D, double T_start, double T_end, double alpha): + + cdef long N = len(D) + cdef double A = np.sum(np.triu(D, 0)) + cdef long[:] p = np.arange(N) + cdef double T = T_start + cdef long iterations + cdef long succ_iter + cdef long total_iter = 0 + cdef double delta_A + cdef long ind1 + cdef long ind2 + + while T > T_end: + iterations = 0 + succ_iter = 0 + # equilibrate for 100*N steps or 10*N successful steps + while iterations < 100*N and succ_iter < 10*N: + # exchange two rows and cols + # ind1 = np.random.randint(N-1) + ind1 = rand() % (N-1) + if ind1 < N-1: + ind2 = ind1+1 + else: # this can never happen! + ind2 = 0 + delta_A = -2*D[p[ind1], p[ind2]] + if delta_A > 0.0 or exp(delta_A/T) > ((1.0*rand()) / RAND_MAX): + # swap indices + p[ind1], p[ind2] = p[ind2], p[ind1] + A += delta_A + succ_iter += 1 + iterations += 1 + total_iter += iterations + T *= alpha # cool down + if succ_iter == 0: + # no successful step -> we believe we have converged + break + + return p, A, total_iter diff --git a/pyspike/cython/directionality_python_backend.py b/pyspike/cython/directionality_python_backend.py new file mode 100644 index 0000000..c1d820b --- /dev/null +++ b/pyspike/cython/directionality_python_backend.py @@ -0,0 +1,144 @@ +""" directionality_python_backend.py + +Collection of python functions that can be used instead of the cython +implementation. 
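+
+These pure-python implementations are considerably slower than the cython
+backend; they are used automatically as a fall-back when the compiled
+extensions are not available.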
+ +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np + + +############################################################ +# spike_train_order_python +############################################################ +def spike_directionality_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + d1 = np.zeros(N1) # directionality values + d2 = np.zeros(N2) # directionality values + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # spike in first spike train occurs after second + d1[i] = -1 + d2[j] = +1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # spike in second spike train occurs after first + d1[i] = +1 + d2[j] = -1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + d1[i] = 0 + d2[j] = 0 + + return d1, d2 + + +############################################################ +# spike_train_order_python +############################################################ +def spike_train_order_profile_python(spikes1, spikes2, t_start, t_end, + max_tau): + + def get_tau(spikes1, spikes2, i, j, max_tau): + m = t_end - t_start # use interval as initial tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + N1 = len(spikes1) + N2 = len(spikes2) + i = -1 + j = -1 + n = 0 + st = np.zeros(N1 + N2 + 2) # spike times + a = np.zeros(N1 + N2 + 2) # coincidences + mp = np.ones(N1 + N2 + 2) # multiplicity + while i + j < N1 + N2 - 2: + if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): + i += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes1[i] + if j > -1 and spikes1[i]-spikes2[j] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = -1 + a[n-1] = -1 + elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): + j += 1 + n += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau) + st[n] = spikes2[j] + if i > -1 and spikes2[j]-spikes1[i] < tau: + # coincidence between the current spike and the previous spike + # both get marked with 1 + a[n] = 1 + a[n-1] = 1 + else: # spikes1[i+1] = spikes2[j+1] + # advance in both spike trains + j += 1 + i += 1 + n += 1 + # add only one event with zero asymmetry value and multiplicity 2 + st[n] = spikes1[i] + a[n] = 0 + mp[n] = 2 + + st = st[:n+2] + a = a[:n+2] + mp = mp[:n+2] + + st[0] = t_start + st[len(st)-1] = t_end + if N1 + N2 > 0: + a[0] = a[1] + a[len(a)-1] = 
a[len(a)-2] + mp[0] = mp[1] + mp[len(mp)-1] = mp[len(mp)-2] + else: + a[0] = 1 + a[1] = 1 + + return st, a, mp diff --git a/pyspike/cython/python_backend.py b/pyspike/cython/python_backend.py index 6b7209a..e75f181 100644 --- a/pyspike/cython/python_backend.py +++ b/pyspike/cython/python_backend.py @@ -3,7 +3,7 @@ Collection of python functions that can be used instead of the cython implementation. -Copyright 2014, Mario Mulansky +Copyright 2014-2015, Mario Mulansky Distributed under the BSD License @@ -356,26 +356,27 @@ def cumulative_sync_python(spikes1, spikes2): return st, c +def get_tau(spikes1, spikes2, i, j, max_tau, init_tau): + m = init_tau + if i < len(spikes1)-1 and i > -1: + m = min(m, spikes1[i+1]-spikes1[i]) + if j < len(spikes2)-1 and j > -1: + m = min(m, spikes2[j+1]-spikes2[j]) + if i > 0: + m = min(m, spikes1[i]-spikes1[i-1]) + if j > 0: + m = min(m, spikes2[j]-spikes2[j-1]) + m *= 0.5 + if max_tau > 0.0: + m = min(m, max_tau) + return m + + ############################################################ # coincidence_python ############################################################ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): - def get_tau(spikes1, spikes2, i, j, max_tau): - m = t_end - t_start # use interval as initial tau - if i < len(spikes1)-1 and i > -1: - m = min(m, spikes1[i+1]-spikes1[i]) - if j < len(spikes2)-1 and j > -1: - m = min(m, spikes2[j+1]-spikes2[j]) - if i > 0: - m = min(m, spikes1[i]-spikes1[i-1]) - if j > 0: - m = min(m, spikes2[j]-spikes2[j-1]) - m *= 0.5 - if max_tau > 0.0: - m = min(m, max_tau) - return m - N1 = len(spikes1) N2 = len(spikes2) i = -1 @@ -388,7 +389,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): if (i < N1-1) and (j == N2-1 or spikes1[i+1] < spikes2[j+1]): i += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes1[i] if j > -1 and spikes1[i]-spikes2[j] < tau: # coincidence between the current spike and the previous spike @@ -398,7 +399,7 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): elif (j < N2-1) and (i == N1-1 or spikes1[i+1] > spikes2[j+1]): j += 1 n += 1 - tau = get_tau(spikes1, spikes2, i, j, max_tau) + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) st[n] = spikes2[j] if i > -1 and spikes2[j]-spikes1[i] < tau: # coincidence between the current spike and the previous spike @@ -433,6 +434,36 @@ def coincidence_python(spikes1, spikes2, t_start, t_end, max_tau): return st, c, mp +############################################################ +# coincidence_single_profile_cython +############################################################ +def coincidence_single_python(spikes1, spikes2, t_start, t_end, max_tau): + + N1 = len(spikes1) + N2 = len(spikes2) + j = -1 + c = np.zeros(N1) # coincidences + for i in range(N1): + while j < N2-1 and spikes2[j+1] < spikes1[i]: + # move forward until spikes2[j] is the last spike before spikes1[i] + # note that if spikes2[j] is after spikes1[i] we dont do anything + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if j > -1 and abs(spikes1[i]-spikes2[j]) < tau: + # current spike in st1 is coincident + c[i] = 1 + if j < N2-1 and (j < 0 or spikes2[j] < spikes1[i]): + # in case spikes2[j] is before spikes1[i] it has to be the first or + # the one right before (see above), hence we move one forward and + # also check the next spike + j += 1 + tau = get_tau(spikes1, spikes2, i, j, max_tau, t_end-t_start) + if 
abs(spikes2[j]-spikes1[i]) < tau:
+                # current spike in st1 is coincident
+                c[i] = 1
+    return c
+
+
 ############################################################
 # add_piece_wise_const_python
 ############################################################
diff --git a/pyspike/spike_directionality.py b/pyspike/spike_directionality.py
new file mode 100644
index 0000000..248862c
--- /dev/null
+++ b/pyspike/spike_directionality.py
@@ -0,0 +1,522 @@
+# Module containing functions to compute the SPIKE directionality and the
+# spike train order profile
+# Copyright 2015, Mario Mulansky
+# Distributed under the BSD License
+
+from __future__ import absolute_import
+
+import numpy as np
+import pyspike
+from pyspike import DiscreteFunc
+from functools import partial
+from pyspike.generic import _generic_profile_multi
+
+
+############################################################
+# spike_directionality_values
+############################################################
+def spike_directionality_values(*args, **kwargs):
+    """ Computes the spike directionality value for each spike in
+    each spike train. Returns a list containing an array of spike directionality
+    values for every given spike train.
+
+    Valid call structures::
+
+      spike_directionality_values(st1, st2)  # returns the bi-variate profile
+      spike_directionality_values(st1, st2, st3)  # multi-variate profile of 3
+                                                  # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_directionality_values(spike_trains)  # profile of the list of spike trains
+      spike_directionality_values(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                                 # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window (default=None).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike directionality values :math:`D^n_i` as a list of arrays.
+    """
+    if len(args) == 1:
+        return _spike_directionality_values_impl(args[0], **kwargs)
+    else:
+        return _spike_directionality_values_impl(args, **kwargs)
+
+
+def _spike_directionality_values_impl(spike_trains, indices=None,
+                                      interval=None, max_tau=None):
+    """ Computes the multi-variate spike directionality profile
+    of the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike-directionality values.
+    """
+    if interval is not None:
+        raise NotImplementedError("Parameter `interval` not supported.")
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
+    # list of arrays for resulting asymmetry values
+    asymmetry_list = [np.zeros_like(spike_trains[n].spikes) for n in indices]
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_directionality_profiles_cython as profile_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: spike_directionality_profiles_cython not found. 
Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_directionality_profile_python as profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    for i, j in pairs:
+        d1, d2 = profile_impl(spike_trains[i].spikes, spike_trains[j].spikes,
+                              spike_trains[i].t_start, spike_trains[i].t_end,
+                              max_tau)
+        asymmetry_list[i] += d1
+        asymmetry_list[j] += d2
+    for a in asymmetry_list:
+        a /= len(spike_trains)-1
+    return asymmetry_list
+
+
+############################################################
+# spike_directionality
+############################################################
+def spike_directionality(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike directionality of the first spike train with
+    respect to the second spike train.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality value.
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_directionality_cython as spike_directionality_impl
+            if max_tau is None:
+                max_tau = 0.0
+            d = spike_directionality_impl(spike_train1.spikes,
+                                          spike_train2.spikes,
+                                          spike_train1.t_start,
+                                          spike_train1.t_end,
+                                          max_tau)
+            c = len(spike_train1.spikes)
+        except ImportError:
+            if not(pyspike.disable_backend_warning):
+                print("Warning: spike_directionality_cython not found. Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+            # use profile.
+            d1, x = spike_directionality_values([spike_train1, spike_train2],
+                                                interval=interval,
+                                                max_tau=max_tau)
+            d = np.sum(d1)
+            c = len(spike_train1.spikes)
+        if normalize:
+            return 1.0*d/c
+        else:
+            return d
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_directionality_matrix
+############################################################
+def spike_directionality_matrix(spike_trains, normalize=True, indices=None,
+                                interval=None, max_tau=None):
+    """ Computes the spike directionality matrix for the given spike trains.
+
+    :param spike_trains: List of spike trains.
+    :type spike_trains: List of :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike directionality matrix.
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
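+    # the matrix is antisymmetric, distance_matrix[j, i] = -distance_matrix[i, j],
+    # so it suffices to iterate over the upper-triangle index pairs below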
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    distance_matrix = np.zeros((len(indices), len(indices)))
+    for i, j in pairs:
+        d = spike_directionality(spike_trains[i], spike_trains[j], normalize,
+                                 interval, max_tau=max_tau)
+        distance_matrix[i, j] = d
+        distance_matrix[j, i] = -d
+    return distance_matrix
+
+
+############################################################
+# spike_train_order_profile
+############################################################
+def spike_train_order_profile(*args, **kwargs):
+    """ Computes the spike train order profile :math:`E(t)` of the given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    Valid call structures::
+
+      spike_train_order_profile(st1, st2)  # returns the bi-variate profile
+      spike_train_order_profile(st1, st2, st3)  # multi-variate profile of 3
+                                                # spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order_profile(spike_trains)  # profile of the list of spike trains
+      spike_train_order_profile(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                               # given by the indices
+
+    Additional arguments:
+    :param max_tau: Upper bound for coincidence window, `default=None`.
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+
+    :returns: The spike train order profile :math:`E(t)`
+    :rtype: :class:`.DiscreteFunction`
+    """
+    if len(args) == 1:
+        return spike_train_order_profile_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_profile_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_profile_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_profile_bi
+############################################################
+def spike_train_order_profile_bi(spike_train1, spike_train2, max_tau=None):
+    """ Computes the spike train order profile :math:`E(t)` of the two given
+    spike trains. Returns the profile as a DiscreteFunction object.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order profile :math:`E(t)`.
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    # check whether the spike trains are defined for the same interval
+    assert spike_train1.t_start == spike_train2.t_start, \
+        "Given spike trains are not defined on the same interval!"
+    assert spike_train1.t_end == spike_train2.t_end, \
+        "Given spike trains are not defined on the same interval!"
+
+    # cython implementation
+    try:
+        from .cython.cython_directionality import \
+            spike_train_order_profile_cython as \
+            spike_train_order_profile_impl
+    except ImportError:
+        # raise NotImplementedError()
+        if not(pyspike.disable_backend_warning):
+            print("Warning: spike_train_order_profile_cython not found. 
Make sure that \
+PySpike is installed by running\n 'python setup.py build_ext --inplace'!\n \
+Falling back to slow python backend.")
+        # use python backend
+        from .cython.directionality_python_backend import \
+            spike_train_order_profile_python as spike_train_order_profile_impl
+
+    if max_tau is None:
+        max_tau = 0.0
+
+    times, coincidences, multiplicity \
+        = spike_train_order_profile_impl(spike_train1.spikes,
+                                         spike_train2.spikes,
+                                         spike_train1.t_start,
+                                         spike_train1.t_end,
+                                         max_tau)
+
+    return DiscreteFunc(times, coincidences, multiplicity)
+
+
+############################################################
+# spike_train_order_profile_multi
+############################################################
+def spike_train_order_profile_multi(spike_trains, indices=None,
+                                    max_tau=None):
+    """ Computes the multi-variate spike train order profile for a set of
+    spike trains. For each spike in the set of spike trains, the multi-variate
+    profile is defined as the sum of asymmetry values divided by the number of
+    spike train pairs involving the spike train containing this spike,
+    which is the number of spike trains minus one (N-1).
+
+    :param spike_trains: list of :class:`pyspike.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The multi-variate spike train order profile :math:`E(t)`
+    :rtype: :class:`pyspike.function.DiscreteFunction`
+    """
+    prof_func = partial(spike_train_order_profile_bi, max_tau=max_tau)
+    average_prof, M = _generic_profile_multi(spike_trains, prof_func,
+                                             indices)
+    return average_prof
+
+
+
+############################################################
+# _spike_train_order_impl
+############################################################
+def _spike_train_order_impl(spike_train1, spike_train2,
+                            interval=None, max_tau=None):
+    """ Implementation of bi-variate spike train order value (Synfire Indicator).
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if interval is None:
+        # distance over the whole interval is requested: use specific function
+        # for optimal performance
+        try:
+            from .cython.cython_directionality import \
+                spike_train_order_cython as spike_train_order_func
+            if max_tau is None:
+                max_tau = 0.0
+            c, mp = spike_train_order_func(spike_train1.spikes,
+                                           spike_train2.spikes,
+                                           spike_train1.t_start,
+                                           spike_train1.t_end,
+                                           max_tau)
+        except ImportError:
+            # Cython backend not available: fall back to profile averaging
+            c, mp = spike_train_order_profile(spike_train1, spike_train2,
+                                              max_tau=max_tau).integral(interval)
+        return c, mp
+    else:
+        # some specific interval is provided: not yet implemented
+        raise NotImplementedError("Parameter `interval` not supported.")
+
+
+############################################################
+# spike_train_order
+############################################################
+def spike_train_order(*args, **kwargs):
+    """ Computes the spike train order (Synfire Indicator) of the given
+    spike trains.
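+    The normalized value lies in the interval [-1, 1].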
+
+    Valid call structures::
+
+      spike_train_order(st1, st2, normalize=True)  # normalized bi-variate
+                                                   # spike train order
+      spike_train_order(st1, st2, st3)  # multi-variate result of 3 spike trains
+
+      spike_trains = [st1, st2, st3, st4]  # list of spike trains
+      spike_train_order(spike_trains)  # result for the list of spike trains
+      spike_train_order(spike_trains, indices=[0, 1])  # use only the spike trains
+                                                       # given by the indices
+
+    Additional arguments:
+    - `max_tau` Upper bound for coincidence window, `default=None`.
+    - `normalize` Flag indicating if the result should be normalized by the
+      number of spikes (multiplicity), `default=True`
+
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    if len(args) == 1:
+        return spike_train_order_multi(args[0], **kwargs)
+    elif len(args) == 2:
+        return spike_train_order_bi(args[0], args[1], **kwargs)
+    else:
+        return spike_train_order_multi(args, **kwargs)
+
+
+############################################################
+# spike_train_order_bi
+############################################################
+def spike_train_order_bi(spike_train1, spike_train2, normalize=True,
+                         interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for two spike trains.
+
+    :param spike_train1: First spike train.
+    :type spike_train1: :class:`pyspike.SpikeTrain`
+    :param spike_train2: Second spike train.
+    :type spike_train2: :class:`pyspike.SpikeTrain`
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator)
+    """
+    c, mp = _spike_train_order_impl(spike_train1, spike_train2, interval, max_tau)
+    if normalize:
+        return 1.0*c/mp
+    else:
+        return c
+
+############################################################
+# spike_train_order_multi
+############################################################
+def spike_train_order_multi(spike_trains, indices=None, normalize=True,
+                            interval=None, max_tau=None):
+    """ Computes the overall spike train order value (Synfire Indicator)
+    for many spike trains.
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :param normalize: Normalize by the number of spikes (multiplicity).
+    :param interval: averaging interval given as a pair of floats, if None
+                     the average over the whole function is computed.
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound.
+    :returns: The spike train order value (Synfire Indicator) F for the given spike trains.
+    :rtype: double
+    """
+    if indices is None:
+        indices = np.arange(len(spike_trains))
+    indices = np.array(indices)
+    # check validity of indices
+    assert (indices < len(spike_trains)).all() and (indices >= 0).all(), \
+        "Invalid index list."
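+    # the Synfire Indicator is the ratio of the summed asymmetry values to the
+    # summed multiplicities, accumulated over all spike train pairs below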
+    # generate a list of possible index pairs
+    pairs = [(indices[i], j) for i in range(len(indices))
+             for j in indices[i+1:]]
+
+    e_total = 0.0
+    m_total = 0.0
+    for (i, j) in pairs:
+        e, m = _spike_train_order_impl(spike_trains[i], spike_trains[j],
+                                       interval, max_tau)
+        e_total += e
+        m_total += m
+
+    if m_total == 0.0:
+        # no spikes in any pair -> spike train order = 1 by definition
+        return 1.0
+    else:
+        return e_total/m_total
+
+
+
+############################################################
+# optimal_spike_train_sorting_from_matrix
+############################################################
+def _optimal_spike_train_sorting_from_matrix(D, full_output=False):
+    """ Finds the best sorting via simulated annealing.
+    Returns the optimal permutation p and A value.
+    Not for direct use, call :func:`.optimal_spike_train_sorting` instead.
+
+    :param D: The directionality (Spike-ORDER) matrix.
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    N = len(D)
+    A = np.sum(np.triu(D, 0))
+
+    p = np.arange(N)
+
+    T_start = 2*np.max(D) # starting temperature
+    T_end = 1E-5 * T_start # final temperature
+    alpha = 0.9 # cooling factor
+
+    try:
+        from .cython.cython_simulated_annealing import sim_ann_cython as sim_ann
+    except ImportError:
+        raise NotImplementedError("PySpike with Cython required for computing spike train"
+                                  " sorting!")
+
+    p, A, total_iter = sim_ann(D, T_start, T_end, alpha)
+
+    if full_output:
+        return p, A, total_iter
+    else:
+        return p, A
+
+
+############################################################
+# optimal_spike_train_sorting
+############################################################
+def optimal_spike_train_sorting(spike_trains, indices=None, interval=None,
+                                max_tau=None, full_output=False):
+    """ Finds the best sorting of the given spike trains by computing the spike
+    directionality matrix and optimizing the order using simulated annealing.
+    For a detailed description of the algorithm see:
+    `http://iopscience.iop.org/article/10.1088/1367-2630/aa68c3/meta`
+
+    :param spike_trains: list of :class:`.SpikeTrain`
+    :param indices: list of indices defining which spike trains to use,
+                    if None all given spike trains are used (default=None)
+    :type indices: list or None
+    :param interval: time interval filter given as a pair of floats, if None
+                     the full spike trains are used (default=None).
+    :type interval: Pair of floats or None.
+    :param max_tau: Maximum coincidence window size. If 0 or `None`, the
+                    coincidence window has no upper bound (default=None).
+    :param full_output: If true, then function will additionally return the
+                        number of performed iterations (default=False)
+    :return: (p, F) - tuple with the optimal permutation and synfire indicator.
+             if `full_output=True`, (p, F, iter) is returned.
+    """
+    D = spike_directionality_matrix(spike_trains, normalize=False,
+                                    indices=indices, interval=interval,
+                                    max_tau=max_tau)
+    return _optimal_spike_train_sorting_from_matrix(D, full_output)
+
+############################################################
+# permutate_matrix
+############################################################
+def permutate_matrix(D, p):
+    """ Helper function that applies the permutation p to the columns and rows
+    of matrix D. Returns the permuted matrix :math:`D'[n,m] = D[p[n], p[m]]`.
+
+    :param D: The matrix.
+    :param p: The permutation.
+    :return: The permuted matrix D', i.e. :math:`D'[n,m] = D[p[n], p[m]]`
+    """
+    N = len(D)
+    D_p = np.empty_like(D)
+    for n in range(N):
+        for m in range(N):
+            D_p[n, m] = D[p[n], p[m]]
+    return D_p
diff --git a/pyspike/spike_sync.py b/pyspike/spike_sync.py
index 80f7805..95ef454 100644
--- a/pyspike/spike_sync.py
+++ b/pyspike/spike_sync.py
@@ -8,7 +8,7 @@ from __future__ import absolute_import
 import numpy as np
 from functools import partial
 import pyspike
-from pyspike import DiscreteFunc
+from pyspike import DiscreteFunc, SpikeTrain
 from pyspike.generic import _generic_profile_multi, _generic_distance_matrix
 
 
@@ -45,9 +45,9 @@ def spike_sync_profile(*args, **kwargs):
     if len(args) == 1:
         return spike_sync_profile_multi(args[0], **kwargs)
     elif len(args) == 2:
-        return spike_sync_profile_bi(args[0], args[1])
+        return spike_sync_profile_bi(args[0], args[1], **kwargs)
     else:
-        return spike_sync_profile_multi(args)
+        return spike_sync_profile_multi(args, **kwargs)
 
 
 ############################################################
@@ -290,3 +290,52 @@ def spike_sync_matrix(spike_trains, indices=None, interval=None, max_tau=None):
     dist_func = partial(spike_sync_bi, max_tau=max_tau)
     return _generic_distance_matrix(spike_trains, dist_func,
                                     indices, interval)
+
+
+############################################################
+# filter_by_spike_sync
+############################################################
+def filter_by_spike_sync(spike_trains, threshold, indices=None, max_tau=None,
+                         return_removed_spikes=False):
+    """ Removes the spikes with a multi-variate spike_sync value below
+    threshold.
+    """
+    N = len(spike_trains)
+    filtered_spike_trains = []
+    removed_spike_trains = []
+
+    # cython implementation
+    try:
+        from .cython.cython_profiles import coincidence_single_profile_cython \
+            as coincidence_impl
+    except ImportError:
+        if not(pyspike.disable_backend_warning):
+            print("Warning: coincidence_single_profile_cython not found. 
Make \ +sure that PySpike is installed by running\n \ +'python setup.py build_ext --inplace'!\n \ +Falling back to slow python backend.") + # use python backend + from .cython.python_backend import coincidence_single_python \ + as coincidence_impl + + if max_tau is None: + max_tau = 0.0 + + for i, st in enumerate(spike_trains): + coincidences = np.zeros_like(st) + for j in range(N): + if i == j: + continue + coincidences += coincidence_impl(st.spikes, spike_trains[j].spikes, + st.t_start, st.t_end, max_tau) + filtered_spikes = st[coincidences > threshold*(N-1)] + filtered_spike_trains.append(SpikeTrain(filtered_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + removed_spikes = st[coincidences <= threshold*(N-1)] + removed_spike_trains.append(SpikeTrain(removed_spikes, + [st.t_start, st.t_end])) + if return_removed_spikes: + return [filtered_spike_trains, removed_spike_trains] + else: + return filtered_spike_trains diff --git a/setup.py b/setup.py index 5b9e677..b5b01a6 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,9 @@ class numpy_include(object): if os.path.isfile("pyspike/cython/cython_add.c") and \ os.path.isfile("pyspike/cython/cython_profiles.c") and \ - os.path.isfile("pyspike/cython/cython_distances.c"): + os.path.isfile("pyspike/cython/cython_distances.c") and \ + os.path.isfile("pyspike/cython/cython_directionality.c") and \ + os.path.isfile("pyspike/cython/cython_simulated_annealing.c"): use_c = True else: use_c = False @@ -45,7 +47,11 @@ if use_cython: # Cython is available, compile .pyx -> .c Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.pyx"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.pyx"]) + ["pyspike/cython/cython_distances.pyx"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.pyx"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.pyx"]) ] cmdclass.update({'build_ext': build_ext}) elif use_c: # c files are there, compile to binaries @@ -55,14 +61,18 @@ elif use_c: # c files are there, compile to binaries Extension("pyspike.cython.cython_profiles", ["pyspike/cython/cython_profiles.c"]), Extension("pyspike.cython.cython_distances", - ["pyspike/cython/cython_distances.c"]) + ["pyspike/cython/cython_distances.c"]), + Extension("pyspike.cython.cython_directionality", + ["pyspike/cython/cython_directionality.c"]), + Extension("pyspike.cython.cython_simulated_annealing", + ["pyspike/cython/cython_simulated_annealing.c"]) ] # neither cython nor c files available -> automatic fall-back to python backend setup( name='pyspike', packages=find_packages(exclude=['doc']), - version='0.5.3', + version='0.6.0', cmdclass=cmdclass, ext_modules=ext_modules, include_dirs=[numpy_include()], @@ -90,11 +100,17 @@ train similarity', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' - ] + ], + package_data={ + 'pyspike': ['cython/cython_add.c', 'cython/cython_profiles.c', + 'cython/cython_distances.c', + 'cython/cython_directionality.c', + 'cython/cython_simulated_annealing.c'], + 'test': ['Spike_testdata.txt'] + } ) diff --git a/test/test_directionality.py b/test/test_directionality.py new file mode 100644 index 0000000..c2e9bfe --- /dev/null +++ b/test/test_directionality.py @@ 
-0,0 +1,97 @@ +""" test_directionality.py + +Tests the directionality functions + +Copyright 2015, Mario Mulansky + +Distributed under the BSD License + +""" + +import numpy as np +from numpy.testing import assert_equal, assert_almost_equal, \ + assert_array_equal + +import pyspike as spk +from pyspike import SpikeTrain, DiscreteFunc + + +def test_spike_directionality(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_directionality(st1, st2, normalize=False), + 2.0) + + # exchange order of spike trains should give exact negative profile + assert_almost_equal(spk.spike_directionality(st2, st1), -2.0/3.0) + assert_almost_equal(spk.spike_directionality(st2, st1, normalize=False), + -2.0) + + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + assert_almost_equal(spk.spike_directionality(st1, st3), 0.0) + assert_almost_equal(spk.spike_directionality(st1, st3, normalize=False), + 0.0) + assert_almost_equal(spk.spike_directionality(st3, st1), 0.0) + + D = spk.spike_directionality_matrix([st1, st2, st3], normalize=False) + D_expected = np.array([[0, 2.0, 0.0], [-2.0, 0.0, -1.0], [0.0, 1.0, 0.0]]) + assert_array_equal(D, D_expected) + + dir_profs = spk.spike_directionality_values([st1, st2, st3]) + assert_array_equal(dir_profs[0], [1.0, 0.0, 0.0]) + assert_array_equal(dir_profs[1], [-0.5, -1.0, 0.0]) + + +def test_spike_train_order(): + st1 = SpikeTrain([100, 200, 300], [0, 1000]) + st2 = SpikeTrain([105, 205, 300], [0, 1000]) + st3 = SpikeTrain([105, 195, 500], [0, 1000]) + + expected_x12 = np.array([0, 100, 105, 200, 205, 300, 1000]) + expected_y12 = np.array([1, 1, 1, 1, 1, 0, 0]) + expected_mp12 = np.array([1, 1, 1, 1, 1, 2, 2]) + + f = spk.spike_train_order_profile(st1, st2) + + assert f.almost_equal(DiscreteFunc(expected_x12, expected_y12, + expected_mp12)) + assert_almost_equal(f.avrg(), 2.0/3.0) + assert_almost_equal(f.avrg(normalize=False), 4.0) + assert_almost_equal(spk.spike_train_order(st1, st2), 2.0/3.0) + assert_almost_equal(spk.spike_train_order(st1, st2, normalize=False), 4.0) + + expected_x23 = np.array([0, 105, 195, 205, 300, 500, 1000]) + expected_y23 = np.array([0, 0, -1, -1, 0, 0, 0]) + expected_mp23 = np.array([2, 2, 1, 1, 1, 1, 1]) + + f = spk.spike_train_order_profile(st2, st3) + + assert_array_equal(f.x, expected_x23) + assert_array_equal(f.y, expected_y23) + assert_array_equal(f.mp, expected_mp23) + assert f.almost_equal(DiscreteFunc(expected_x23, expected_y23, + expected_mp23)) + assert_almost_equal(f.avrg(), -1.0/3.0) + assert_almost_equal(f.avrg(normalize=False), -2.0) + assert_almost_equal(spk.spike_train_order(st2, st3), -1.0/3.0) + assert_almost_equal(spk.spike_train_order(st2, st3, normalize=False), -2.0) + + f = spk.spike_train_order_profile_multi([st1, st2, st3]) + + expected_x = np.array([0, 100, 105, 195, 200, 205, 300, 500, 1000]) + expected_y = np.array([2, 2, 2, -2, 0, 0, 0, 0, 0]) + expected_mp = np.array([2, 2, 4, 2, 2, 2, 4, 2, 2]) + + assert_array_equal(f.x, expected_x) + assert_array_equal(f.y, expected_y) + assert_array_equal(f.mp, expected_mp) + + # Averaging the profile should be the same as computing the synfire indicator directly. 
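+    # (both reduce to the sum of all asymmetry values divided by the sum of
+    # multiplicities over all spike train pairs)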
+    assert_almost_equal(f.avrg(), spk.spike_train_order([st1, st2, st3]))
+
+    # We can also compute the synfire indicator from the Directionality Matrix:
+    D_matrix = spk.spike_directionality_matrix([st1, st2, st3], normalize=False)
+    num_spikes = sum(len(st) for st in [st1, st2, st3])
+    syn_fire = np.sum(np.triu(D_matrix)) / num_spikes
+    assert_almost_equal(f.avrg(), syn_fire)
diff --git a/test/test_function.py b/test/test_function.py
index 92d378d..6c04839 100644
--- a/test/test_function.py
+++ b/test/test_function.py
@@ -10,6 +10,7 @@ Distributed under the BSD License
 from __future__ import print_function
 import numpy as np
 from copy import copy
+from nose.tools import raises
 from numpy.testing import assert_equal, assert_almost_equal, \
     assert_array_equal, assert_array_almost_equal
 
@@ -49,6 +50,8 @@ def test_pwc():
     assert_almost_equal(a, (0.5-0.5+0.5*1.5+1.0*0.75)/3.0, decimal=16)
     a = f.avrg([1.5, 3.5])
     assert_almost_equal(a, (-0.5*0.5+0.5*1.5+1.0*0.75)/2.0, decimal=16)
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (1.0*-0.5)/1.0, decimal=16)
     a = f.avrg([1.0, 3.5])
     assert_almost_equal(a, (-0.5*1.0+0.5*1.5+1.0*0.75)/2.5, decimal=16)
     a = f.avrg([1.0, 4.0])
@@ -120,6 +123,53 @@ def test_pwc_avrg():
     assert_array_almost_equal(f1.x, x_expected, decimal=16)
     assert_array_almost_equal(f1.y, y_expected, decimal=16)
 
+def test_pwc_integral():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+
+    # test full interval
+    full = 1.0*1.0 + 1.0*-0.5 + 0.5*1.5 + 1.5*0.75
+    assert_equal(f1.integral(), full)
+    assert_equal(f1.integral((np.min(x), np.max(x))), full)
+    # test part interval, spanning an edge
+    assert_equal(f1.integral((0.5, 1.5)), 0.5*1.0 + 0.5*-0.5)
+    # test part interval, just over two edges
+    assert_almost_equal(f1.integral((1.0-1e-16, 2+1e-16)), 1.0*-0.5, decimal=14)
+    # test part interval, between two edges
+    assert_equal(f1.integral((1.0, 2.0)), 1.0*-0.5)
+    assert_equal(f1.integral((1.2, 1.7)), (1.7-1.2)*-0.5)
+    # test part interval, start to before and after edge
+    assert_equal(f1.integral((0.0, 0.7)), 0.7*1.0)
+    assert_equal(f1.integral((0.0, 1.1)), 1.0*1.0+0.1*-0.5)
+    # test part interval, before and after edge till end
+    assert_equal(f1.integral((2.6, 4.0)), (4.0-2.6)*0.75)
+    assert_equal(f1.integral((2.4, 4.0)), (2.5-2.4)*1.5+(4-2.5)*0.75)
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_inv():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((3, 2))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_1():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((1, 6))
+
+@raises(ValueError)
+def test_pwc_integral_bad_bounds_oob_2():
+    # some random data
+    x = [0.0, 1.0, 2.0, 2.5, 4.0]
+    y = [1.0, -0.5, 1.5, 0.75]
+    f1 = spk.PieceWiseConstFunc(x, y)
+    f1.integral((-1, 3))
 
 def test_pwl():
     x = [0.0, 1.0, 2.0, 2.5, 4.0]
@@ -162,6 +212,18 @@ def test_pwl():
     a = f.avrg([1.0, 4.0])
     assert_almost_equal(a, (-0.45 + 0.75 + 1.5*0.5) / 3.0, decimal=16)
 
+    # interval between support points
+    a = f.avrg([1.1, 1.5])
+    assert_almost_equal(a, (-0.5+0.1*0.1 - 0.45) * 0.5, decimal=14)
+
+    # starting at a support point
+    a = f.avrg([1.0, 1.5])
+    assert_almost_equal(a, (-0.5 - 0.45) * 0.5, decimal=14)
+
+    # start and end at support point
+    a = f.avrg([1.0, 2.0])
+    assert_almost_equal(a, (-0.5 - 0.4) * 0.5, decimal=14)
+
     # averaging over multiple intervals
     a = 
f.avrg([(0.5, 1.5), (1.5, 2.5)])
     assert_almost_equal(a, (1.375*0.5 - 0.45 + 0.75)/2.0, decimal=16)
 
diff --git a/test/test_sync_filter.py b/test/test_sync_filter.py
new file mode 100644
index 0000000..e259903
--- /dev/null
+++ b/test/test_sync_filter.py
@@ -0,0 +1,95 @@
+""" test_sync_filter.py
+
+Tests the spike sync based filtering
+
+Copyright 2015, Mario Mulansky
+
+Distributed under the BSD License
+
+"""
+
+from __future__ import print_function
+import numpy as np
+from numpy.testing import assert_equal, assert_almost_equal, \
+    assert_array_almost_equal
+
+import pyspike as spk
+from pyspike import SpikeTrain
+
+
+def test_single_prof():
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.1, 2.1, 3.8])
+    st3 = np.array([0.9, 3.1, 4.1])
+
+    # cython implementation
+    try:
+        from pyspike.cython.cython_profiles import \
+            coincidence_single_profile_cython as coincidence_impl
+    except ImportError:
+        from pyspike.cython.python_backend import \
+            coincidence_single_python as coincidence_impl
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    print(coincidences)
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    coincidences = np.array(coincidence_impl(st2, st1, 0, 5.0, 0.0))
+    for i, t in enumerate(st2):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st3, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st3, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        assert_equal(coincidences[i], sync_prof.y[sync_prof.x == t],
+                     "At index %d" % i)
+
+    st1 = np.array([1.0, 2.0, 3.0, 4.0])
+    st2 = np.array([1.0, 2.0, 4.0])
+
+    sync_prof = spk.spike_sync_profile(SpikeTrain(st1, 5.0),
+                                       SpikeTrain(st2, 5.0))
+
+    coincidences = np.array(coincidence_impl(st1, st2, 0, 5.0, 0.0))
+    for i, t in enumerate(st1):
+        expected = sync_prof.y[sync_prof.x == t]/sync_prof.mp[sync_prof.x == t]
+        assert_equal(coincidences[i], expected,
+                     "At index %d" % i)
+
+
+def test_filter():
+    st1 = SpikeTrain(np.array([1.0, 2.0, 3.0, 4.0]), 5.0)
+    st2 = SpikeTrain(np.array([1.1, 2.1, 3.8]), 5.0)
+    st3 = SpikeTrain(np.array([0.9, 3.1, 4.1]), 5.0)
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st1, st2], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.0, 2.0, 4.0])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.1, 2.1, 3.8])
+
+    # filtered_spike_trains = spk.filter_by_spike_sync([st2, st1], 0.5)
+
+    # assert_equal(filtered_spike_trains[0].spikes, [1.1, 2.1, 3.8])
+    # assert_equal(filtered_spike_trains[1].spikes, [1.0, 2.0, 4.0])
+
+    filtered_spike_trains = spk.filter_by_spike_sync([st1, st2, st3], 0.75)
+
+    for st in filtered_spike_trains:
+        print(st.spikes)
+
+    assert_equal(filtered_spike_trains[0].spikes, [1.0, 4.0])
+    assert_equal(filtered_spike_trains[1].spikes, [1.1, 3.8])
+    assert_equal(filtered_spike_trains[2].spikes, [0.9, 4.1])
+
+
+if __name__ == "__main__":
+    test_single_prof()
+    test_filter()
--
cgit v1.2.3